// Verifies the AddAll extension reports whether any element was newly added:
// false when every element is already present, true when at least one is new.
public void TestAddAll()
{
    var items = new HashSet<string>() { "a", "b", "c" };

    // All duplicates -> nothing added.
    Assert.False(items.AddAll(new[] { "b", "c" }));
    // "d" is new even though "c" is a duplicate.
    Assert.True(items.AddAll(new[] { "c", "d" }));
    // Entirely new elements.
    Assert.True(items.AddAll(new[] { "e", "f" }));
}
/// <summary>
/// Stress test: with NoDeletionPolicy no commit point is ever deleted, so every
/// file referenced by any observed commit must remain on disk while background
/// threads keep indexing. We repeatedly reopen a reader, accumulate the file
/// names of each commit, and assert none of them has disappeared.
/// </summary>
public virtual void TestIndexing()
{
    DirectoryInfo tmpDir = CreateTempDir("TestNeverDelete");
    BaseDirectoryWrapper d = NewFSDirectory(tmpDir);

    // We want to "see" files removed if Lucene removed them. This is still
    // worth running on Windows since some files the IR opens and closes.
    if (d is MockDirectoryWrapper)
    {
        ((MockDirectoryWrapper)d).NoDeleteOpenFile = false;
    }

    RandomIndexWriter w = new RandomIndexWriter(Random(), d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
    // Small buffer forces frequent segment flushes, creating many files.
    w.w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 5, 30));
    w.Commit();

    // Spin up 0..3 background indexing threads that run until stopTime.
    ThreadClass[] indexThreads = new ThreadClass[Random().Next(4)];
    long stopTime = Environment.TickCount + AtLeast(1000);
    for (int x = 0; x < indexThreads.Length; x++)
    {
        indexThreads[x] = new ThreadAnonymousInnerClassHelper(w, stopTime);
        indexThreads[x].Name = "Thread " + x;
        indexThreads[x].Start();
    }

    // Union of every file name seen in any commit so far.
    HashSet<string> allFiles = new HashSet<string>();

    DirectoryReader r = DirectoryReader.Open(d);
    while (Environment.TickCount < stopTime)
    {
        IndexCommit ic = r.IndexCommit;
        if (VERBOSE)
        {
            Console.WriteLine("TEST: check files: " + ic.FileNames);
        }
        allFiles.AddAll(ic.FileNames);
        // Make sure no old files were removed
        foreach (string fileName in allFiles)
        {
            Assert.IsTrue(SlowFileExists(d, fileName), "file " + fileName + " does not exist");
        }
        // Pick up the latest commit, disposing the stale reader.
        DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
        if (r2 != null)
        {
            r.Dispose();
            r = r2;
        }
        Thread.Sleep(1);
    }
    r.Dispose();

    // Wait for the indexing threads before tearing the writer down.
    foreach (ThreadClass t in indexThreads)
    {
        t.Join();
    }

    w.Dispose();
    d.Dispose();

    System.IO.Directory.Delete(tmpDir.FullName, true);
}
/// <summary>
/// Returns the type names of language-specific analyzers. When
/// <paramref name="languageOpt"/> is null, analyzers for every language are
/// returned; otherwise only those registered for that language.
/// </summary>
/// <param name="analyzerTypeNameMap">Map from language name to analyzer type names; the empty-string key holds language-agnostic analyzers.</param>
/// <param name="languageOpt">The requested language, or null for all languages.</param>
private static IEnumerable<string> GetLanguageSpecificAnalyzerTypeNames(ImmutableDictionary<string, ImmutableHashSet<string>> analyzerTypeNameMap, string languageOpt)
{
    HashSet<string> languageSpecificAnalyzerTypeNames = new HashSet<string>();
    if (languageOpt == null)
    {
        // If the user didn't ask for a specific language then return all language specific analyzers.
        languageSpecificAnalyzerTypeNames.AddAll(analyzerTypeNameMap.SelectMany(kvp => kvp.Key != string.Empty ? kvp.Value : SpecializedCollections.EmptyEnumerable<string>()));
    }
    else
    {
        // Add the analyzers for the specific language.
        // TryGetValue avoids the ContainsKey + indexer double lookup.
        if (analyzerTypeNameMap.TryGetValue(languageOpt, out var analyzerTypeNames))
        {
            languageSpecificAnalyzerTypeNames.AddAll(analyzerTypeNames);
        }
    }

    return languageSpecificAnalyzerTypeNames;
}
// NOTE (kept from original): building the whole result collection may be
// wasteful from a performance point of view when the caller only ever uses
// its size.
public ICollection<T> RangeFromTo(Q x, Q y)
{
    var merged = new HashSet<T>(EqualityComparer<T>.Default);
    foreach (var pair in _dictionary.RangeFromTo(x, y))
    {
        merged.AddAll(pair.Value);
    }
    return merged;
}
/// <summary>
/// Return a set of int values representing the value of the parameter for the given range.
/// </summary>
/// <param name="min">lower end of range</param>
/// <param name="max">upper end of range</param>
/// <returns>set of integer</returns>
public ICollection<int> GetValuesInRange(int min, int max)
{
    // Union the per-parameter ranges; the set removes duplicates.
    ICollection<int> values = new HashSet<int>();
    foreach (NumberSetParameter parameter in _parameters)
    {
        values.AddAll(parameter.GetValuesInRange(min, max));
    }
    return values;
}
/// <summary>Collects the distinct edges attached to any of the given nodes.</summary>
public static IEnumerable<IEdge> GetEdges(IEnumerable<INode> nodes)
{
    HashSet<IEdge> collected = new HashSet<IEdge>();
    foreach (var current in nodes)
    {
        collected.AddAll(current.Edges);
    }
    return collected;
}
/// <summary>
/// Computes a maximum bipartite matching by repeatedly searching for
/// augmenting paths in breadth-first layers and then flipping the found
/// paths. NOTE(review): assumes Search() populates _endPoints and BackRef
/// and sets _foundAugmentingPath — confirm against its implementation.
/// </summary>
private void Compute()
{
    // Left nodes without a partner; each applied augmenting path removes one.
    var unmatchedLeftNodes = new HashSet<LeftNode>();
    unmatchedLeftNodes.AddAll(_leftNodes);
    _foundAugmentingPath = true;
    Debug.Print("Compute start");
    while (_foundAugmentingPath)
    {
        Debug.Print("Start outer");
        _foundAugmentingPath = false;
        _endPoints = new HashSet<RightNode>();
        // Reset back references before rebuilding the search layers.
        foreach (var rightNode in _rightNodes)
        {
            rightNode.BackRef = null;
        }
        // Seed the search from every currently unmatched left node.
        foreach (LeftNode l in unmatchedLeftNodes)
        {
            Debug.Print("Unmatched: {0}", l);
            Search(l, l);
        }
        // Expand layer by layer until a path is found or the frontier dies out.
        while (!_foundAugmentingPath && _endPoints.Count > 0)
        {
            var oldLayer = _endPoints;
            _endPoints = new HashSet<RightNode>();
            foreach (var rb in oldLayer)
            {
                Search(rb.Match, rb.Origin);
            }
        }
        if (_endPoints.Count == 0)
        {
            return;
        }
        //Flip: walk each augmenting path backwards via BackRef, swapping
        //matched and unmatched edges along the way.
        Debug.Print("Flip");
        foreach (RightNode r in _endPoints)
        {
            if (r.Match == null && unmatchedLeftNodes.Contains(r.Origin))
            {
                RightNode nextR = r;
                LeftNode nextL = null;
                while (nextR != null)
                {
                    nextL = nextR.Match = nextR.BackRef;
                    var rSwap = nextL.Match;
                    nextL.Match = nextR;
                    nextR = rSwap;
                }
                // The path's origin left node is now matched.
                unmatchedLeftNodes.Remove(nextL);
            }
        }
    }
}
// Computes a maximum bipartite matching via repeated layered augmenting-path
// search followed by flipping the found paths.
// NOTE(review): assumes search() populates endPoints and backref and sets
// foundAugmentingPath — confirm against its implementation.
void Compute()
{
    // Left nodes without a partner; shrinks as augmenting paths are applied.
    HashSet<LeftNode> unmatchedLeftNodes = new HashSet<LeftNode>();
    unmatchedLeftNodes.AddAll(leftNodes);
    foundAugmentingPath = true;
    Debug.Print("Compute start");
    while (foundAugmentingPath)
    {
        Debug.Print("Start outer");
        foundAugmentingPath = false;
        endPoints = new HashSet<RightNode>();
        // Clear back references before rebuilding the search layers.
        foreach (RightNode rightNode in rightNodes)
        {
            rightNode.backref = null;
        }
        // Seed the search from every unmatched left node.
        foreach (LeftNode l in unmatchedLeftNodes)
        {
            Debug.Print("Unmatched: {0}", l);
            search(l, l);
        }
        // Expand layer by layer until a path is found or the frontier is empty.
        while (!foundAugmentingPath && endPoints.Count > 0)
        {
            HashSet<RightNode> oldLayer = endPoints;
            endPoints = new HashSet<RightNode>();
            foreach (RightNode rb in oldLayer)
            {
                search(rb.match, rb.origin);
            }
        }
        if (endPoints.Count == 0)
        {
            return;
        }
        //Flip: walk each augmenting path backwards via backref, swapping
        //matched and unmatched edges along the way.
        Debug.Print("Flip");
        foreach (RightNode r in endPoints)
        {
            if (r.match == null && unmatchedLeftNodes.Contains(r.origin))
            {
                RightNode nextR = r;
                LeftNode nextL = null;
                while (nextR != null)
                {
                    nextL = nextR.match = nextR.backref;
                    RightNode rSwap = nextL.match;
                    nextL.match = nextR;
                    nextR = rSwap;
                }
                // The path's origin left node is now matched.
                unmatchedLeftNodes.Remove(nextL);
            }
        }
    }
}
// AddAll over input containing a duplicate: the set collapses the duplicate,
// so the method reports false and the set ends up one element short.
public void DuplicateNumber_ShouldReturnFalse()
{
    var input = new List<int> { 1, 1, 2, 3, 5, 8 };
    ISet<int> targetSet = new HashSet<int>(input.Count);

    var everyItemAdded = targetSet.AddAll(input);

    everyItemAdded.Should().BeFalse();
    targetSet.Count.Should().Be(input.Count - 1);
}
/// <summary>
/// Gathers the process-instance ids supplied directly on the builder together
/// with those produced by the builder's query, de-duplicated into one set.
/// </summary>
protected internal virtual ICollection<string> CollectProcessInstanceIds(CommandContext commandContext)
{
    ISet<string> collected = new HashSet<string>();

    IList<string> directIds = builder.ProcessInstanceIds;
    if (directIds != null)
    {
        collected.AddAll(directIds);
    }

    var query = builder.ProcessInstanceQuery;
    if (query != null)
    {
        collected.AddAll(query.Select(c => c.Id).ToList());
    }

    return collected;
}
// AddAll over all-distinct input: reports true and every element lands in the set.
public void InsertAllInSet_ShouldReturnTrue()
{
    var input = new List<int> { 1, 2, 3, 4, 5 };
    ISet<int> targetSet = new HashSet<int>(input.Count);

    var everyItemAdded = targetSet.AddAll(input);

    everyItemAdded.Should().BeTrue();
    targetSet.Count.Should().Be(input.Count);
    AssertAllIncludedInSet(targetSet, input);
}
/// <summary>
/// Adds <paramref name="elements"/> to <paramref name="hashSet"/> only when
/// <paramref name="condition"/> is true.
/// </summary>
/// <param name="hashSet">The target set; must not be null.</param>
/// <param name="condition">When false, the call is a no-op.</param>
/// <param name="elements">The elements to add; must not be null.</param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="hashSet"/> or <paramref name="elements"/> is null.
/// </exception>
public static void AddIf<T>(this HashSet<T> hashSet, bool condition, params T[] elements)
{
    if (hashSet == null)
    {
        throw new ArgumentNullException(nameof(hashSet));
    }

    // A caller can defeat 'params' by passing an explicit null array; fail
    // fast with a clear exception instead of a NullReferenceException later.
    if (elements == null)
    {
        throw new ArgumentNullException(nameof(elements));
    }

    if (condition)
    {
        hashSet.AddAll(elements);
    }
}
// AddAll appends every supplied item; existing members are untouched.
void AddAll_adds_all_specified_items_to_collection()
{
    var numbers = new HashSet<int> { 1, 2, 3 };
    var additions = new[] { 4, 5 };
    var expected = new[] { 1, 2, 3, 4, 5 };

    numbers.AddAll(additions);

    numbers.Should().BeEquivalentTo(expected);
}
/// <summary>
/// Undeploys a deployment: optionally destroys its statements in reverse
/// deployment order, collects the event types they referenced, and undeploys
/// those types. The first exception seen during statement disposal is either
/// rethrown (RETHROW_FIRST policy) or only logged.
/// </summary>
private UndeploymentResult UndeployRemoveInternal(DeploymentInformation info, UndeploymentOptions undeploymentOptions)
{
    // Destroy in the reverse of deployment order.
    var reverted = new DeploymentInformationItem[info.Items.Length];
    for (var i = 0; i < info.Items.Length; i++)
    {
        reverted[i] = info.Items[info.Items.Length - 1 - i];
    }

    var revertedStatements = new List<DeploymentInformationItem>();
    if (undeploymentOptions.IsDestroyStatements)
    {
        var referencedTypes = new HashSet<String>();
        // Remember only the first failure; later ones are logged and skipped.
        Exception firstExceptionEncountered = null;

        foreach (var item in reverted)
        {
            var statement = _epService.GetStatement(item.StatementName);
            if (statement == null)
            {
                Log.Debug("Deployment id '" + info.DeploymentId + "' statement name '" + item + "' not found");
                continue;
            }
            // Collect event types before disposal so they can be undeployed below.
            referencedTypes.AddAll(_statementEventTypeRef.GetTypesForStatementName(statement.Name));
            if (statement.IsDisposed)
            {
                continue;
            }
            try
            {
                statement.Dispose();
            }
            catch (Exception ex)
            {
                Log.Warn("Unexpected exception destroying statement: " + ex.Message, ex);
                if (firstExceptionEncountered == null)
                {
                    firstExceptionEncountered = ex;
                }
            }
            revertedStatements.Add(item);
        }

        EPLModuleUtil.UndeployTypes(referencedTypes, _statementEventTypeRef, _eventAdapterService, _filterService);
        // Report statements in original deployment order.
        revertedStatements.Reverse();
        if (firstExceptionEncountered != null && _undeployRethrowPolicy == ConfigurationEngineDefaults.UndeployRethrowPolicy.RETHROW_FIRST)
        {
            throw firstExceptionEncountered;
        }
    }

    return(new UndeploymentResult(info.DeploymentId, revertedStatements));
}
/// <summary>
/// Finds CRLs matching the selector in both the additional and the standard
/// stores, then filters them by validity date: only CRLs whose NextUpdate is
/// after the validation date (and, if a certificate is being checked, whose
/// ThisUpdate precedes that certificate's NotAfter) are returned.
/// </summary>
public virtual ISet FindCrls(X509CrlStoreSelector crlselect, PkixParameters paramsPkix, DateTime currentDate)
{
    ISet set = new HashSet();
    try
    {
        // Gather candidates from both store groups before filtering.
        set.AddAll(FindCrls(crlselect, paramsPkix.GetAdditionalStores()));
        set.AddAll(FindCrls(crlselect, paramsPkix.GetStores()));
    }
    catch (Exception innerException)
    {
        throw new Exception("Exception obtaining complete CRLs.", innerException);
    }

    ISet set2 = new HashSet();
    // The explicit PkixParameters date, when set, overrides currentDate.
    DateTime dateTime = currentDate;
    if (paramsPkix.Date != null)
    {
        dateTime = paramsPkix.Date.Value;
    }
    foreach (X509Crl item in set)
    {
        // Keep only CRLs still valid at the validation date.
        if (item.NextUpdate.Value.CompareTo((object)dateTime) > 0)
        {
            X509Certificate certificateChecking = crlselect.CertificateChecking;
            if (certificateChecking != null)
            {
                // The CRL must have been issued before the certificate expired.
                if (item.ThisUpdate.CompareTo((object)certificateChecking.NotAfter) < 0)
                {
                    set2.Add(item);
                }
            }
            else
            {
                set2.Add(item);
            }
        }
    }
    return(set2);
}
/// <summary>
/// Lazily reads and caches the name tree of this catalog's tree type,
/// normalizing each entry to a PdfArray (entries that cannot be normalized
/// are removed). For the Dests tree, legacy entries from the catalog's
/// /Dests dictionary are merged in as well.
/// </summary>
public virtual IDictionary<String, PdfObject> GetNames()
{
    // Already populated: return the cached map.
    if (items.Count > 0)
    {
        return(items);
    }

    PdfDictionary dictionary = catalog.GetPdfObject().GetAsDictionary(PdfName.Names);
    if (dictionary != null)
    {
        dictionary = dictionary.GetAsDictionary(treeType);
        if (dictionary != null)
        {
            items = ReadTree(dictionary);
            //@TODO It's done for auto porting to itextsharp, cuz u cannot change collection which you iterate
            // in for loop (even if you change only value of a Map entry) in .NET. Java doesn't have such a problem.
            // We should find a better solution in the future.
            // Snapshot the keys so the map can be mutated while iterating.
            ICollection<String> keys = new HashSet<String>();
            keys.AddAll(items.Keys);
            foreach (String key in keys)
            {
                PdfArray arr = GetNameArray(items.Get(key));
                if (arr != null)
                {
                    items[key] = arr;
                }
                else
                {
                    // Entry could not be normalized to an array: drop it.
                    items.JRemove(key);
                }
            }
        }
    }

    if (treeType.Equals(PdfName.Dests))
    {
        // Merge legacy destinations stored directly under the catalog.
        PdfDictionary destinations = catalog.GetPdfObject().GetAsDictionary(PdfName.Dests);
        if (destinations != null)
        {
            ICollection<PdfName> keys = destinations.KeySet();
            foreach (PdfName key in keys)
            {
                PdfArray array = GetNameArray(destinations.Get(key));
                if (array == null)
                {
                    continue;
                }
                items[key.GetValue()] = array;
            }
        }
    }

    return(items);
}
/// <summary>
/// Collects the logic table names of every route unit mapped onto the given
/// actual data source (case-insensitive name match).
/// </summary>
private ISet<string> GetLogicTableNames(string actualDataSourceName)
{
    ISet<string> names = new HashSet<string>();
    foreach (var unit in _routeUnits)
    {
        if (actualDataSourceName.EqualsIgnoreCase(unit.DataSourceMapper.ActualName))
        {
            names.AddAll(unit.GetLogicTableNames());
        }
    }
    return names;
}
// AddAll copies elements into the target set, ignoring duplicates:
// hashset2 gains 3,4,5; adding hashset2 back into hashset contributes only
// already-present items, so hashset2 is unchanged and hashset keeps its 9.
public void AddAll()
{
    hashset.Add(3);
    hashset.Add(4);
    hashset.Add(5);
    HashSet<int> hashset2 = new HashSet<int>();
    hashset2.AddAll(hashset);
    Assert.IsTrue(IC.seteq(hashset2, 3, 4, 5));
    hashset.Add(9);
    hashset.AddAll(hashset2);
    // hashset2 must not have been modified by being the source of AddAll.
    Assert.IsTrue(IC.seteq(hashset2, 3, 4, 5));
    Assert.IsTrue(IC.seteq(hashset, 3, 4, 5, 9));
}
// Every edge visited by DFS reports both endpoints, so the traversal action
// must eventually observe every vertex of the graph.
public void TraverseActionCanGetAllVertices()
{
    WeightedGraph<int> graph = new WeightedGraph<int> { (1, 2), (2, 3), (3, 4), (1, 4), (5, 2), (7, 2) };
    HashSet<int> seen = new HashSet<int>();

    Dfs<int> dfs = new Dfs<int>(graph, edge => seen.AddAll(new [] { edge.From, edge.To }));

    Assert.AreEqual(seen.Count, graph.GetVertices().Count);
}
/// <summary>
/// Loads the configured stop-type files (if any) and unions their lines into
/// <c>stopTypes</c>. When no files are configured, <c>stopTypes</c> is left null.
/// </summary>
/// <param name="loader">Resource loader used to read each file.</param>
public virtual void Inform(IResourceLoader loader)
{
    IEnumerable<string> files = SplitFileNames(stopTypesFiles);
    // Any() short-circuits instead of enumerating the whole sequence to count it.
    if (files.Any())
    {
        stopTypes = new HashSet<string>();
        foreach (string file in files)
        {
            IEnumerable<string> typesLines = GetLines(loader, file.Trim());
            stopTypes.AddAll(typesLines);
        }
    }
}
/// <summary>
/// Searches metadata references that this project does NOT already reference,
/// in parallel, for add-import candidates. Only runs when the project searches
/// produced no results. Results found in sibling-project references are
/// de-duplicated via <c>seenReferences</c>.
/// </summary>
private async Task FindResultsInUnreferencedMetadataSymbolsAsync(
    ConcurrentDictionary<PortableExecutableReference, Compilation> referenceToCompilation,
    Project project, List<Reference> allSymbolReferences, SymbolReferenceFinder finder, bool exact,
    CancellationToken cancellationToken)
{
    if (allSymbolReferences.Count > 0)
    {
        // Only do this if none of the project searches produced any results. We may have a
        // lot of metadata to search through, and it would be good to avoid that if we can.
        return;
    }

    // Keep track of the references we've seen (so that we don't process them multiple times
    // across many sibling projects). Prepopulate it with our own metadata references since
    // we know we don't need to search in that.
    var seenReferences = new HashSet<PortableExecutableReference>(comparer: this);
    seenReferences.AddAll(project.MetadataReferences.OfType<PortableExecutableReference>());

    // Candidate references: those of every other project, minus already-seen
    // ones and anything living in the packages directory.
    var newReferences =
        project.Solution.Projects.Where(p => p != project)
                                 .SelectMany(p => p.MetadataReferences.OfType<PortableExecutableReference>())
                                 .Distinct(comparer: this)
                                 .Where(r => !seenReferences.Contains(r))
                                 .Where(r => !IsInPackagesDirectory(r));

    // Search all metadata references in parallel.
    var findTasks = new HashSet<Task<List<SymbolReference>>>();

    // Create another cancellation token so we can both search all projects in parallel,
    // but also stop any searches once we get enough results.
    using (var nestedTokenSource = new CancellationTokenSource())
    using (var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(nestedTokenSource.Token, cancellationToken))
    {
        foreach (var reference in newReferences)
        {
            // Compilations are cached across calls via referenceToCompilation.
            var compilation = referenceToCompilation.GetOrAdd(reference, r => CreateCompilation(project, r));

            // Ignore netmodules. First, they're incredibly esoteric and barely used.
            // Second, the SymbolFinder api doesn't even support searching them.
            var assembly = compilation.GetAssemblyOrModuleSymbol(reference) as IAssemblySymbol;
            if (assembly != null)
            {
                findTasks.Add(finder.FindInMetadataSymbolsAsync(
                    assembly, reference, exact, linkedTokenSource.Token));
            }
        }

        await WaitForTasksAsync(allSymbolReferences, findTasks, nestedTokenSource, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Builds the full list of table-access nodes for a statement: the statement's
/// own direct accesses, plus any found inside declared expressions and inside
/// every subselect.
/// </summary>
private static IList<ExprTableAccessNode> DetermineTableAccessNodes(
    ISet<ExprTableAccessNode> statementDirectTableAccess,
    ExprNodeSubselectDeclaredDotVisitor visitor)
{
    ISet<ExprTableAccessNode> accessNodes = new HashSet<ExprTableAccessNode>();
    if (statementDirectTableAccess != null)
    {
        accessNodes.AddAll(statementDirectTableAccess);
    }

    // include all declared expression usages
    var declaredVisitor = new ExprNodeTableAccessVisitor(accessNodes);
    foreach (var declared in visitor.DeclaredExpressions)
    {
        declared.Body.Accept(declaredVisitor);
    }

    // include all subqueries (and their declared expressions)
    // This is nested as declared expressions can have more subqueries, however all subqueries are in this list.
    foreach (var subselectNode in visitor.Subselects)
    {
        accessNodes.AddAll(subselectNode.StatementSpecRaw.TableExpressions);
    }

    return new List<ExprTableAccessNode>(accessNodes);
}
/// <summary>
/// Loads the configured stop-type files (if any) and unions their lines into
/// <c>stopTypes</c>. When no files are configured, <c>stopTypes</c> is left null.
/// </summary>
/// <param name="loader">Resource loader used to read each file.</param>
public virtual void Inform(IResourceLoader loader)
{
    IEnumerable<string> files = SplitFileNames(stopTypesFiles);
    // Any() short-circuits instead of enumerating the whole sequence to count it.
    if (files.Any())
    {
        stopTypes = new HashSet<string>();
        foreach (string file in files)
        {
            IEnumerable<string> typesLines = GetLines(loader, file.Trim());
            stopTypes.AddAll(typesLines);
        }
    }
}
/// <summary>
/// Resolves every ignored locator and every fixed ignored area into screen
/// coordinates, running each through the preparation strategy.
/// NOTE(review): lock(this) is discouraged — external code holding a lock on
/// this instance could deadlock with it. Fixing requires a private lock-object
/// field outside this method, so it is flagged rather than changed here.
/// </summary>
protected internal virtual ICollection<Coords> CompileIgnoredAreas(IWebDriver driver, CoordsPreparationStrategy preparationStrategy)
{
    lock (this)
    {
        ICollection<Coords> ignoredCoords = new HashSet<Coords>();
        // Locator-based areas: look the elements up live in the driver.
        foreach (By ignoredLocator in this.ignoredLocators)
        {
            IList<IWebElement> ignoredElements = driver.FindElements(ignoredLocator);
            if (!ignoredElements.IsEmpty())
            {
                ignoredCoords.AddAll(preparationStrategy.prepare(this.coordsProvider.ofElements(driver, ignoredElements.AsEnumerable())));
            }
        }
        // Fixed areas: already coordinates, but still run through the strategy.
        foreach (Coords ignoredArea in this.ignoredAreas)
        {
            ignoredCoords.AddAll(preparationStrategy.prepare(new List<Coords> { ignoredArea }));
        }
        return(ignoredCoords);
    }
}
/// <summary>
/// Builds a Module with the given name and "uses" entries; every other part
/// (imports, items, uri) is empty or null.
/// </summary>
private static Module GetModule(
    string name,
    params string[] uses)
{
    ISet<string> moduleUses = new HashSet<string>();
    moduleUses.AddAll(Arrays.AsList(uses));
    return new Module(
        name,
        null,
        moduleUses,
        EmptySet<Import>.Instance,
        EmptyList<ModuleItem>.Instance,
        null);
}
/// <summary>
/// Collects all CRLs matching the selector from the parameter's stores,
/// wrapping any failure into a single descriptive exception.
/// </summary>
public virtual ISet FindCrls(X509CrlStoreSelector crlselect, PkixParameters paramsPkix)
{
    ISet crls = new HashSet();
    try
    {
        crls.AddAll(FindCrls(crlselect, paramsPkix.GetStores()));
        return crls;
    }
    catch (Exception innerException)
    {
        throw new Exception("Exception obtaining complete CRLs.", innerException);
    }
}
/// <summary>
/// Loads the configured stop-type files and unions their lines into
/// stopTypes; does nothing when no files are configured.
/// </summary>
public virtual void Inform(ResourceLoader loader)
{
    IList<string> fileNames = splitFileNames(stopTypesFiles);
    if (fileNames.Count > 0)
    {
        stopTypes = new HashSet<string>();
        foreach (string fileName in fileNames)
        {
            IList<string> lines = getLines(loader, fileName.Trim());
            stopTypes.AddAll(lines);
        }
    }
}
/// <summary>
/// Unions the supported event types of all strategies. Returns null when no
/// strategy reports anything (the original contract is preserved).
/// </summary>
public virtual ICollection<EventType> GetSupportedEvents()
{
    ICollection<EventType> supported = new HashSet<EventType>();
    foreach (ILocationExtractionStrategy strategy in strategies)
    {
        ICollection<EventType> strategyEvents = strategy.GetSupportedEvents();
        if (strategyEvents != null)
        {
            supported.AddAll(strategyEvents);
        }
    }
    return supported.IsEmpty() ? null : supported;
}
/// <summary>
/// Collects CRLs matching the selector from the parameter's stores, wrapping
/// any failure into a single descriptive exception. (Decompiled variant with
/// fully-qualified type names; behavior matches the non-qualified overload.)
/// </summary>
public virtual ISet FindCrls(X509CrlStoreSelector crlselect, PkixParameters paramsPkix)
{
    ISet crls = new HashSet();
    try
    {
        crls.AddAll((global::System.Collections.IEnumerable)FindCrls(crlselect, paramsPkix.GetStores()));
        return crls;
    }
    catch (global::System.Exception ex)
    {
        throw new global::System.Exception("Exception obtaining complete CRLs.", ex);
    }
}
/// <summary>
/// Builds a PKIX certification path: finds all certificates matching the
/// target constraints and tries to build a path from each candidate until one
/// succeeds. Throws when the constraints are of the wrong type, no candidate
/// matches, or no chain can be built from any candidate.
/// </summary>
public virtual PkixCertPathBuilderResult Build(PkixBuilderParameters pkixParams)
{
    IX509Selector targetCertConstraints = pkixParams.GetTargetCertConstraints();
    // This builder only understands certificate-store selectors.
    if (!(targetCertConstraints is X509CertStoreSelector))
    {
        throw new PkixCertPathBuilderException(string.Concat(new object[]
        {
            "TargetConstraints must be an instance of ",
            typeof(X509CertStoreSelector).FullName,
            " for ",
            base.GetType(),
            " class."
        }));
    }
    // Candidate end-entity certificates matching the target constraints.
    ISet set = new HashSet();
    try
    {
        set.AddAll(PkixCertPathValidatorUtilities.FindCertificates((X509CertStoreSelector)targetCertConstraints, pkixParams.GetStores()));
    }
    catch (Exception exception)
    {
        throw new PkixCertPathBuilderException("Error finding target certificate.", exception);
    }
    if (set.IsEmpty)
    {
        throw new PkixCertPathBuilderException("No certificate found matching targetContraints.");
    }
    PkixCertPathBuilderResult pkixCertPathBuilderResult = null;
    IList tbvPath = Platform.CreateArrayList();
    // Try to build a path from each candidate until one succeeds.
    foreach (X509Certificate tbvCert in set)
    {
        pkixCertPathBuilderResult = this.Build(tbvCert, pkixParams, tbvPath);
        if (pkixCertPathBuilderResult != null)
        {
            break;
        }
    }
    // Surface the last recorded path-building failure, when there is one.
    if (pkixCertPathBuilderResult == null && this.certPathException != null)
    {
        throw new PkixCertPathBuilderException(this.certPathException.Message, this.certPathException.InnerException);
    }
    if (pkixCertPathBuilderResult == null && this.certPathException == null)
    {
        throw new PkixCertPathBuilderException("Unable to find certificate chain.");
    }
    return(pkixCertPathBuilderResult);
}
/// <summary>
/// Folds the diagnostics of <paramref name="useSiteInfo"/> into
/// <paramref name="useSiteDiagnostics"/>, adopting the source set wholesale
/// when the target has not been allocated yet.
/// </summary>
private static void AddDiagnosticInfos(ref HashSet<DiagnosticInfo> useSiteDiagnostics, CompoundUseSiteInfo<AssemblySymbol> useSiteInfo)
{
    if (useSiteInfo.Diagnostics is null)
    {
        return;
    }

    if (useSiteDiagnostics is null)
    {
        // No target set yet: reuse the source's set instead of copying it.
        useSiteDiagnostics = (HashSet<DiagnosticInfo>)useSiteInfo.Diagnostics;
    }
    else
    {
        useSiteDiagnostics.AddAll(useSiteInfo.Diagnostics);
    }
}
/// <summary>
/// Partitions the solution's projects into connected components of the
/// dependency graph. Projects are walked in topological (build) order, so each
/// component is rooted at a project that nothing else in the component depends
/// on; everything transitively connected to that root is grouped with it.
/// </summary>
public IEnumerable<IEnumerable<ProjectId>> GetConnectedProjects(CancellationToken cancellationToken)
{
    var topologicallySortedProjects = this.GetTopologicallySortedProjects(cancellationToken);
    var result = new List<IEnumerable<ProjectId>>();
    var seenProjects = new HashSet<ProjectId>();

#if false
    Example: A < --B | --C D < --E | --F
#endif

    // Process the projects in topological order (i.e. build order). This means that the
    // project that has things depending on it will show up first. Given the above example
    // the topological sort will produce something like:
#if false
    A B D C E F
#endif

    foreach (var project in topologicallySortedProjects)
    {
        if (seenProjects.Add(project))
        {
            // We've never seen this project before. That means it's either A or D. Walk
            // that project to see all the things that depend on it transitively and make a
            // connected component out of that. If it's A then we'll add 'B' and 'C' and
            // consider them 'seen'. Then we'll ignore those projects in the outer loop
            // until we get to 'D'.
            var connectedGroup = new HashSet<ProjectId>();
            connectedGroup.AddAll(GetTransitivelyConnectedProjects(project));
            seenProjects.AddAll(connectedGroup);

            result.Add(connectedGroup);
        }
    }

#if false
    Other case: A < --B C < - /
#endif

    return(result);
}
// AddAll on an ICollection-typed HashSet appends every supplied element
// alongside the existing ones.
public void Test_AddAll_OnHashSet_Ok()
{
    ICollection<int> target = new HashSet<int> { 0 };

    target.AddAll(new List<int> { 1, 2 });

    Assert.Equal(3, target.Count);
    Assert.Contains(target, number => number == 0);
    Assert.Contains(target, number => number == 1);
    Assert.Contains(target, number => number == 2);
}
// AddAll unions setB into setA (the shared element 2 collapses), and ForEach
// then visits every expected member.
public void TestAddAllAndForEach()
{
    // Arrange
    var setA = new HashSet<int> { 1, 2, 3 };
    var setB = new HashSet<int> { 2, 4, 6 };
    var expectedValues = new int[] { 1, 2, 3, 4, 6 };

    // Act
    setA.AddAll(setB);

    // Assert
    Assert.AreEqual(expectedValues.Length, setA.Count());
    expectedValues.ForEach(expected => Assert.IsTrue(setA.Contains(expected)));
}
/// <summary>
/// The resolve references internal.
/// </summary>
/// <param name="sdmxObjects"> The sdmxObjects. </param>
/// <param name="retrievalManager"> The retrieval manager. </param>
/// <param name="populateMissingMap"> The populate missing map; when non-null, unresolvable references are recorded there instead of failing the whole resolution. </param>
/// <returns> The <see cref="IIdentifiableObject" /> dictionary. </returns>
/// <exception cref="CrossReferenceException">Reference error</exception>
private IDictionaryOfSets<IIdentifiableObject, IIdentifiableObject> ResolveReferencesInternal(
    ISdmxObjects sdmxObjects,
    IIdentifiableRetrievalManager retrievalManager,
    IDictionary<IIdentifiableObject, ISet<ICrossReference>> populateMissingMap)
{
    _log.Info("Resolve References, bean retrieval manager: " + retrievalManager);

    // Index agencies by their full id for later lookup.
    /* foreach */
    foreach (IAgency currentAgency in sdmxObjects.Agencies)
    {
        this._agencies.Add(currentAgency.FullId, currentAgency);
    }

    // Add all the top level sdmxObjects to the maintainables list
    this.AddObjectsToMap(sdmxObjects);

    // LOOP THROUGH ALL THE BEANS AND RESOLVE ALL THE REFERENCES
    if (this._resolveAgencies)
    {
        /* foreach */
        foreach (IMaintainableObject currentBean in sdmxObjects.GetAllMaintainables())
        {
            try
            {
                this.ResolveAgency(currentBean, retrievalManager);
            }
            catch (CrossReferenceException e)
            {
                // Re-wrap so the failing agency reference is identified.
                throw new SdmxReferenceException(e, AgencyRef(currentBean.AgencyId));
            }
        }
    }

    // Snapshot the maintainables before iterating: resolution may mutate state.
    ISet<IMaintainableObject> loopSet = new HashSet<IMaintainableObject>();
    loopSet.AddAll(sdmxObjects.GetAllMaintainables());
    ISdmxObjectRetrievalManager retMan = new InMemoryRetrievalManager(sdmxObjects);

    /* foreach */
    foreach (IMaintainableObject currentMaintainable in loopSet)
    {
        this._log.Debug("Resolving References For : " + currentMaintainable.Urn);
        // Prefer the dedicated cross-reference engine when one is configured.
        ISet<ICrossReference> crossReferences0;
        if (_maintainableCrossReferenceRetrieverEngine != null)
        {
            crossReferences0 = _maintainableCrossReferenceRetrieverEngine.GetCrossReferences(retMan, currentMaintainable);
        }
        else
        {
            crossReferences0 = currentMaintainable.CrossReferences;
        }
        this._log.Debug("Number of References : " + crossReferences0.Count);
        int i = 0;

        /* foreach */
        foreach (ICrossReference crossReference in crossReferences0)
        {
            i++;
            if (this._log.IsDebugEnabled)
            {
                this._log.Debug(
                    "Resolving Reference " + i + ": " + crossReference + " - referenced from -"
                    + crossReference.ReferencedFrom.StructureType);
            }

            try
            {
                this.StoreRef(
                    crossReference.ReferencedFrom,
                    this.ResolveCrossReference(crossReference, retrievalManager));
            }
            catch (CrossReferenceException e)
            {
                // Without a missing-map the failure is fatal; otherwise it is
                // recorded and resolution continues with the next reference.
                if (populateMissingMap == null)
                {
                    throw;
                }

                HandleMissingReference(e, populateMissingMap);

                //throw new ReferenceException(e, "Reference from structure '" + currentMaintainable.Urn + "' can not be resolved");
            }
        }
    }

    return this._crossReferences;
}
/// <summary>
/// Get subset workspace from <paramref name="query"/>
/// </summary>
/// <param name="query">
/// The query.
/// </param>
/// <returns>
/// The <see cref="IStructureWorkspace"/> containing the maintainables matched
/// by any query plus their cross-referenced identifiables.
/// </returns>
public virtual IStructureWorkspace GetSubsetWorkspace(params IStructureReference[] query)
{
    ISet<IMaintainableObject> maintainablesSubset = new HashSet<IMaintainableObject>();
    IDictionaryOfSets<IIdentifiableObject, IIdentifiableObject> crossReferencedSubset = new DictionaryOfSets<IIdentifiableObject, IIdentifiableObject>();
    for (int i = 0; i < query.Length; i++)
    {
        IStructureReference currentQuery = query[i];
        // All maintainables of the queried structure type, then narrowed to
        // those actually matching the reference.
        ISet<IMaintainableObject> maintainableForStructure = this._sdmxObjects.GetMaintainables(currentQuery.MaintainableStructureEnumType.EnumType);
        ISet<IMaintainableObject> maintainableMatches = MaintainableUtil<IMaintainableObject>.FindMatches(maintainableForStructure, currentQuery);
        maintainablesSubset.AddAll(maintainableMatches);

        // Carry over the cross-referenced identifiables of each match, when
        // cross references were resolved for this workspace.
        /* foreach */
        foreach (IMaintainableObject currentMatch in maintainableMatches)
        {
            ISet<IIdentifiableObject> identifiables = (this._crossReferencedObjects == null)
                ? (new HashSet<IIdentifiableObject>())
                : this._crossReferencedObjects[currentMatch];
            if (identifiables != null)
            {
                crossReferencedSubset.Add(currentMatch, identifiables);
            }
        }
    }

    ISdmxObjects beansSubset = new SdmxObjectsImpl(this._sdmxObjects.Header, maintainablesSubset);
    return new StructureWorkspace(beansSubset, crossReferencedSubset);
}
// Initializes this page from its on-disk directory: discovers parent and
// sibling pages, classifies each file as metadata, markup, or asset, and
// collects assets from subdirectories that are not pages themselves.
public void Init()
{
    var parent = Path.GetDirectoryName (DiskPath);
    // Sorted by file name so Siblings has a stable order.
    var siblings = new SortedDictionary<string, Page> ();
    foreach (var i in Tacker.Pages) {
        // The page whose directory is our parent directory is our parent.
        if (i.DiskPath.Equals (parent)) {
            Parent = i;
        }
        // Pages sharing our parent directory (excluding ourselves and
        // floating pages) are siblings.
        if (parent.Equals (Path.GetDirectoryName (i.DiskPath)) && i != this && !i.IsFloating) {
            siblings.Add (Path.GetFileName (i.DiskPath), i);
        }
    }
    Siblings = new List<Page> (siblings.Values);

    var metadata = new Dictionary<string, object> ();
    var assets = new HashSet<string> ();
    foreach (var i in Files.GetAllFiles (DiskPath)) {
        // Metadata file: merge its entries; its base name doubles as the
        // template name if none has been set yet.
        var md = Tacker.ProcessMetadata (i);
        if (md != null) {
            Template = Template ?? Path.GetFileNameWithoutExtension (i);
            metadata.AddAll (md);
            continue;
        }
        // Markup file: stored under its base name as a metadata value.
        var markup = Tacker.ProcessMarkup (i);
        if (markup != null) {
            metadata.Add (Path.GetFileNameWithoutExtension (i), markup);
            continue;
        }
        // Anything else is a plain asset.
        assets.Add (i);
    }

    // Subdirectories that are not pages contribute their files as assets too.
    var pagePaths = new HashSet<string> ();
    foreach (var page in Tacker.Pages) {
        pagePaths.Add (page.DiskPath);
    }
    foreach (var subdir in Files.EnumerateAllSubdirs (DiskPath)) {
        if (!pagePaths.Contains (subdir)) {
            assets.AddAll (Files.GetAllFiles (subdir));
        }
    }

    // Asset paths are stored relative to this page's directory.
    Assets = new HashSet<string> (assets.Select (x => x.Replace (DiskPath, "")));
    Variables = metadata;
    inited = true;
}
/// <summary>
/// Increments the versions of sdmx objects: any submitted maintainable that
/// differs from its latest persisted version gets a minor or major version
/// bump (major when its identifiable composites changed), and every structure
/// referencing a re-versioned maintainable is minor-bumped and has its
/// references rewritten to the new version numbers.
/// </summary>
/// <param name="sdmxObjects">
/// The sdmx objects.
/// </param>
public void IncrementVersions(ISdmxObjects sdmxObjects)
{
    _log.Info("Update Versions of Structures if existing structures found");

    //Store a map of old versions vs the new version
    IDictionary<IStructureReference, IStructureReference> oldVsNew = new Dictionary<IStructureReference, IStructureReference>();
    IDictionary<IMaintainableObject, IMaintainableObject> oldMaintVsNew = new Dictionary<IMaintainableObject, IMaintainableObject>();
    ISet<IMaintainableObject> updatedMaintainables = new HashSet<IMaintainableObject>();
    ISet<IMaintainableObject> oldMaintainables = new HashSet<IMaintainableObject>();
    foreach (IMaintainableObject currentMaint in sdmxObjects.GetAllMaintainables())
    {
        _log.Debug("Auto Version - check latest version for maintainable: " + currentMaint);
        // Prefer the version-history manager; fall back to the bean retrieval
        // manager when no history is available.
        IMaintainableObject persistedMaintainable = this._structureVersionRetrievalManager.GetLatest(currentMaint);
        if (persistedMaintainable == null)
        {
            persistedMaintainable = this._beanRetrievalManager.GetMaintainableObject(currentMaint.AsReference);
        }
        if (persistedMaintainable != null)
        {
            if (VersionableUtil.IsHigherVersion(persistedMaintainable.Version, currentMaint.Version))
            {
                //Modify version of maintainable to be the same as persisted maintainable
                IMaintainableMutableObject mutableInstance = currentMaint.MutableInstance;
                mutableInstance.Version = persistedMaintainable.Version;

                //Remove the Maintainable from the submission - as we've changed the versions
                sdmxObjects.RemoveMaintainable(currentMaint);
                //currentMaint = mutableInstance.ImmutableInstance;
            }
            if (persistedMaintainable.Version.Equals(currentMaint.Version))
            {
                _log.Debug("Latest version is '" + persistedMaintainable.Version + "' perform update checks");
                if (!currentMaint.DeepEquals(persistedMaintainable, true))
                {
                    // Same composites on both sides => minor increment;
                    // any composite added or removed => major increment.
                    ISet<IIdentifiableObject> allIdentifiables1 = currentMaint.IdentifiableComposites;
                    ISet<IIdentifiableObject> allIdentifiables2 = persistedMaintainable.IdentifiableComposites;

                    bool containsAll = allIdentifiables1.ContainsAll(allIdentifiables2) && allIdentifiables2.ContainsAll(allIdentifiables1);
                    if (_log.IsInfoEnabled)
                    {
                        string increment = containsAll ? "Minor" : "Major";
                        _log.Info("Perform " + increment + " Version Increment for structure:" + currentMaint.Urn);
                    }

                    //Increment the version number
                    IMaintainableObject newVersion = this.IncrmentVersion(currentMaint, persistedMaintainable.Version, !containsAll);

                    //Remove the Maintainable from the submission
                    sdmxObjects.RemoveMaintainable(currentMaint);

                    //Store the newly updated maintainable in a container for further processing
                    updatedMaintainables.Add(newVersion);
                    oldMaintainables.Add(currentMaint);

                    //Store the old version number mappings to the new version number
                    oldMaintVsNew.Add(currentMaint, newVersion);
                    oldVsNew.Add(currentMaint.AsReference, newVersion.AsReference);
                    string oldVersionNumber = currentMaint.Version;
                    AddOldVsNewReferences(oldVersionNumber, newVersion, oldVsNew);
                }
            }
        }
    }

    //Create a set of parent sdmxObjects to not update (regardless of version)
    ISet<IMaintainableObject> filterSet = new HashSet<IMaintainableObject>(updatedMaintainables);
    filterSet.AddAll(sdmxObjects.GetAllMaintainables());

    //Get all the referencing structures to reversion them
    IEnumerable<IMaintainableObject> referencingStructures = this.RecurseUpTree(oldMaintainables, new HashSet<IMaintainableObject>(), filterSet);

    foreach (IMaintainableObject currentReferencingStructure in referencingStructures)
    {
        _log.Info("Perform Minor Version Increment on referencing structure:" + currentReferencingStructure);
        String newVersionNumber;
        if (oldMaintVsNew.ContainsKey(currentReferencingStructure))
        {
            //The old maintainable is also in the submission and has had it's version number incremented, use this version
            var tmp = oldMaintVsNew[currentReferencingStructure];
            //currentReferencingStructure = oldMaintVsNew[currentReferencingStructure];
            // Drop the already-incremented copy; it is re-added below with
            // its references rewritten.
            updatedMaintainables.Remove(tmp);
            newVersionNumber = currentReferencingStructure.Version;
        }
        else
        {
            newVersionNumber = VersionableUtil.IncrementVersion(currentReferencingStructure.Version, false);
        }
        IMaintainableObject updatedMaintainable = this._crossReferenceReversionEngine.UdpateReferences(currentReferencingStructure, oldVsNew, newVersionNumber);
        AddOldVsNewReferences(currentReferencingStructure.Version, updatedMaintainable, oldVsNew);
        updatedMaintainables.Add(updatedMaintainable);
    }
    // Re-point references inside every updated maintainable and add them back
    // to the submission.
    foreach (IMaintainableObject currentReferencingStructure in updatedMaintainables)
    {
        IMaintainableObject updatedMaintainable = this._crossReferenceReversionEngine.UdpateReferences(currentReferencingStructure, oldVsNew, currentReferencingStructure.Version);
        sdmxObjects.AddIdentifiable(updatedMaintainable);
    }

    //Update the references of any structures that existed in the submission
    foreach (IMaintainableObject currentReferencingStructure in sdmxObjects.GetAllMaintainables())
    {
        IMaintainableObject updatedMaintainable = this._crossReferenceReversionEngine.UdpateReferences(currentReferencingStructure, oldVsNew, currentReferencingStructure.Version);
        sdmxObjects.AddIdentifiable(updatedMaintainable);
    }
}
/// <summary>
/// Computes the set union of <paramref name="a"/> and <paramref name="b"/>
/// without modifying either input.
/// </summary>
/// <param name="a">The first set; its elements seed the result.</param>
/// <param name="b">The second set; its elements are merged into the result.</param>
/// <returns>A new set containing every element present in either input set.</returns>
public static ISet Union(ISet a, ISet b)
{
    IMutableSet result = new HashSet(a);
    result.AddAll(b);
    return result;
}
/// <summary>
/// Loads the configured stop-type files through the given resource loader and
/// fills <c>stopTypes</c> with every line read from them.  When no file names
/// are configured, <c>stopTypes</c> is left untouched.
/// </summary>
/// <param name="loader">Resource loader used to open each configured file.</param>
public virtual void Inform(ResourceLoader loader)
{
    IList<string> fileNames = splitFileNames(stopTypesFiles);
    if (fileNames.Count == 0)
    {
        return;
    }

    stopTypes = new HashSet<string>();
    foreach (string fileName in fileNames)
    {
        // Each file contributes one stop type per line.
        stopTypes.AddAll(getLines(loader, fileName.Trim()));
    }
}
/// <summary>
/// Given a path to an assembly and a set of paths to possible dependencies,
/// identifies which of the assembly's references are missing. May throw.
/// </summary>
/// <param name="assemblyPath">Absolute path of the assembly whose references are checked.</param>
/// <param name="dependencyFilePaths">Paths of candidate dependency assemblies on disk.</param>
/// <exception cref="IOException">If one of the files does not exist or cannot be accessed.</exception>
/// <exception cref="BadImageFormatException">If one of the files is not an assembly or is somehow corrupted.</exception>
/// <returns>The identities referenced by the assembly but not found among the candidates.</returns>
public static ImmutableArray<AssemblyIdentity> IdentifyMissingDependencies(string assemblyPath, IEnumerable<string> dependencyFilePaths)
{
    Debug.Assert(assemblyPath != null);
    Debug.Assert(PathUtilities.IsAbsolute(assemblyPath));
    Debug.Assert(dependencyFilePaths != null);

    // Collect the identity of every candidate dependency present on disk.
    HashSet<AssemblyIdentity> assemblyDefinitions = new HashSet<AssemblyIdentity>();
    foreach (var potentialDependency in dependencyFilePaths)
    {
        using (var reader = new PEReader(FileUtilities.OpenRead(potentialDependency)))
        {
            var metadataReader = reader.GetMetadataReader();
            var assemblyDefinition = metadataReader.ReadAssemblyIdentityOrThrow();
            assemblyDefinitions.Add(assemblyDefinition);
        }
    }

    // Collect every assembly identity the target assembly references.
    HashSet<AssemblyIdentity> assemblyReferences = new HashSet<AssemblyIdentity>();
    using (var reader = new PEReader(FileUtilities.OpenRead(assemblyPath)))
    {
        var metadataReader = reader.GetMetadataReader();
        var references = metadataReader.GetReferencedAssembliesOrThrow();
        // Built-in set operation instead of the project's AddAll extension.
        assemblyReferences.UnionWith(references);
    }

    // Whatever is referenced but not among the definitions is missing.
    assemblyReferences.ExceptWith(assemblyDefinitions);
    return ImmutableArray.CreateRange(assemblyReferences);
}
/// <summary>
/// Resolves and returns the set of IdentifiableBeans that the supplied
/// IMaintainableObject cross references.
/// </summary>
/// <param name="bean"> The maintainable whose references are resolved. </param>
/// <param name="resolveAgencies"> - if true will also resolve the agencies </param>
/// <param name="numberLevelsDeep"> references, an argument of 0 (zero) implies there is no limit, and the resolver engine will continue re-cursing until it has found every directly and indirectly referenced artifact. Note that there is no risk of infinite recursion in calling this. </param>
/// <param name="retrievalManager"> - Used to resolve the structure references. Can be null, if supplied this is used to resolve any references that do not exist in the supplied sdmxObjects </param>
/// <exception cref="CrossReferenceException">- if any of the references could not be resolved</exception>
/// <returns> a set of IdentifiableBeans that the IMaintainableObject cross references </returns>
public virtual ISet<IIdentifiableObject> ResolveReferences(
    IMaintainableObject bean,
    bool resolveAgencies,
    int numberLevelsDeep,
    IIdentifiableRetrievalManager retrievalManager)
{
    this.ResetMaps();

    // Wrap the single maintainable in a container so the bulk overload can be reused.
    ISdmxObjects container = new SdmxObjectsImpl();
    container.AddIdentifiable(bean);

    IDictionary<IIdentifiableObject, ISet<IIdentifiableObject>> resolved =
        this.ResolveReferences(container, resolveAgencies, numberLevelsDeep, retrievalManager);

    // Flatten the per-identifiable result map into one combined set.
    ISet<IIdentifiableObject> flattened = new HashSet<IIdentifiableObject>();
    foreach (KeyValuePair<IIdentifiableObject, ISet<IIdentifiableObject>> entry in resolved)
    {
        flattened.UnionWith(entry.Value);
    }

    return flattened;
}
/// <summary>
/// Finds every renamable reference to <paramref name="symbol"/> across the
/// solution and packages the rename locations, the implicit reference
/// locations, and the distinct referenced symbols into a SearchResult.
/// </summary>
private static async Task<SearchResult> AddLocationsReferenceSymbolsAsync(
    ISymbol symbol,
    Solution solution,
    CancellationToken cancellationToken)
{
    var renameLocations = new HashSet<RenameLocation>();
    var references = await SymbolFinder.FindRenamableReferencesAsync(symbol, solution, cancellationToken).ConfigureAwait(false);

    foreach (var reference in references)
    {
        // Locations where the definition itself must be renamed.
        var definitionLocations = await ReferenceProcessing.GetRenamableDefinitionLocationsAsync(
            reference.Definition, symbol, solution, cancellationToken).ConfigureAwait(false);
        renameLocations.UnionWith(definitionLocations);

        // Locations where each individual reference site must be renamed.
        var referenceLocations = await reference.Locations.SelectManyAsync<ReferenceLocation, RenameLocation>(
            (l, c) => ReferenceProcessing.GetRenamableReferenceLocationsAsync(reference.Definition, symbol, l, solution, c),
            cancellationToken).ConfigureAwait(false);
        renameLocations.UnionWith(referenceLocations);
    }

    var implicitLocations = new List<ReferenceLocation>(
        references.SelectMany(r => r.Locations).Where(loc => loc.IsImplicit));
    var referencedSymbols = new List<ISymbol>(
        references.Select(r => r.Definition).Where(d => !d.Equals(symbol)));

    return new SearchResult(renameLocations, implicitLocations, referencedSymbols);
}
/// <summary>
/// Repeatedly sweeps the documents of <paramref name="project"/> collecting
/// source types that inherit from the given source/metadata types, iterating
/// to a fixed point: each newly discovered type is fed back into the search
/// while <paramref name="transitive"/> is set and
/// <paramref name="shouldContinueSearching"/> allows it.
/// </summary>
private static async Task<IEnumerable<INamedTypeSymbol>> FindSourceTypesInProjectAsync(
    HashSet<INamedTypeSymbol> sourceAndMetadataTypes,
    Project project,
    Func<HashSet<INamedTypeSymbol>, INamedTypeSymbol, bool> sourceTypeImmediatelyMatches,
    Func<INamedTypeSymbol, bool> shouldContinueSearching,
    bool transitive,
    CancellationToken cancellationToken)
{
    // To limit GC and redundant work across the repeated sweeps, cache the
    // semantic models and DeclaredSymbolInfo for the documents we look at.
    // Only one project is processed at a time, so this stays bounded.
    var cachedModels = new ConcurrentSet<SemanticModel>();
    var cachedInfos = new ConcurrentSet<IDeclarationInfo>();

    var result = new HashSet<INamedTypeSymbol>(SymbolEquivalenceComparer.Instance);
    var currentWave = new HashSet<INamedTypeSymbol>(SymbolEquivalenceComparer.Instance);
    currentWave.AddAll(sourceAndMetadataTypes);

    var inheritanceQuery = new InheritanceQuery(sourceAndMetadataTypes);

    // Keep looping while the previous sweep produced new types to search for.
    while (currentWave.Count > 0)
    {
        // Names to look for in base/interface lists during this sweep.
        inheritanceQuery.TypeNames.AddRange(currentWave.Select(t => t.Name));

        // Search all the documents of this project in parallel.
        var tasks = project.Documents.Select(d => FindImmediatelyInheritingTypesInDocumentAsync(
            d, currentWave, inheritanceQuery,
            cachedModels, cachedInfos, sourceTypeImmediatelyMatches,
            cancellationToken)).ToArray();
        await Task.WhenAll(tasks).ConfigureAwait(false);

        // Reset the per-sweep state; it is repopulated below with any newly
        // discovered types that still need searching.
        currentWave.Clear();
        inheritanceQuery.TypeNames.Clear();

        foreach (var task in tasks)
        {
            if (task.Result == null)
            {
                continue;
            }

            foreach (var derivedType in task.Result)
            {
                // Only first-time discoveries can trigger further searching.
                if (result.Add(derivedType) && transitive && shouldContinueSearching(derivedType))
                {
                    currentWave.Add(derivedType);
                }
            }
        }
    }

    return result;
}