/// <summary>
/// Locates the changes between the prior and post state of the modules.
/// </summary>
/// <param name="modules_prior">List of the available modules prior to the update.</param>
/// <param name="modules_post">List of the available modules after the update.</param>
private void PrintChanges(List<CkanModule> modules_prior, List<CkanModule> modules_post)
{
    // One comparer instance suffices; the original allocated a fresh one at every call site.
    var byName = new NameComparer();
    var prior = new HashSet<CkanModule>(modules_prior, byName);
    var post = new HashSet<CkanModule>(modules_post, byName);

    var added = new HashSet<CkanModule>(post.Except(prior, byName));
    var removed = new HashSet<CkanModule>(prior.Except(post, byName));

    // Default compare includes versions, so "unchanged" means same name AND same version.
    var unchanged = post.Intersect(prior);
    // Present on both sides by name, but with a different version.
    var updated = post.Except(unchanged).Except(added).Except(removed).ToList();

    // Print the changes. Use the Count property rather than the Count() extension.
    user.RaiseMessage("Found {0} new modules, {1} removed modules and {2} updated modules.",
        added.Count, removed.Count, updated.Count);

    if (added.Count > 0)
    {
        PrintModules("New modules [Name (CKAN identifier)]:", added);
    }

    if (removed.Count > 0)
    {
        PrintModules("Removed modules [Name (CKAN identifier)]:", removed);
    }

    if (updated.Count > 0)
    {
        PrintModules("Updated modules [Name (CKAN identifier)]:", updated);
    }
}
public override void OnUpdateList ()
{
	base.OnUpdateList ();

	// Reset the expression tree whenever we land on a different frame (or none at all).
	StackFrame frame = DebuggingService.CurrentFrame;
	bool frameChanged = frame == null || !FrameEquals (frame, lastFrame);
	if (frameChanged) {
		tree.ClearExpressions ();
		lastExpressions = null;
	}
	lastFrame = frame;

	if (frame == null)
		return;

	//FIXME: tree should use the local refs rather than expressions. ATM we exclude items without names
	var currentNames = new HashSet<string> (
		frame.GetAllLocals ()
			.Select (local => local.Name)
			.Where (name => !string.IsNullOrEmpty (name) && name != "?"));

	if (lastExpressions == null) {
		// First population for this frame: add everything in one batch.
		tree.AddExpressions (currentNames);
	} else {
		// Incremental update: drop expressions that are no longer valid, then add new ones.
		foreach (var stale in lastExpressions.Except (currentNames))
			tree.RemoveExpression (stale);
		foreach (var fresh in currentNames.Except (lastExpressions))
			tree.AddExpression (fresh);
	}

	lastExpressions = currentNames;
}
public void AllValuesInArrayAreIndexed()
{
    // Arrange: ensure the "Tags" property is indexed.
    var coll = GetCollection();
    coll.Indices.EnsureIndex("Tags");

    // Insert a document whose Tags is a list of heterogeneous values.
    const string key = "a key of some sort";
    var values = new HashSet<object> { "A", "B", DateTime.Now, null, Math.PI };
    coll.Update(key, new { Tags = values });

    // Extract what the index really contains.
    var indexContent = new List<KeyValuePair<IndexValue, string>>();
    coll.Indices.VisitIndex("Tags", indexContent.Add);

    // First of all, each index entry must point to our key.
    foreach (var kv in indexContent)
    {
        Assert.AreEqual(key, kv.Value);
    }

    // Second, the index should contain exactly the array values — no extras, none missing.
    var indexValues = new HashSet<object>(indexContent.Select(kv => kv.Key.Value));
    Assert.AreEqual(0, indexValues.Except(values).Count(), "Index values are too many");
    // Fixed typo in the failure message ("too feew" -> "too few").
    Assert.AreEqual(0, values.Except(indexValues).Count(), "Index values are too few");
}
/// <summary>
/// Returns every business day (Monday through Friday, excluding holidays) in the
/// inclusive range [dateFrom, dateTo], sorted ascending.
/// </summary>
/// <param name="dateFrom">First date of the range (inclusive).</param>
/// <param name="dateTo">Last date of the range (inclusive).</param>
/// <returns>Sorted list of business days; empty when dateFrom > dateTo.</returns>
public static List<DateTime> GetBusinessDays(DateTime dateFrom, DateTime dateTo)
{
    // Collect the holidays of every year the range touches.
    HashSet<DateTime> holidays = new HashSet<DateTime>();
    for (int year = dateFrom.Year; year <= dateTo.Year; year++)
    {
        holidays.UnionWith(GetHolidays(year));
    }

    // Walking the range in order yields an already-sorted result, so the original's
    // intermediate HashSet + Except + Sort round-trip is unnecessary.
    List<DateTime> businessDays = new List<DateTime>();
    for (var dt = dateFrom; dt <= dateTo; dt = dt.AddDays(1))
    {
        if (dt.DayOfWeek == DayOfWeek.Saturday || dt.DayOfWeek == DayOfWeek.Sunday)
            continue;
        if (!holidays.Contains(dt))
            businessDays.Add(dt);
    }

    return businessDays;
}
IEnumerator LoadTagCoroutine(string id, string sceneName, string tagName)
{
	// Snapshot the scene contents so we can tell which objects the additive load creates.
	var existing = new HashSet<GameObject>((GameObject[])GameObject.FindObjectsOfType(typeof(GameObject)));
	Application.LoadLevelAdditive(sceneName);
	yield return new WaitForEndOfFrame();

	var current = new HashSet<GameObject>((GameObject[])GameObject.FindObjectsOfType(typeof(GameObject)));
	var newObjects = current.Except(existing).ToList();

	// Destroy every freshly loaded object that neither carries the requested tag
	// itself nor sits underneath an ancestor that does.
	var candidates = newObjects.Where(obj => obj.tag != tagName).ToArray();
	foreach (var candidate in candidates)
	{
		var ancestor = candidate.transform.parent;
		while (ancestor && ancestor.tag != tagName)
		{
			ancestor = ancestor.transform.parent;
		}
		if (!ancestor)
		{
			GameObject.DestroyImmediate(candidate);
		}
	}

	// Drop the now-destroyed entries before registering the survivors.
	newObjects.RemoveAll(obj => !obj);
	Group.AddToGroup(id, newObjects);
}
/// <summary>
/// Computes which inputs were added and which were removed relative to
/// <paramref name="other"/>.
/// </summary>
public DiffResult DiffInputs(CompilerIO other)
{
    var mine = new HashSet<string>(Inputs);
    var theirs = new HashSet<string>(other.Inputs);

    // Inputs only we have are additions; inputs only the other side has are deletions.
    return new DiffResult(mine.Except(theirs), theirs.Except(mine));
}
IEnumerator LoadCoroutine(string id, string sceneName)
{
	// Record the scene contents, load additively, then diff to find what was created.
	var existing = new HashSet<GameObject>((GameObject[])GameObject.FindObjectsOfType(typeof(GameObject)));
	Application.LoadLevelAdditive(sceneName);
	yield return 0;

	var current = new HashSet<GameObject>((GameObject[])GameObject.FindObjectsOfType(typeof(GameObject)));
	Group.AddToGroup(id, current.Except(existing).ToList());
}
/// <summary>
/// Checks whether every value to be set exists in the candidate value list.
/// </summary>
/// <param name="values">The values to be set.</param>
/// <param name="message">The error message when the values cannot be set; null otherwise.</param>
/// <returns>Whether the values can be set.</returns>
protected virtual bool CanSetValues( HashSet<string> values, out string message )
{
    // Any value outside the candidate list invalidates the whole assignment.
    var invalidValue = values.Except( CandidateValues ).FirstOrDefault();
    if ( invalidValue != null && !Form.Configuration.IgnoreInvailidValuesInGroupControl )
    {
        // Bug fix: the original formatted invalidValue.First(), which printed only the
        // first character of the offending value (and threw on an empty string).
        message = string.Format( "不能对控件设置值 \"{0}\"", invalidValue );
        return false;
    }

    message = null;
    return true;
}
/// <summary>
/// Lint pass: verifies for every non-template actor that every granted condition is
/// consumed somewhere, every consumed condition is granted somewhere, and that any
/// actor using conditions includes an UpgradeManager trait.
/// </summary>
public void Run(Action<string> emitError, Action<string> emitWarning, Ruleset rules)
{
	foreach (var actorInfo in rules.Actors)
	{
		// Template actors (^Foo) are abstract bases, not real actors; skip them.
		if (actorInfo.Key.StartsWith("^", StringComparison.Ordinal))
			continue;

		var granted = new HashSet<string>();
		var consumed = new HashSet<string>();
		foreach (var trait in actorInfo.Value.TraitInfos<ITraitInfo>())
		{
			// Condition names referenced by fields/properties tagged as consumed...
			var fieldConsumed = trait.GetType().GetFields()
				.Where(x => x.HasAttribute<ConsumedConditionReferenceAttribute>())
				.SelectMany(f => LintExts.GetFieldValues(trait, f, emitError));

			var propertyConsumed = trait.GetType().GetProperties()
				.Where(x => x.HasAttribute<ConsumedConditionReferenceAttribute>())
				.SelectMany(p => LintExts.GetPropertyValues(trait, p, emitError));

			// ...and by those tagged as granted.
			var fieldGranted = trait.GetType().GetFields()
				.Where(x => x.HasAttribute<GrantedConditionReferenceAttribute>())
				.SelectMany(f => LintExts.GetFieldValues(trait, f, emitError));

			var propertyGranted = trait.GetType().GetProperties()
				.Where(x => x.HasAttribute<GrantedConditionReferenceAttribute>())
				.SelectMany(f => LintExts.GetPropertyValues(trait, f, emitError));

			foreach (var c in fieldConsumed.Concat(propertyConsumed))
				if (!string.IsNullOrEmpty(c))
					consumed.Add(c);

			foreach (var g in fieldGranted.Concat(propertyGranted))
				if (!string.IsNullOrEmpty(g))
					granted.Add(g);
		}

		// Materialize the diffs once: the original deferred queries were evaluated
		// twice each (once for Any(), once again for JoinWith()).
		var unconsumed = granted.Except(consumed).ToList();
		if (unconsumed.Any())
			emitWarning("Actor type `{0}` grants conditions that are not consumed: {1}".F(actorInfo.Key, unconsumed.JoinWith(", ")));

		var ungranted = consumed.Except(granted).ToList();
		if (ungranted.Any())
			emitError("Actor type `{0}` consumes conditions that are not granted: {1}".F(actorInfo.Key, ungranted.JoinWith(", ")));

		// Conditions are inert without an UpgradeManager trait on the actor.
		if ((consumed.Any() || granted.Any()) && actorInfo.Value.TraitInfoOrDefault<UpgradeManagerInfo>() == null)
			emitError("Actor type `{0}` defines conditions but does not include an UpgradeManager".F(actorInfo.Key));
	}
}
public ActionResult Refresh()
{
    var directory = HostingEnvironment.VirtualPathProvider.GetDirectory( VirtualPathUtility.ToAbsolute( templatePath ) );

    // Template paths currently recorded in the database (virtual paths are case-insensitive).
    var recorded = new HashSet<string>( dbUtility.Data( "SELECT virtualPath FROM Templates " ).Column<string>(), StringComparer.OrdinalIgnoreCase );

    // Template files actually present on disk, as app-relative virtual paths.
    var onDisk = new HashSet<string>( SearchFiles( directory ).Select( f => VirtualPathUtility.ToAppRelative( f.VirtualPath ) ) );

    // Register files that exist on disk but are missing from the database...
    foreach ( var virtualPath in onDisk.Except( recorded ) )
        dbUtility.NonQuery( "INSERT INTO Templates ( virtualPath ) VALUES ( {0} )", virtualPath );

    // ...and purge database rows whose file no longer exists.
    foreach ( var virtualPath in recorded.Except( onDisk ) )
        dbUtility.NonQuery( "DELETE Templates WHERE virtualPath = {0}", virtualPath );

    return RedirectToAction( "List" );
}
/// <summary>
/// Polls the keyboard, raises key-pressed/key-released events for keys whose state
/// changed since the previous update, then stores the new state for the next call.
/// </summary>
public static void Update()
{
    currentState = Keyboard.GetState();
    HashSet<Keys> currentKeySet = new HashSet<Keys>(currentState.GetPressedKeys());

    // Materialize the diffs once; the original deferred queries were enumerated
    // twice each (Count() and then again by the KeyEvent constructor).
    List<Keys> pressedKeys = currentKeySet.Except(previousKeySet).ToList();
    List<Keys> releasedKeys = previousKeySet.Except(currentKeySet).ToList();

    if (pressedKeys.Count > 0)
    {
        FireKeyPressed(new KeyEvent(pressedKeys));
    }

    if (releasedKeys.Count > 0)
    {
        FireKeyReleased(new KeyEvent(releasedKeys));
    }

    previousKeySet = currentKeySet;
    previousState = currentState;
}
/// <summary>
/// Asserts that every state of the graph is reachable from the first state,
/// printing the names of any unreachable states before failing.
/// </summary>
/// <param name="states">All states of the graph; traversal starts at the first one.</param>
public static void AllStatesCanBeReached(IEnumerable<State> states)
{
    // Enumerate the input sequence exactly once; the original iterated it
    // twice (HashSet constructor and then First()).
    var stateList = states.ToList();
    var allStates = new HashSet<State>(stateList);
    var visitedStates = new HashSet<State>();

    TraverseStateGraph(stateList.First(), visitedStates);

    var unreachableStates = allStates.Except(visitedStates).ToList();
    if (unreachableStates.Count != 0)
    {
        // List the offenders so a failing run is actionable.
        Console.WriteLine("Unreachable states:");
        foreach (var unreachableState in unreachableStates)
        {
            Console.WriteLine(unreachableState.Name);
        }
    }

    Assert.Equal(allStates.Count, visitedStates.Count);
}
// Cross-check /Resources/Prefabs and Levels.xml for item prefabs that exist in one but not the other.
public void crossCheck()
{
	// One set of prefab names per source.
	HashSet<string> resPrefabSet = new HashSet<string>();
	HashSet<string> xmlPrefabSet = new HashSet<string>();

	// Get prefabs from Levels.xml
	getLevelPrefabs(xmlPrefabSet);
	// Get prefabs from the /Resources/Prefabs folder
	getResPrefabs(resPrefabSet);

	// Referenced in XML but absent on disk is an error; present on disk but unused is only informational.
	// (Typo fix: the original messages said "/Resorces/Prefabs".)
	foreach (string prefab in xmlPrefabSet.Except(resPrefabSet))
		Debug.LogError(prefab + " is missing in the /Resources/Prefabs folder but used in Levels.xml");

	foreach (string prefab in resPrefabSet.Except(xmlPrefabSet))
		Debug.Log(prefab + " exists in the /Resources/Prefabs folder but not used in Levels.xml");

	Debug.Log("Cross Check Done");
}
public void AssertNumberOfResultsIsConsistentOnRecompile(IEnumerable<IViewComputationResultModel> results)
{
    // Baseline set of (calculation configuration, value specification) pairs,
    // captured from the first result and compared against every later one.
    HashSet<Tuple<string, ValueSpecification>> baseline = null;

    foreach (var result in results)
    {
        Assert.NotNull(result);

        var current = new HashSet<Tuple<string, ValueSpecification>>(
            result.AllResults.Select(r => Tuple.Create(r.CalculationConfiguration, r.ComputedValue.Specification)));

        if (baseline == null)
        {
            baseline = current;
            continue;
        }

        if (baseline.SetEquals(current))
        {
            continue;
        }

        // The sets diverged: report what disappeared and what showed up.
        var missing = baseline.Except(current).ToList();
        var added = current.Except(baseline).ToList();
        throw new Exception(string.Format("Previously missing {0} results, now {1}. Missing {2}. Added {3}", baseline.Count, current.Count, String.Join(",", missing), String.Join(",", added)));
    }
}
public void Should_Return_All_Metadata_From_Metadata_Repository_OnGet()
{
    // Arrange: a controller backed by a mocked repository returning a known list.
    var queryableList = this.ConstructQueryableList();
    var controller = new MetadataController();
    var metadataRepositoryMock = new Mock<IMetadataRepository>();
    metadataRepositoryMock.Setup(x => x.GetAll()).Returns(queryableList).Verifiable();
    controller.MetadataRepository = metadataRepositoryMock.Object;

    // Act
    var actionResult = controller.Get();

    // Assert: correct result type and matching element count.
    var results = actionResult as GenericValueResult<IEnumerable<Models.MetadataInfo>>;
    results.Should().NotBeNull("Wrong data type was returned from the controller");
    results.Value.Count().Should().Be(queryableList.Count());

    // Compare the ID sets so nothing was dropped or invented.
    var receivedIds = new HashSet<int>(results.Value.Select(x => x.Id.Value).AsEnumerable());
    var expectedIds = new HashSet<int>(queryableList.Select(x => x.Id).AsEnumerable());
    expectedIds.Except(receivedIds).Count().Should().Be(0, "Result list should contain all the same IDs as the list in Repository");

    metadataRepositoryMock.VerifyAll();
}
public ActionResult _RegisterPushTargets(int topicID, Dictionary<int, bool> users)
{
    var topic = db.Topics.Find(topicID);

    // Read-only topics must not have their notification subscriptions modified.
    if (topic.IsReadOnly)
        return HTTPStatus(HttpStatusCode.Forbidden, "Das Thema ist schreibgeschützt.");

    // Users flagged true are the ones who should receive push notifications.
    var wanted = new HashSet<int>(users.Where(kvp => kvp.Value).Select(kvp => kvp.Key));

    // Users currently subscribed to this topic.
    var existing = new HashSet<int>(db.PushNotifications.Where(pn => pn.TopicID == topicID).Select(pn => pn.UserID));

    // Subscribe newly wanted users...
    foreach (var userID in wanted.Except(existing))
        db.PushNotifications.Add(new PushNotification { TopicID = topicID, UserID = userID });

    // ...and unsubscribe users who are no longer wanted.
    foreach (var userID in existing.Except(wanted))
        db.PushNotifications.RemoveRange(db.PushNotifications.Where(pn => pn.TopicID == topicID && pn.UserID == userID));

    db.SaveChanges();
    return _EditListTopic(topic);
}
/// <summary>
/// Returns the set difference of two hash sets without modifying either one:
/// the elements of <paramref name="first"/> that are not in <paramref name="second"/>.
/// Note: Except is defined on IEnumerable, so this also works for List etc.
/// </summary>
/// <param name="first">The set whose elements are kept.</param>
/// <param name="second">The set whose elements are excluded from the result.</param>
/// <returns>A new list with the elements of first that do not appear in second.</returns>
public static IList<int> setExcept(HashSet<int> first, HashSet<int> second)
{
    // The original's explicit <int> type arguments were redundant; inference handles them.
    return first.Except(second).ToList();
}
/// <summary>
/// Emits a dynamic proxy type for <paramref name="baseType"/> implementing the given
/// interfaces, wired to a lazy initializer field for NHibernate-style lazy loading,
/// and made serializable via a custom ISerializable implementation.
/// </summary>
/// <param name="baseType">Class or interface to proxy; interfaces get object as the parent type.</param>
/// <param name="baseInterfaces">Additional interfaces the proxy must implement.</param>
/// <returns>The constructed proxy TypeInfo.</returns>
public TypeInfo CreateProxyType(System.Type baseType, IReadOnlyCollection<System.Type> baseInterfaces)
{
    // Derive assembly/module/type names from the proxied type's name.
    var typeName = $"{baseType.Name}Proxy";
    var assemblyName = $"{typeName}Assembly";
    var moduleName = $"{typeName}Module";

    var name = new AssemblyName(assemblyName);
    var assemblyBuilder = ProxyAssemblyBuilder.DefineDynamicAssembly(AppDomain.CurrentDomain, name);
    var moduleBuilder = ProxyAssemblyBuilder.DefineDynamicModule(assemblyBuilder, moduleName);

    const TypeAttributes typeAttributes = TypeAttributes.AutoClass | TypeAttributes.Class | TypeAttributes.Public | TypeAttributes.BeforeFieldInit;

    var interfaces = new HashSet<System.Type>
    {
        // Add the ISerializable interface so that it can be implemented
        typeof(ISerializable)
    };
    // Collect the full closure of interfaces: the requested ones, everything they
    // extend, and everything the base type itself implements.
    interfaces.UnionWith(baseInterfaces);
    interfaces.UnionWith(baseInterfaces.SelectMany(i => i.GetInterfaces()));
    interfaces.UnionWith(baseType.GetInterfaces());

    // Use the object as the base type
    // since we're not inheriting from any class type
    var parentType = baseType;
    if (baseType.IsInterface)
    {
        parentType = typeof(object);
        interfaces.Add(baseType);
    }

    // Non-visible interfaces cannot be implemented from the dynamic assembly.
    interfaces.RemoveWhere(i => !i.IsVisible);

    var typeBuilder = moduleBuilder.DefineType(typeName, typeAttributes, parentType, interfaces.ToArray());

    // Backing fields for the lazy-loading machinery and the proxy factory metadata.
    var lazyInitializerField = typeBuilder.DefineField("__lazyInitializer", LazyInitializerType, FieldAttributes.Private);
    var proxyInfoField = typeBuilder.DefineField("__proxyInfo", typeof(NHibernateProxyFactoryInfo), FieldAttributes.Private);

    ImplementConstructor(typeBuilder, parentType, lazyInitializerField, proxyInfoField);

    // Provide a custom implementation of ISerializable
    // instead of redirecting it back to the interceptor
    foreach (var method in ProxyFactory.GetProxiableMethods(baseType, interfaces.Except(new[] { typeof(ISerializable) })))
    {
        CreateProxiedMethod(typeBuilder, method, lazyInitializerField);
    }

    // Make the proxy serializable
    var serializableConstructor = typeof(SerializableAttribute).GetConstructor(System.Type.EmptyTypes);
    var customAttributeBuilder = new CustomAttributeBuilder(serializableConstructor, Array.Empty<object>());
    typeBuilder.SetCustomAttribute(customAttributeBuilder);

    ImplementDeserializationConstructor(typeBuilder);
    ImplementGetObjectData(typeBuilder, proxyInfoField, lazyInitializerField);

    var proxyType = typeBuilder.CreateTypeInfo();

    // Persist the dynamic assembly (e.g. for inspection/debugging) before returning.
    ProxyAssemblyBuilder.Save(assemblyBuilder);

    return (proxyType);
}
/// <summary>
/// Synchronizes CODEOWNERS contacts to appropriate DevOps groups
/// </summary>
/// <param name="organization">Azure DevOps organization name</param>
/// <param name="project">Azure DevOps project name</param>
/// <param name="devOpsTokenVar">Personal Access Token environment variable name</param>
/// <param name="aadAppIdVar">AAD App ID environment variable name (Kusto access)</param>
/// <param name="aadAppSecretVar">AAD App Secret environment variable name (Kusto access)</param>
/// <param name="aadTenantVar">AAD Tenant environment variable name (Kusto access)</param>
/// <param name="kustoUrlVar">Kusto URL environment variable name</param>
/// <param name="kustoDatabaseVar">Kusto DB environment variable name</param>
/// <param name="kustoTableVar">Kusto Table environment variable name</param>
/// <param name="pathPrefix">Azure DevOps path prefix (e.g. "\net")</param>
/// <param name="dryRun">Do not persist changes</param>
/// <returns></returns>
public static async Task Main(
    string organization,
    string project,
    string devOpsTokenVar,
    string aadAppIdVar,
    string aadAppSecretVar,
    string aadTenantVar,
    string kustoUrlVar,
    string kustoDatabaseVar,
    string kustoTableVar,
    string pathPrefix = "",
    bool dryRun = false
    )
{
#pragma warning disable CS0618 // Type or member is obsolete
    using (var loggerFactory = LoggerFactory.Create(builder => { builder.AddConsole(config => { config.IncludeScopes = true; }); }))
#pragma warning restore CS0618 // Type or member is obsolete
    {
        // Wire up the three backing services: Azure DevOps, GitHub, and the
        // Kusto-backed GitHub-alias -> internal-principal resolver.
        var devOpsService = AzureDevOpsService.CreateAzureDevOpsService(
            Environment.GetEnvironmentVariable(devOpsTokenVar),
            $"https://dev.azure.com/{organization}/",
            loggerFactory.CreateLogger<AzureDevOpsService>()
            );

        var gitHubServiceLogger = loggerFactory.CreateLogger<GitHubService>();
        var gitHubService = new GitHubService(gitHubServiceLogger);

        var githubNameResolver = new GitHubNameResolver(
            Environment.GetEnvironmentVariable(aadAppIdVar),
            Environment.GetEnvironmentVariable(aadAppSecretVar),
            Environment.GetEnvironmentVariable(aadTenantVar),
            Environment.GetEnvironmentVariable(kustoUrlVar),
            Environment.GetEnvironmentVariable(kustoDatabaseVar),
            Environment.GetEnvironmentVariable(kustoTableVar),
            loggerFactory.CreateLogger<GitHubNameResolver>()
            );

        var logger = loggerFactory.CreateLogger<Program>();

        var pipelines = (await devOpsService.GetPipelinesAsync(project)).ToDictionary(p => p.Id);

        // Pair each "synchronized notification" team with the pipeline its
        // metadata references; missing pipelines are fetched individually.
        var pipelineGroupTasks = (await devOpsService.GetAllTeamsAsync(project))
            .Where(team =>
                   YamlHelper.Deserialize<TeamMetadata>(team.Description, swallowExceptions: true)?.Purpose == TeamPurpose.SynchronizedNotificationTeam
                   ).Select(async team =>
        {
            BuildDefinition pipeline;
            var pipelineId = YamlHelper.Deserialize<TeamMetadata>(team.Description).PipelineId;
            if (!pipelines.ContainsKey(pipelineId))
            {
                pipeline = await devOpsService.GetPipelineAsync(project, pipelineId);
            }
            else
            {
                pipeline = pipelines[pipelineId];
            }
            if (pipeline == default)
            {
                logger.LogWarning($"Could not find pipeline with id {pipelineId} referenced by team {team.Name}.");
            }
            return (new { Pipeline = pipeline, Team = team });
        });

        var pipelineGroups = await Task.WhenAll(pipelineGroupTasks);

        // Only sync teams whose pipeline exists and lives under the requested path prefix.
        var filteredGroups = pipelineGroups.Where(group => group.Pipeline != default && group.Pipeline.Path.StartsWith(pathPrefix));

        foreach (var group in filteredGroups)
        {
            using (logger.BeginScope("Team Name = {0}", group.Team.Name))
            {
                // Only YAML-based pipelines carry a file path we can match against CODEOWNERS.
                if (group.Pipeline.Process.Type != PipelineYamlProcessType)
                {
                    continue;
                }

                // Get contents of CODEOWNERS
                logger.LogInformation("Fetching CODEOWNERS file");
                var managementUrl = new Uri(group.Pipeline.Repository.Properties["manageUrl"]);
                var codeownersContent = await gitHubService.GetCodeownersFile(managementUrl);
                if (codeownersContent == default)
                {
                    logger.LogInformation("CODEOWNERS file not found, skipping sync");
                    continue;
                }

                var process = group.Pipeline.Process as YamlProcess;

                // Find matching contacts for the YAML file's path
                var parser = new CodeOwnersParser(codeownersContent);
                logger.LogInformation("Searching CODEOWNERS for matching path for {0}", process.YamlFilename);
                var contacts = parser.GetContactsForPath(process.YamlFilename);
                logger.LogInformation("Matching Contacts Path = {0}, NumContacts = {1}", process.YamlFilename, contacts.Count);

                // Get set of team members in the CODEOWNERS file:
                // GitHub handles ("@alias") are resolved to internal principals,
                // then to DevOps subject descriptors.
                var contactResolutionTasks = contacts
                                             .Where(contact => contact.StartsWith("@"))
                                             .Select(contact => githubNameResolver.GetInternalUserPrincipal(contact.Substring(1)));
                var codeownerPrincipals = await Task.WhenAll(contactResolutionTasks);

                var codeownersDescriptorsTasks = codeownerPrincipals
                                                 .Where(userPrincipal => !string.IsNullOrEmpty(userPrincipal))
                                                 .Select(userPrincipal => devOpsService.GetDescriptorForPrincipal(userPrincipal));
                var codeownersDescriptors = await Task.WhenAll(codeownersDescriptorsTasks);
                var codeownersSet = new HashSet<string>(codeownersDescriptors);

                // Get set of existing team members
                var teamMembers = await devOpsService.GetMembersAsync(group.Team);
                var teamContactTasks = teamMembers
                                       .Select(async member => await devOpsService.GetUserFromId(new Guid(member.Identity.Id)));
                var teamContacts = await Task.WhenAll(teamContactTasks);
                var teamDescriptors = teamContacts.Select(contact => contact.SubjectDescriptor.ToString());
                var teamSet = new HashSet<string>(teamDescriptors);

                // Synchronize contacts: members not in CODEOWNERS are removed,
                // CODEOWNERS contacts not yet on the team are added.
                var contactsToRemove = teamSet.Except(codeownersSet);
                var contactsToAdd = codeownersSet.Except(teamSet);

                var teamDescriptor = await devOpsService.GetDescriptorAsync(group.Team.Id);

                foreach (var descriptor in contactsToRemove)
                {
                    logger.LogInformation("Delete Contact TeamDescriptor = {0}, ContactDescriptor = {1}", teamDescriptor, descriptor);
                    if (!dryRun)
                    {
                        await devOpsService.RemoveMember(teamDescriptor, descriptor);
                    }
                }

                foreach (var descriptor in contactsToAdd)
                {
                    logger.LogInformation("Add Contact TeamDescriptor = {0}, ContactDescriptor = {1}", teamDescriptor, descriptor);
                    if (!dryRun)
                    {
                        await devOpsService.AddToTeamAsync(teamDescriptor, descriptor);
                    }
                }
            }
        }
    }
}
public void TestAdvancedIBToolFunctionality()
{
	var tmp = Cache.CreateTemporaryDirectory("advanced-ibtool");
	IBTool ibtool;
	var srcdir = Path.Combine(Configuration.TestProjectsDirectory, "IBToolTaskTests", "LinkedAndTranslated");

	ibtool = CreateIBToolTask(ApplePlatform.iOS, srcdir, tmp);
	var bundleResources = new HashSet<string>();

	// Add some ResourceTags...
	foreach (var storyboard in ibtool.InterfaceDefinitions)
	{
		var tag = Path.GetFileNameWithoutExtension(storyboard.ItemSpec);
		storyboard.SetMetadata("ResourceTags", tag);
	}

	ibtool.EnableOnDemandResources = true;

	Assert.IsTrue(ibtool.Execute(), "Execution of IBTool task failed.");

	foreach (var bundleResource in ibtool.BundleResources)
	{
		var bundleName = bundleResource.GetMetadata("LogicalName");
		var tag = bundleResource.GetMetadata("ResourceTags");

		Assert.IsTrue(File.Exists(bundleResource.ItemSpec), "File does not exist: {0}", bundleResource.ItemSpec);
		Assert.That(bundleResource.GetMetadata("LogicalName"), Is.Not.Null.Or.Empty, "The 'LogicalName' metadata must be set.");
		Assert.That(bundleResource.GetMetadata("Optimize"), Is.Not.Null.Or.Empty, "The 'Optimize' metadata must be set.");
		Assert.That(tag, Is.Not.Null.Or.Empty, "The 'ResourceTags' metadata should be set.");
		Assert.IsTrue(bundleName.Contains(".lproj/" + tag + ".storyboardc/"), "BundleResource does not have the proper ResourceTags set: {0}", bundleName);

		bundleResources.Add(bundleName);
	}
	// Note: the original re-assigned EnableOnDemandResources = true here,
	// after Execute() had already run — a dead statement, removed.

	string [] expected =
	{
		"en.lproj/Main.storyboardc/UIViewController-BYZ-38-t0r.nib",
		"en.lproj/Main.storyboardc/BYZ-38-t0r-view-8bC-Xf-vdC.nib",
		"en.lproj/Main.storyboardc/Info.plist",
		"en.lproj/Main.storyboardc/MyLinkedViewController.nib",
		"en.lproj/Linked.storyboardc/5xv-Yx-H4r-view-gMo-tm-chA.nib",
		"en.lproj/Linked.storyboardc/Info.plist",
		"en.lproj/Linked.storyboardc/MyLinkedViewController.nib",
		"Base.lproj/Main.storyboardc/UIViewController-BYZ-38-t0r.nib",
		"Base.lproj/Main.storyboardc/BYZ-38-t0r-view-8bC-Xf-vdC.nib",
		"Base.lproj/Main.storyboardc/Info.plist",
		"Base.lproj/Main.storyboardc/MyLinkedViewController.nib",
		"Base.lproj/Linked.storyboardc/5xv-Yx-H4r-view-gMo-tm-chA.nib",
		"Base.lproj/Linked.storyboardc/Info.plist",
		"Base.lproj/Linked.storyboardc/MyLinkedViewController.nib",
		"Base.lproj/LaunchScreen.storyboardc/01J-lp-oVM-view-Ze5-6b-2t3.nib",
		"Base.lproj/LaunchScreen.storyboardc/UIViewController-01J-lp-oVM.nib",
		"Base.lproj/LaunchScreen.storyboardc/Info.plist",
	};

	// Bug fix: the original swapped the two diffs' names and messages.
	// bundleResources.Except(expected) = produced but NOT expected => extra resources;
	// expected.Except(bundleResources) = expected but NOT produced => missing resources.
	var extraResources = bundleResources.Except(expected).ToArray();
	var missingResources = expected.Except(bundleResources).ToArray();

	Assert.That(extraResources, Is.Empty, "No extra resources");
	Assert.That(missingResources, Is.Empty, "No missing resources");
}
/// <summary>
/// Fetches play data for the slot machines of every configured store (tempo)
/// for the given date, optionally restricted to the given machine models,
/// and appends the parsed results to SlotPlayDataCollection.
/// </summary>
/// <param name="dataDate">The date whose play data should be fetched.</param>
/// <param name="inputSlotModels">Machine models to restrict the search to; null means all machines.</param>
/// <returns>A task that completes when all stores have been processed.</returns>
public async Task GetSlotDataAsync(DateTime dataDate, List<string> inputSlotModels)
{
    foreach (var tempo in _tempos)
    {
        // Reset per-store URI buffers.
        _floorUri.Clear();
        _slotDataUri.Clear();

        // Default to the store's full range of machine unit numbers.
        var slotMachineStartNo = tempo.SlotMachineStartNo;
        var slotMachineEndNo = tempo.SlotMachineEndNo;
        var slotMachineNumbers = Enumerable.Range(slotMachineStartNo, slotMachineEndNo - slotMachineStartNo + 1);

        if (inputSlotModels != null)
        {
            // Build URIs that let us check whether the requested models exist at this store.
            _floorUri.AddRange(inputSlotModels.Select(slotModel => new Uri($"{tempo.StoreURL}unit_list?model={slotModel}")));
            try
            {
                var floorStreamTasks = _floorUri.Select(floorUri => _httpClient.GetStreamAsync(floorUri));
                var streamResponses = await Task.WhenAll(floorStreamTasks);

                // Parse the fetched pages to find the unit numbers for each requested model.
                var floorAnalyseTasks = streamResponses.Select(response => _analysisHTML.AnalyseFloorAsync(response, inputSlotModels));
                var slotMachineNumbersForSlotModels = await Task.WhenAll(floorAnalyseTasks);

                // Flatten the per-model lists into one sequence of unit numbers.
                slotMachineNumbers = slotMachineNumbersForSlotModels.SelectMany(x => x);
            }
            catch
            {
                // Treat any failure as "the requested model does not exist at this store"
                // and fall through with an empty unit list (best-effort by design).
                //Console.WriteLine($"{tempo.StoreName}に{slotModel}はありませんでした。");
                slotMachineNumbers = new HashSet<int>();
            }
        }

        try
        {
            // Build the URIs that host each machine's play data for the requested date.
            var month = $"{dataDate.Month:D2}";
            var day = $"{dataDate.Day:D2}";
            _slotDataUri.AddRange(slotMachineNumbers
                                  .Select(slotMachineNumber => new Uri(
                                              $"{tempo.StoreURL}detail?unit={slotMachineNumber}&target_date={dataDate.Year}-{month}-{day}")));

            // Probe each URI to see whether it exists.
            var requestCheckTasks = _slotDataUri.Select(uri => _httpClient.GetAsync(uri));
            var requestCheckResponses = await Task.WhenAll(requestCheckTasks);

            // Collect the URIs that do not exist (non-200 responses).
            var notExistUri = requestCheckResponses
                              .Where(x => x.StatusCode != HttpStatusCode.OK)
                              .Select(x => x.RequestMessage.RequestUri);

            // Download the page source of every URI that does exist.
            var slotDataStreamTasks = _slotDataUri
                                      .Except(notExistUri)
                                      .Select(uri => _httpClient.GetStreamAsync(uri));
            var streamResponses = await Task.WhenAll(slotDataStreamTasks);

            // Parse the downloaded sources into play-data records.
            var slotDataAnalyseTasks = streamResponses.Select(
                response => _analysisHTML.AnalyseAsync(response, _slotModels, tempo, inputSlotModels));
            var analysisSlotData = await Task.WhenAll(slotDataAnalyseTasks);

            // Append the successfully parsed records to the result collection.
            foreach (var slotPlayData in analysisSlotData.Where(x => x != null))
            {
                SlotPlayDataCollection.Add(slotPlayData);
            }
        }
        catch (Exception e)
        {
            // Debug aids: expose the URI buffers to the debugger before rethrowing.
            // NOTE(review): these locals (and 'e') are otherwise unused — consider removing.
            var check = _floorUri;
            var check2 = _slotDataUri;
            var check3 = _streamUri;
            // (original comment: "what else could there be?")
            throw;
        }
    }
}
// Verifies that the environment block a child process actually receives matches
// StartInfo.Environment, and that it is a (proper) subset of the current
// process's environment.
public void TestEnvironmentOfChildProcess()
{
    const string UnicodeEnvVar = "TestEnvironmentOfChildProcess";
    Environment.SetEnvironmentVariable(UnicodeEnvVar, "\x1234\x5678"); // ensure some Unicode characters are in the output
    try
    {
        var expectedEnv = new HashSet<string>();
        var actualEnv = new HashSet<string>();

        // The child process prints its own environment, one KEY=VALUE line per variable.
        Process p = CreateProcess(() =>
        {
            foreach (DictionaryEntry envVar in Environment.GetEnvironmentVariables())
            {
                Console.WriteLine(envVar.Key + "=" + envVar.Value);
            }
            return (SuccessExitCode);
        });
        // UTF-8 so the Unicode test value above round-trips through stdout.
        p.StartInfo.StandardOutputEncoding = Encoding.UTF8;
        p.StartInfo.RedirectStandardOutput = true;
        p.OutputDataReceived += (s, e) =>
        {
            if (e.Data != null)
            {
                expectedEnv.Add(e.Data);
            }
        };

        p.Start();
        p.BeginOutputReadLine();

        // Capture the environment we asked the child to be started with.
        foreach (KeyValuePair<string, string> envVar in p.StartInfo.Environment)
        {
            actualEnv.Add(envVar.Key + "=" + envVar.Value);
        }

        Assert.True(p.WaitForExit(WaitInMS));
        p.WaitForExit(); // This ensures async event handlers are finished processing.

        // Validate against StartInfo.Environment
        if (!expectedEnv.SetEquals(actualEnv))
        {
            // Report the symmetric difference to make the failure actionable.
            var expected = string.Join(", ", expectedEnv.Except(actualEnv));
            var actual = string.Join(", ", actualEnv.Except(expectedEnv));
            Assert.True(false, string.Format("Expected: {0}{1}Actual: {2}", expected, Environment.NewLine, actual));
        }

        // Validate against current process
        var currentProcEnv = new HashSet<string>();
        foreach (DictionaryEntry envVar in Environment.GetEnvironmentVariables())
        {
            currentProcEnv.Add(envVar.Key + "=" + envVar.Value);
        }

        // Profilers / code coverage tools can add own environment variables but we start
        // child process without them. Thus the set of variables from child process will
        // compose subset of variables from current process.
        // But in case if tests running directly through the Xunit runner, sets will be equal
        // and Assert.ProperSubset will throw. We add null to avoid this.
        currentProcEnv.Add(null);
        Assert.ProperSubset(currentProcEnv, actualEnv);
    }
    finally
    {
        // Always remove the test variable, even if an assertion failed.
        Environment.SetEnvironmentVariable(UnicodeEnvVar, null);
    }
}
/// <summary>
/// Provides access to and/or creates a RocksDb persistent key-value store.
/// </summary>
/// <param name="arguments">
/// Store location, open mode (read-only / bulk-load), log rotation settings, and
/// the expected column-family schema (including key-tracked columns).
/// </param>
public RocksDbStore(RocksDbStoreArguments arguments)
{
    m_storeDirectory = arguments.StoreDirectory;
    m_openBulkLoad = arguments.OpenBulkLoad;

    m_defaults.DbOptions = new DbOptions()
                           .SetCreateIfMissing(true)
                           .SetCreateMissingColumnFamilies(true)
                           // The background compaction threads run in low priority, so they should not hamper the rest of
                           // the system. The number of cores in the system is what we want here according to official docs,
                           // and we are setting this to the number of logical processors, which may be higher.
                           // See: https://github.com/facebook/rocksdb/wiki/RocksDB-Tuning-Guide#parallelism-options
#if !PLATFORM_OSX
                           .SetMaxBackgroundCompactions(Environment.ProcessorCount)
                           .SetMaxBackgroundFlushes(1)
#else
                           // The memtable uses significant chunks of available system memory on macOS, we increase the number
                           // of background flushing threads (low priority) and set the DB write buffer size. This allows for
                           // up to 128 MB in memtables across all column families before we flush to disk.
                           .SetMaxBackgroundCompactions(Environment.ProcessorCount / 4)
                           .SetMaxBackgroundFlushes(Environment.ProcessorCount / 4)
                           .SetDbWriteBufferSize(128 << 20)
#endif
                           .IncreaseParallelism(Environment.ProcessorCount / 2);

    if (arguments.EnableStatistics)
    {
        m_defaults.DbOptions.EnableStatistics();
    }

    if (arguments.OpenBulkLoad)
    {
        m_defaults.DbOptions.PrepareForBulkLoad();
    }

    // Maximum number of information log files
    if (arguments.RotateLogsNumFiles != null)
    {
        m_defaults.DbOptions.SetKeepLogFileNum(arguments.RotateLogsNumFiles.Value);
    }

    // Do not rotate information logs based on file size
    if (arguments.RotateLogsMaxFileSizeBytes != null)
    {
        m_defaults.DbOptions.SetMaxLogFileSize(arguments.RotateLogsMaxFileSizeBytes.Value);
    }

    // How long before we rotate the current information log file
    if (arguments.RotateLogsMaxAge != null)
    {
        m_defaults.DbOptions.SetLogFileTimeToRoll((ulong)arguments.RotateLogsMaxAge.Value.Seconds);
    }

    if (arguments.FastOpen)
    {
        // max_file_opening_threads is defaulted to 16, so no need to update here.
        RocksDbSharp.Native.Instance.rocksdb_options_set_skip_stats_update_on_db_open(m_defaults.DbOptions.Handle, false);
    }

    // A small comment on things tested that did not work:
    // * SetAllowMmapReads(true) and SetAllowMmapWrites(true) produce a dramatic performance drop
    // * SetUseDirectReads(true) disables the OS cache, and although that's good for random point lookups,
    //   it produces a dramatic performance drop otherwise.

    m_defaults.WriteOptions = new WriteOptions()
                              // Disable the write ahead log to reduce disk IO. The write ahead log
                              // is used to recover the store on crashes, so a crash will lose some writes.
                              // Writes will be made in-memory only until the write buffer size
                              // is reached and then they will be flushed to storage files.
                              .DisableWal(1)
                              // This option is off by default, but just making sure that the C# wrapper
                              // doesn't change anything. The idea is that the DB won't wait for fsync to
                              // return before acknowledging the write as successful. This affects
                              // correctness, because a write may be ACKd before it is actually on disk,
                              // but it is much faster.
                              .SetSync(false);

    var blockBasedTableOptions = new BlockBasedTableOptions()
                                 // Use a bloom filter to help reduce read amplification on point lookups. 10 bits per key yields a
                                 // ~1% false positive rate as per the RocksDB documentation. This builds one filter per SST, which
                                 // means its optimized for not having a key.
                                 .SetFilterPolicy(BloomFilterPolicy.Create(10, false))
                                 // Use a hash index in SST files to speed up point lookup.
                                 .SetIndexType(BlockBasedTableIndexType.HashSearch)
                                 // Whether to use the whole key or a prefix of it (obtained through the prefix extractor below).
                                 // Since the prefix extractor is a no-op, better performance is achieved by turning this off (i.e.
                                 // setting it to true).
                                 .SetWholeKeyFiltering(true);

    m_defaults.ColumnFamilyOptions = new ColumnFamilyOptions()
#if PLATFORM_OSX
                                     // As advised by the official documentation, LZ4 is the preferred compression algorithm, our RocksDB
                                     // dynamic library has been compiled to support this on macOS. Fallback to Snappy on other systems (default).
                                     .SetCompression(CompressionTypeEnum.rocksdb_lz4_compression)
#endif
                                     .SetBlockBasedTableFactory(blockBasedTableOptions)
                                     .SetPrefixExtractor(SliceTransform.CreateNoOp());

    m_columns = new Dictionary<string, ColumnFamilyInfo>();

    // The columns that exist in the store on disk may not be in sync with the columns being passed into the constructor
    HashSet<string> existingColumns;
    try
    {
        existingColumns = new HashSet<string>(RocksDb.ListColumnFamilies(m_defaults.DbOptions, m_storeDirectory));
    }
    catch (RocksDbException)
    {
        // If there is no existing store, an exception will be thrown, ignore it
        existingColumns = new HashSet<string>();
    }

    // In read-only mode, open all existing columns in the store without attempting to validate it against the expected column families
    if (arguments.ReadOnly)
    {
        var columnFamilies = new ColumnFamilies();
        foreach (var name in existingColumns)
        {
            columnFamilies.Add(name, m_defaults.ColumnFamilyOptions);
        }

        m_store = RocksDb.OpenReadOnly(m_defaults.DbOptions, m_storeDirectory, columnFamilies, errIfLogFileExists: false);
    }
    else
    {
        // For read-write mode, column families may be added, so set up column families schema
        var additionalColumns = arguments.AdditionalColumns ?? CollectionUtilities.EmptyArray<string>();
        var columnsSchema = new HashSet<string>(additionalColumns);

        // Default column
        columnsSchema.Add(ColumnFamilies.DefaultName);

        // For key-tracked column families, create two columns:
        // 1: Normal column of { key : value }
        // 2: Key-tracking column of { key : empty-value }
        if (arguments.DefaultColumnKeyTracked)
        {
            // To be robust to the RocksDB-selected default column name changing,
            // just name the default column's key-tracking column KeyColumnSuffix
            columnsSchema.Add(KeyColumnSuffix);
        }

        var additionalKeyTrackedColumns = arguments.AdditionalKeyTrackedColumns ?? CollectionUtilities.EmptyArray<string>();
        foreach (var name in additionalKeyTrackedColumns)
        {
            columnsSchema.Add(name);
            columnsSchema.Add(name + KeyColumnSuffix);
        }

        // Figure out which columns are not part of the schema
        var outsideSchemaColumns = new List<string>(existingColumns.Except(columnsSchema));

        // RocksDB requires all columns in the store to be opened in read-write mode, so merge existing columns
        // with the columns schema that was passed into the constructor
        existingColumns.UnionWith(columnsSchema);
        var columnFamilies = new ColumnFamilies();
        foreach (var name in existingColumns)
        {
            columnFamilies.Add(name, m_defaults.ColumnFamilyOptions);
        }

        m_store = RocksDb.Open(m_defaults.DbOptions, m_storeDirectory, columnFamilies);

        // Provide an opportunity to update the store to the new column family schema
        if (arguments.DropMismatchingColumns)
        {
            foreach (var name in outsideSchemaColumns)
            {
                m_store.DropColumnFamily(name);
                existingColumns.Remove(name);
            }
        }
    }

    // Build the public column map: each user-facing column, plus its key-tracking
    // companion handle when one exists.
    var userFacingColumns = existingColumns.Where(name => !name.EndsWith(KeyColumnSuffix));
    foreach (var name in userFacingColumns)
    {
        var isKeyTracked = existingColumns.Contains(name + KeyColumnSuffix);
        m_columns.Add(name, new ColumnFamilyInfo()
        {
            Handle = m_store.GetColumnFamily(name),
            UseKeyTracking = isKeyTracked,
            KeyHandle = isKeyTracked ? m_store.GetColumnFamily(name + KeyColumnSuffix) : null,
        });
    }

    // Cache the default column family's info for fast access (may be absent).
    m_columns.TryGetValue(ColumnFamilies.DefaultName, out m_defaultColumnFamilyInfo);
}
/// <summary>
/// Decodes each display's scrambled seven-segment patterns, deduces the digit for
/// every pattern, then sums the decoded four-digit outputs.
/// Reads "input1.txt" from the current directory; one display per CRLF line,
/// formatted "signal patterns | output digits".
/// </summary>
public void Part2()
{
    string data = IO.ReadFile(Path.Combine(Directory.GetCurrentDirectory(), "input1.txt"));

    // Parse each "patterns | outputs" line into a Display.
    List<Display> displays = new List<Display>();
    foreach (string part in data.Split("\r\n"))
    {
        string[] input = part.Split("|");
        displays.Add(new Display(input[0].Trim(), input[1].Trim()));
    }

    int total = 0;
    foreach (Display d in displays)
    {
        Dictionary<int, HashSet<char>> numberToSet = new Dictionary<int, HashSet<char>>();
        Dictionary<HashSet<char>, int> setToNumber = new Dictionary<HashSet<char>, int>();
        List<HashSet<char>> five = new List<HashSet<char>>();  // 5-segment candidates: 2, 3, 5
        List<HashSet<char>> six = new List<HashSet<char>>();   // 6-segment candidates: 0, 6, 9
        Queue<HashSet<char>> queue = new Queue<HashSet<char>>(d.inputs);
        Queue<HashSet<char>> unknownQueue = new Queue<HashSet<char>>();

        // Pass 1: 1, 4, 7 and 8 have unique segment counts and are resolved directly.
        while (queue.Count > 0)
        {
            HashSet<char> cur = queue.Dequeue();
            int val = HashSetToInt(cur);
            if (val >= 0)
            {
                if (!numberToSet.ContainsKey(val))
                {
                    numberToSet.Add(val, cur);
                    setToNumber.Add(cur, val);
                }
                else
                {
                    // BUGFIX: HashSet<T>.Equals is reference equality; a duplicate pattern
                    // is a distinct instance with equal content, so compare with SetEquals.
                    Debug.Assert(numberToSet[val].SetEquals(cur));
                }
            }
            else
            {
                if (cur.Count == 5)
                {
                    five.Add(cur);
                }
                else if (cur.Count == 6)
                {
                    six.Add(cur);
                }
                else
                {
                    // Never read afterwards; segment counts 2/3/4/5/6/7 are all handled above.
                    unknownQueue.Enqueue(cur);
                }
            }
        }

        // 7 minus 1 leaves the top segment.
        HashSet<char> top = numberToSet[7].Except(numberToSet[1]).ToHashSet();
        // 8 minus 7 minus 4 leaves the bottom-left "L" (bottom + lower-left segments).
        HashSet<char> bottom_L_segment = numberToSet[8].Except(numberToSet[7]).Except(numberToSet[4]).ToHashSet();

        // 2 is the only 5-segment digit containing both bottom-left segments.
        foreach (HashSet<char> cur in five)
        {
            if (bottom_L_segment.Intersect(cur).Count() == 2)
            {
                numberToSet[2] = cur;
                setToNumber[cur] = 2;
                break;
            }
        }

        // 2 minus 7 minus the bottom-left pair leaves the middle segment.
        HashSet<char> middle = numberToSet[2].Except(numberToSet[7]).Except(bottom_L_segment).ToHashSet();

        // 8 minus middle = 0.
        HashSet<char> zero = numberToSet[8].Except(middle).ToHashSet();
        numberToSet[0] = zero;
        setToNumber[zero] = 0;

        // Distinguish 3 and 5 among the remaining 5-segment patterns.
        foreach (HashSet<char> cur in five)
        {
            if (setToNumber.ContainsKey(cur))
            {
                continue;
            }
            // 3 minus 7 has exactly 2 segments left (middle + bottom).
            if (cur.Except(numberToSet[7]).Count() == 2)
            {
                if (numberToSet.ContainsKey(3))
                {
                    // BUGFIX: == on HashSet is reference equality; compare contents.
                    Debug.Assert(numberToSet[3].SetEquals(cur));
                }
                numberToSet[3] = cur;
                setToNumber[cur] = 3;
            }
            else
            {
                if (numberToSet.ContainsKey(5))
                {
                    // BUGFIX: == on HashSet is reference equality; compare contents.
                    Debug.Assert(numberToSet[5].SetEquals(cur));
                }
                numberToSet[5] = cur;
                setToNumber[cur] = 5;
            }
        }

        // Distinguish 6 and 9 among the 6-segment patterns (0 is already known).
        foreach (HashSet<char> cur in six)
        {
            if (setToNumber.ContainsKey(cur))
            {
                continue;
            }
            if (cur.Except(numberToSet[7]).Count() == 4)
            {
                // 6 shares only one segment with 7, leaving 4.
                if (numberToSet.ContainsKey(6))
                {
                    Debug.Assert(cur.Except(numberToSet[6]).Count() == 0);
                }
                else
                {
                    numberToSet[6] = cur;
                    setToNumber[cur] = 6;
                }
            }
            else if (cur.Except(numberToSet[7]).Count() == 3)
            {
                // Both 0 and 9 leave 3 segments after removing 7; skip the known 0.
                if (cur.Except(numberToSet[0]).Count() == 0)
                {
                    continue;
                }
                if (numberToSet.ContainsKey(9))
                {
                    Debug.Assert(cur.Except(numberToSet[9]).Count() == 0);
                }
                else
                {
                    numberToSet[9] = cur;
                    setToNumber[cur] = 9;
                }
            }
        }

        // Decode the four output patterns by content comparison and concatenate the digits.
        string output = string.Empty;
        foreach (HashSet<char> o in d.outputs)
        {
            foreach (var kv in numberToSet)
            {
                if (o.SetEquals(kv.Value))
                {
                    output = string.Format("{0}{1}", output, kv.Key);
                }
            }
        }
        total += int.Parse(output);
    }

    // NOTE(review): asserting the puzzle answer equals 0 looks like a placeholder —
    // replace 0 with the expected total once known.
    Assert.Equal(0, total);
}
/// <summary>
/// Counts the valid passports in "input.txt" (blank-line-separated records of
/// "key:value" fields) and writes the count to the console. A record is valid
/// when it has 7-8 fields, contains every required field, and every field
/// passes its individual validator.
/// </summary>
public static void F2()
{
    // Read the file into one string per blank-line-separated record.
    var passports = new List<string>();
    using (var stream = File.OpenRead("input.txt"))
    using (var reader = new StreamReader(stream))
    {
        var current = new StringBuilder();
        string text;
        while ((text = reader.ReadLine()) != null)
        {
            if (text.Length != 0)
            {
                current.AppendLine(text);
            }
            else
            {
                passports.Add(current.ToString());
                current.Clear();
            }
        }
        // Flush the final record (file need not end with a blank line).
        passports.Add(current.ToString());
        current.Clear();
    }

    var fieldPattern = new Regex(@"((\S+):(\S+))");
    var validCount = 0;
    foreach (var passport in passports)
    {
        var matches = fieldPattern.Matches(passport);
        // A passport carries 7 required fields plus an optional "cid".
        if (matches.Count < 7 || matches.Count > 8)
        {
            continue;
        }

        var fields = new Dictionary<string, string>();
        foreach (Match match in matches)
        {
            fields.Add(match.Groups[2].Value, match.Groups[3].Value);
        }

        // Every required field must be present before value validation.
        if (RequiredFields.Except(fields.Keys).Any())
        {
            continue;
        }

        // Short-circuits in the same order the original checked the fields.
        var allValid = ValidatePid(fields["pid"])
                       && ValidateEcl(fields["ecl"])
                       && ValidateHcl(fields["hcl"])
                       && ValidateHgt(fields["hgt"])
                       && ValidateEyr(fields["eyr"])
                       && ValidateIyr(fields["iyr"])
                       && ValidateByr(fields["byr"]);
        if (allValid)
        {
            validCount += 1;
        }
    }
    Console.WriteLine(validCount);
}
//Needs K_CollisionSet to be relative to the (0,0) cell!
/// <summary>
/// Builds the union of collision cells reachable in at most kMinStep steps, optionally
/// extended with backtracked path cells from kMaxStep down toward kMinStep.
/// </summary>
/// <param name="mainMap">Map whose dimensions bound the candidate cells.</param>
/// <param name="TypeGrid">Grid topology used to enumerate a cell's neighbours.</param>
/// <param name="kMinStep">All collision-set entries up to this step index are always included.</param>
/// <param name="kMaxStep">Upper step bound for path reconstruction; ignored unless &gt; kMinStep.</param>
/// <param name="startCell">Origin cell; kMaxStep offsets are translated by it.</param>
/// <param name="percBacktrack">Fraction (0..1) of the kMaxStep cells kept as backtrack seeds.</param>
/// <returns>The accumulated set of collision cells.</returns>
private HashSet<Vector2Int> getMainMapKMaxMinCells(TileObject[,] mainMap, ITypeGrid TypeGrid, int kMinStep, int kMaxStep, Vector2Int startCell, float percBacktrack)
{
    HashSet<Vector2Int> UnionCollisionSet = new HashSet<Vector2Int>();
    if (K_CollisionSet == null || K_CollisionSet.Length == 0)
    {
        ErrorManager.ManageError(ErrorManager.Error.HARD_ERROR, "No collision set for the main map has been built.");
    }

    int i = 0;
    int width = mainMap.GetLength(0);
    int height = mainMap.GetLength(1);

    //Ensure same cells in minimum kMinStep steps: EVERYTHING IS WRT ORIGIN CELL
    while (i <= kMinStep)
    {
        foreach (Vector2Int p in K_CollisionSet[i])
        {
            UnionCollisionSet.Add(p);
        }
        i++;
    }

    //Ensure one or more paths from kMinStep up to kMaxStep: WE DON'T USE IT. IF USE IT CONSIDER WRT TO ORIGIN
    if (kMaxStep > kMinStep)
    {
        HashSet<Vector2Int> toBacktrack = new HashSet<Vector2Int>();
        HashSet<Vector2Int> nextBacktrack = new HashSet<Vector2Int>();

        // Seed the backtrack frontier with the in-bounds kMaxStep cells, translated to startCell.
        foreach (Vector2Int p in K_CollisionSet[kMaxStep])
        {
            Vector2Int x = p + startCell;
            if (Utility.in_bounds_General(x, width, height))
            {
                toBacktrack.Add(x);
            }
        }

        if (toBacktrack.Count > 0)
        {
            //remove from toBacktrack in order to have percBacktrack elements in the set
            int toRemove = (int)((1 - percBacktrack) * (float)toBacktrack.Count);
            for (i = 0; i < toRemove; i++)
            {
                toBacktrack.Remove(toBacktrack.ElementAt(pRNG_Alias.Next(0, toBacktrack.Count)));
            }
            UnionCollisionSet.UnionWith(toBacktrack);

            //backtrack and reconstruct the paths
            for (i = 0; i < kMaxStep - kMinStep; i++)
            {
                // First pass: frontier cells whose previous-step candidate is unique.
                foreach (Vector2Int v in toBacktrack)
                {
                    HashSet<Vector2Int> a = new HashSet<Vector2Int>(Utility.getAllNeighbours_General(v, TypeGrid, width, height));
                    HashSet<Vector2Int> TmpSet = new HashSet<Vector2Int>(K_CollisionSet[kMaxStep - i - 1]);
                    TmpSet.IntersectWith(a);
                    if (TmpSet.Count == 1)
                    {
                        UnionCollisionSet.UnionWith(TmpSet);
                        nextBacktrack.UnionWith(TmpSet);
                    }
                }
                // Second pass: remaining frontier cells pick a random unused candidate.
                foreach (Vector2Int v in toBacktrack)
                {
                    if (!nextBacktrack.Contains(v))
                    {
                        HashSet<Vector2Int> a = new HashSet<Vector2Int>(Utility.getAllNeighbours_General(v, TypeGrid, width, height));
                        HashSet<Vector2Int> TmpSet = new HashSet<Vector2Int>(K_CollisionSet[kMaxStep - i - 1]);
                        TmpSet.IntersectWith(a);
                        // BUGFIX: Enumerable.Except returns a new sequence and never mutates
                        // TmpSet; the original discarded its result, so already-collected cells
                        // were never excluded. ExceptWith performs the intended in-place removal.
                        TmpSet.ExceptWith(UnionCollisionSet);
                        // Guard: the candidate set can now legitimately be empty; picking from
                        // an empty set (ElementAt(0) after Next(0, 0)) would throw.
                        if (TmpSet.Count > 0)
                        {
                            Vector2Int e = TmpSet.ElementAt(pRNG_Alias.Next(0, TmpSet.Count));
                            UnionCollisionSet.Add(e);
                            nextBacktrack.Add(e);
                        }
                    }
                }
                toBacktrack = new HashSet<Vector2Int>(nextBacktrack);
            }
        }
    }
    return (UnionCollisionSet);
}
/// <summary>
/// Recalculates inspection-operation notifications (NRO = non required inspection
/// operation) after a pipe save, then notifies listeners and persists the new pipe state.
/// </summary>
/// <param name="pipeSavingState">The pipe as it is being saved.</param>
public void UpdateNotifications(Domain.Entity.Mill.Pipe pipeSavingState)
{
    if (isProperlyCreated)
    {
        //* What can happen at Save Pipe: (NRO - non required inspection operation)
        //* - pipe is new and have no previous state (to update: NROs from current size type(new))
        if (initialPipeSizeTypeId == Guid.Empty)
        {
            ProcessPipeTestResults(pipeSavingState.PipeTestResult);
            ProcessNROForPipeSizeType(pipeSavingState.Type.Id, pipeSavingState, NROWhatToDo.Add);
        }
        //* - pipe is existing and pipe size type changed (to update: NROs from previous size type(remove), NROs from current size type(new))
        else if (pipeSavingState.Type == null || initialPipeSizeTypeId != pipeSavingState.Type.Id)
        {
            //update notification for old size type
            IList<Guid> removeId = new List<Guid>();
            foreach (TestResultInfo t in initialTestResultList)
            {
                removeId.Add(t.OperationId);
            }
            // BUGFIX: Distinct() returns a new sequence and does not mutate its source;
            // the original discarded the result, so duplicate operation ids were
            // updated repeatedly. Deduplicate at the point of enumeration instead.
            foreach (Guid id in removeId.Distinct())
            {
                manager.UpdateUnits(id);
            }
            foreach (Guid id in manager.cache.EnumerateOperationsForSizeType(initialPipeSizeTypeId))
            {
                manager.cache.RemoveUnits(id, ChooseUnit(manager.cache.GetMeasure(id)));
                UpdateNotification(id);
            }

            //update notification for new size type
            // NOTE(review): this branch is also entered when pipeSavingState.Type == null,
            // in which case pipeSavingState.Type.Id below throws — confirm whether Type
            // can actually be null here and what the intended behavior is.
            ProcessPipeTestResults(pipeSavingState.PipeTestResult);
            foreach (Guid id in manager.cache.EnumerateOperationsForSizeType(pipeSavingState.Type.Id))
            {
                manager.cache.AddUnits(id, ChooseUnit(manager.cache.GetMeasure(id), pipeSavingState));
                UpdateNotification(id);
            }
        }
        //* - pipe is existing and operations were edited (to update: NROs from current size type(track changes))
        else if (initialPipeSizeTypeId == pipeSavingState.Type.Id)
        {
            HashSet<NROInfo> initialState = new HashSet<NROInfo>();
            initialState.UnionWith(initialNROList);
            HashSet<NROInfo> savingState = new HashSet<NROInfo>();
            savingState.UnionWith(GetNROInfoListFromPipeTestResultList(pipeSavingState.PipeTestResult));
            // Operations present in the saving state but not in the initial state.
            var resultList = savingState.Except(initialState);
            foreach (NROInfo result in resultList.Distinct())
            {
                if (result.IsCompleted)
                {
                    manager.UpdateUnits(result.OperationId);
                }
            }
            ProcessNROForPipeSizeType(pipeSavingState.Type.Id, pipeSavingState, NROWhatToDo.EditOperations);
        }
        //* - pipe deactivation (to update: NRO (remove))
        // NOTE(review): unreachable — the two branches above already cover Type.Id being
        // equal or not equal, so !IsActive is never evaluated; confirm intended ordering.
        else if (!pipeSavingState.IsActive)
        {
            ProcessPipeTestResults(pipeSavingState.PipeTestResult);
            ProcessNROForPipeSizeType(pipeSavingState.Type.Id, pipeSavingState, NROWhatToDo.Remove);
        }
    }
    NotificationService.Instance.NotifyInterested();
    SavePipeState(pipeSavingState);
}
/// <summary>
/// Recursively diffs two JSON token lists keyed by their "no" token and records
/// added, removed and modified entries into entity.Items.
/// NOTE(review): assumes "no" values are unique within each list — Dictionary.Add
/// throws on a duplicate key; confirm against the data source.
/// </summary>
/// <param name="tnew">The new (post-change) tokens.</param>
/// <param name="told">The old (pre-change) tokens.</param>
public static void Func(IEnumerable<JToken> tnew, IEnumerable<JToken> told)
{
    if (tnew == null || told == null)
    {
        return;
    }

    // Index the new tokens by their "no" key.
    Dictionary<string, JToken> dicnew = new Dictionary<string, JToken>();
    foreach (var item in tnew)
    {
        var result = item.SelectToken("no");
        dicnew.Add(result.Value<string>(), item);
    }
    // Index the old tokens by their "no" key.
    Dictionary<string, JToken> dicold = new Dictionary<string, JToken>();
    foreach (var item in told)
    {
        var result = item.SelectToken("no");
        dicold.Add(result.Value<string>(), item);
    }

    // Additions: keys present in the new set but not in the old.
    HashSet<string> hadd = new HashSet<string>(dicnew.Select(p => p.Key));
    var listadd = hadd.Except(dicold.Select(p => p.Key));
    foreach (var s in listadd)
    {
        ChangeItem item = new ChangeItem
        {
            ChangeType = ChangeType.Add, ModifiedContent = dicnew[s].ToString()
        };
        entity.Items.Add(item);
    }

    // Removals: keys present in the old set but not in the new.
    HashSet<string> hremove = new HashSet<string>(dicold.Select(p => p.Key));
    var listremove = hremove.Except(dicnew.Select(p => p.Key));
    foreach (var s in listremove)
    {
        ChangeItem item = new ChangeItem
        {
            ChangeType = ChangeType.Remove, ModifiedContent = dicold[s].ToString()
        };
        entity.Items.Add(item);
    }

    // Keys present in both: diff old vs new content, recursing into "sonList" children.
    foreach (var item in told)
    {
        var sd = item.SelectToken("no");
        if (dicnew.ContainsKey(sd.ToString()))
        {
            var child1 = item.SelectToken("sonList");
            var jso = dicnew[sd.ToString()];
            var child2 = jso.SelectToken("sonList");
            if (child1 != null)
            {
                // Clone both tokens and strip the child lists so only the scalar
                // fields are diffed here; the children are handled recursively below.
                var temp1 = item.DeepClone();
                var temp2 = jso.DeepClone();
                var r1 = JObject.Load(temp1.CreateReader());
                var r2 = JObject.Load(temp2.CreateReader());
                r1.Remove("sonList");
                r2.Remove("sonList");
                var rr = r1.Root; // NOTE(review): never read — candidate for removal
                var diff = new JsonDifferentiator();
                var result = diff.Differentiate(r1.Root, r2.Root);
                if (result != null)
                {
                    // Diff in both directions to capture the old and the new key/value pair.
                    var old = diff.list;
                    var diff2 = new JsonDifferentiator();
                    diff2.Differentiate(jso, item);
                    var news = diff2.list;
                    ChangeItem change = new ChangeItem
                    {
                        ChangeType = ChangeType.Modify,
                        OriginalContent = item.ToString(),
                        OriginalKey = old.FirstOrDefault()?.Key,
                        OriginalValue = old.FirstOrDefault()?.Value,
                        ModifiedContent = jso.ToString(),
                        ModifiedKey = news.FirstOrDefault()?.Key,
                        ModifiedValue = news.FirstOrDefault()?.Value
                    };
                    entity.Items.Add(change);
                }
                // Recurse into the child lists.
                Func(child1, child2);
            }
            else
            {
                // No child list on the old token: diff the whole tokens directly.
                var diff = new JsonDifferentiator();
                var result = diff.Differentiate(item, jso);
                if (result != null)
                {
                    var old = diff.list;
                    var diff2 = new JsonDifferentiator();
                    diff2.Differentiate(jso, item);
                    var news = diff2.list;
                    ChangeItem change = new ChangeItem
                    {
                        ChangeType = ChangeType.Modify,
                        OriginalContent = item.ToString(),
                        OriginalKey = old.FirstOrDefault()?.Key,
                        OriginalValue = old.FirstOrDefault()?.Value,
                        ModifiedContent = jso.ToString(),
                        ModifiedKey = news.FirstOrDefault()?.Key,
                        ModifiedValue = news.FirstOrDefault()?.Value
                    };
                    entity.Items.Add(change);
                }
            }
        }
        // If the token contains a sonList, it is handled via the recursion above.
    }
}
/// <summary>
/// Updates a role's name and description and synchronizes the controller actions
/// granted to it. Action assignments of the "SuperAdmin" role are never modified.
/// </summary>
/// <param name="id">Route id; not used — the role is resolved from <paramref name="role"/>.RoleId.</param>
/// <param name="role">The posted role values.</param>
/// <param name="selectedActionCategories">Posted category ids; currently unused (category sync is disabled).</param>
/// <param name="selectedActions">Posted controller-action ids; null when no checkbox is selected.</param>
public async Task <ActionResult> UpdateRole(int id, Role role, List <int> selectedActionCategories, List <int> selectedActions)
{
    if (ModelState.IsValid)
    {
        Role roleToBeUpdated = await _dbContext.Roles.FindAsync(role.RoleId);
        if (roleToBeUpdated != null)
        {
            if (TryUpdateModel(roleToBeUpdated, "", new string[] { "RoleName", "Description" }))
            {
                if (!String.Equals(role.RoleName, "SuperAdmin", StringComparison.CurrentCultureIgnoreCase))
                {
                    // (A commented-out ActionCategories synchronization block used to live
                    // here; removed as dead code. selectedActionCategories is kept in the
                    // signature for caller compatibility.)
                    #region UpdatingRoleActions
                    var roleActionsHs = new HashSet <int>(roleToBeUpdated.ControllerActions.Select(x => x.ControllerActionId).ToList());
                    // BUGFIX: the MVC model binder passes null (not an empty list) when no
                    // action checkbox is posted; guard to avoid ArgumentNullException.
                    var selectedActionsHs = new HashSet <int>(selectedActions ?? new List <int>());
                    // Diff the current assignments against the selection to compute
                    // minimal add/remove sets.
                    var roleActionsToBeDeleted = roleActionsHs.Except(selectedActionsHs).ToList();
                    var roleActionsToBeAdded = selectedActionsHs.Except(roleActionsHs).ToList();
                    foreach (int actionId in roleActionsToBeDeleted)
                    {
                        ControllerAction controllerActionToRemove = roleToBeUpdated.ControllerActions.FirstOrDefault(x => x.ControllerActionId == actionId);
                        roleToBeUpdated.ControllerActions.Remove(controllerActionToRemove);
                    }
                    foreach (var actionId in roleActionsToBeAdded)
                    {
                        ControllerAction controllerAction = await _dbContext.ControllerActions.FindAsync(actionId);
                        roleToBeUpdated.ControllerActions.Add(controllerAction);
                    }
                    #endregion
                }
                await _dbContext.SaveChangesAsync();
                return(RedirectToAction("RoleList"));
            }
        }
        // Role not found or model update failed: nothing was saved.
        return(RedirectToAction("RoleList"));
    }
    ViewBag.AllActionCategories = await _dbContext.ActionCategories.ToListAsync();
    return(View(role));
}
/// <summary>
/// Returns a new set containing the elements of this set that are absent
/// from the other set.
/// </summary>
public Value Difference()
{
    var otherElements = getOtherSet().hashSet;
    return new Set(hashSet.Except(otherElements));
}
/// <summary>
/// Saves edits to a resource and reconciles its comma-separated tag list:
/// removes dropped tag associations, creates previously-unknown tags, and
/// links newly added tags to the resource.
/// </summary>
/// <param name="id">The resource id from the route; must match the posted resource.</param>
/// <param name="returnUrl">Optional local URL to redirect to on success.</param>
/// <param name="editedResource">The posted resource plus its raw tag string.</param>
public async Task <IActionResult> Edit(int id, string returnUrl, [Bind("Resource, Tags")] AdminEditViewModel editedResource)
{
    //TODO: implement the anti-overposting mechanism in the Contoso University example.
    if (id != editedResource.Resource.ID)
    {
        return(NotFound());
    }
    if (ModelState.IsValid)
    {
        try
        {
            var resourceToUpdate = editedResource.Resource;
            _context.Update(editedResource.Resource);

            // process tags: load the current tag links for this resource (read-only).
            var currentTagAssociations = await _context.ResourceTags
                                         .Include(rt => rt.Tag)
                                         .AsNoTracking()
                                         .Where(rt => rt.ResourceID == id)
                                         .ToListAsync();

            //existing tags
            var tagsBefore = currentTagAssociations.Select(rt => rt.Tag.Name).ToHashSet();

            // Parse the posted comma-separated tag string into the desired tag set.
            var tagsAfter = new HashSet <string>();
            if (!string.IsNullOrWhiteSpace(editedResource.Tags))
            {
                tagsAfter = editedResource.Tags.Split(",")
                            .Where(p => !string.IsNullOrWhiteSpace(p))
                            .Select(p => p?.Trim()).ToList().ToHashSet();
            }

            // Tags present before but not after: drop their associations.
            var tagNamesToRemoveFromResource = tagsBefore.Except(tagsAfter);
            if (tagNamesToRemoveFromResource.Count() != 0)
            {
                var tagsToRemove = from t in currentTagAssociations
                                   where tagNamesToRemoveFromResource.Contains(t.Tag.Name)
                                   select t;
                _context.ResourceTags.RemoveRange(tagsToRemove);
            }

            var tagNamesToAddToResource = new HashSet <string>(tagsAfter.Except(tagsBefore));

            // add unknown tags: create Tag rows for names not yet in the table.
            // NOTE(review): SaveChangesAsync inside the loop issues one round-trip per
            // new tag — could be hoisted after the loop; confirm no ordering dependency.
            if (tagNamesToAddToResource.Count() != 0)
            {
                var existingTags = _context.Tags.AsNoTracking().Select(t => t.Name).ToList();
                var unknownTagNames = tagNamesToAddToResource.Except(new HashSet <string>(existingTags));
                foreach (var unknownTagName in unknownTagNames)
                {
                    var tagEntity = _context.Tags.Add(new Tag {
                        Name = unknownTagName
                    });
                    await _context.SaveChangesAsync();
                }
            }

            // add tagNamesToAddToResource to the resource
            // NOTE(review): EF cannot translate the string.Equals(.., StringComparison)
            // overload to SQL — verify this query does not throw/fall back at runtime.
            // Also, if the lookup misses, `tag` is null and `tag.ID` throws — confirm
            // the tag is guaranteed to exist at this point.
            foreach (var tagName in tagNamesToAddToResource)
            {
                var tag = _context.Tags.AsNoTracking().FirstOrDefault(t => string.Equals(t.Name, tagName, StringComparison.Ordinal));
                _context.ResourceTags.Add(new ResourceTag()
                {
                    ResourceID = resourceToUpdate.ID, TagID = tag.ID
                });
            }
            await _context.SaveChangesAsync();
        }
        catch (DbUpdateConcurrencyException)
        {
            // Swallow the concurrency failure only when the row vanished entirely.
            if (!ResourceExists(editedResource.Resource.ID))
            {
                return(NotFound());
            }
            else
            {
                throw;
            }
        }
        // Only redirect to returnUrl when it is a safe local URL.
        if (string.IsNullOrEmpty(returnUrl) || !Url.IsLocalUrl(returnUrl))
        {
            return(RedirectToAction("Index"));
        }
        return(Redirect(returnUrl));
    }
    return(View(editedResource));
}
/// <summary>
/// Translates the attribute changes on a CSEntryChange into sets of member
/// addresses to add and to delete.
/// </summary>
/// <param name="csentry">The change object containing the attribute modifications.</param>
/// <param name="adds">Receives addresses that must be added.</param>
/// <param name="deletes">Receives addresses that must be removed.</param>
/// <param name="existingMembers">The members currently present on the target object.</param>
/// <param name="attributeName">The multivalued attribute being processed.</param>
/// <param name="replacing">True when the caller replaces the full membership rather than applying a delta.</param>
private void GetMemberChangesFromCSEntryChange(CSEntryChange csentry, HashSet <string> adds, HashSet <string> deletes, HashSet <string> existingMembers, string attributeName, bool replacing)
{
    if (replacing)
    {
        foreach (string address in csentry.GetValueAdds <string>(attributeName))
        {
            adds.Add(address);
        }

        // BUGFIX: when replacing, every existing member not present in the new value
        // set must be deleted (mirroring the Replace case below). The original
        // enumerated deletes.Except(adds) and re-added the results to deletes — a
        // no-op that never produced any deletions.
        // NOTE(review): unlike the Replace case, this path performs no
        // ValidateAddress calls — confirm whether that is intentional.
        foreach (string address in existingMembers.Except(adds))
        {
            deletes.Add(address);
        }

        return;
    }
    else
    {
        AttributeChange change = csentry.AttributeChanges.FirstOrDefault(t => t.Name == attributeName);
        if (change == null)
        {
            return;
        }

        switch (change.ModificationType)
        {
        case AttributeModificationType.Add:
            foreach (string address in csentry.GetValueAdds <string>(attributeName))
            {
                this.ValidateAddress(address, attributeName);
                adds.Add(address);
            }
            break;

        case AttributeModificationType.Delete:
            // A bare delete removes every current member.
            foreach (string member in existingMembers)
            {
                this.ValidateAddress(member, attributeName);
                deletes.Add(member);
            }
            break;

        case AttributeModificationType.Replace:
            // New membership is the add list; anything existing outside it is removed.
            IList <string> newMembers = csentry.GetValueAdds <string>(attributeName);
            foreach (string address in newMembers)
            {
                this.ValidateAddress(address, attributeName);
                adds.Add(address);
            }
            foreach (string member in existingMembers.Except(newMembers))
            {
                this.ValidateAddress(member, attributeName);
                deletes.Add(member);
            }
            break;

        case AttributeModificationType.Update:
            foreach (string address in csentry.GetValueDeletes <string>(attributeName))
            {
                this.ValidateAddress(address, attributeName);
                deletes.Add(address);
            }
            foreach (string address in csentry.GetValueAdds <string>(attributeName))
            {
                this.ValidateAddress(address, attributeName);
                adds.Add(address);
            }
            break;

        case AttributeModificationType.Unconfigured:
        default:
            throw new NotSupportedException("The modification type was unknown or unsupported");
        }
    }
}
/// <summary>
/// Removes every element of this set that is not present in <paramref name="other"/>,
/// implemented as a removal pass so the mutation goes through ExceptWith.
/// </summary>
public void IntersectWith(IEnumerable <T> other)
{
    // Materialize the removals before mutating the underlying set.
    var removals = set.Except(other).ToList();
    ExceptWith(removals);
}
/// <summary>
/// Scans every public type in the assembly — plus its public/protected fields and
/// methods — for spelling mistakes in identifiers, and fails if any are found.
/// Obsolete members and explicitly skipped entries are ignored.
/// </summary>
public void TypoTest()
{
    var types = Assembly.GetTypes();
    int totalErrors = 0;
    foreach (Type t in types)
    {
        if (t.IsPublic)
        {
            // Check attribute messages on the type itself.
            AttributesMessageTypoRules(t, t.Name, ref totalErrors);
            if (IsObsolete(t))
            {
                continue;
            }
            // Check the type name.
            string txt = NameCleaner(t.Name);
            var typo = GetTypo(txt);
            if (typo.Length > 0)
            {
                if (!Skip(t, typo))
                {
                    ReportError("Typo in TYPE: {0} - {1} ", t.Name, typo);
                    totalErrors++;
                }
            }
            // Check public and protected field names.
            var fields = t.GetFields();
            foreach (FieldInfo f in fields)
            {
                if (!f.IsPublic && !f.IsFamily)
                {
                    continue;
                }
                AttributesMessageTypoRules(f, t.Name, ref totalErrors);
                if (IsObsolete(f))
                {
                    continue;
                }
                txt = NameCleaner(f.Name);
                typo = GetTypo(txt);
                if (typo.Length > 0)
                {
                    if (!Skip(f, typo))
                    {
                        ReportError("Typo in FIELD name: {0} - {1}, Type: {2}", f.Name, typo, t.Name);
                        totalErrors++;
                    }
                }
            }
            // Check public and protected method names.
            var methods = t.GetMethods();
            foreach (MethodInfo m in methods)
            {
                if (!m.IsPublic && !m.IsFamily)
                {
                    continue;
                }
                AttributesMessageTypoRules(m, t.Name, ref totalErrors);
                if (IsObsolete(m))
                {
                    continue;
                }
                txt = NameCleaner(m.Name);
                typo = GetTypo(txt);
                if (typo.Length > 0)
                {
                    if (!Skip(m, typo))
                    {
                        ReportError("Typo in METHOD name: {0} - {1}, Type: {2}", m.Name, typo, t.Name);
                        totalErrors++;
                    }
                }
#if false
                // Disabled: parameter-name checking (kept for reference).
                var parameters = m.GetParameters();
                foreach (ParameterInfo p in parameters)
                {
                    txt = NameCleaner(p.Name);
                    typo = GetTypo(txt);
                    if (typo.Length > 0)
                    {
                        ReportError("Typo in PARAMETER Name: {0} - {1}, Method: {2}, Type: {3}", p.Name, typo, m.Name, t.Name);
                        totalErrors++;
                    }
                }
#endif
            }
        }
    }
#if false
    // ease removal of unrequired values (but needs to be checked for every profile)
    var unused = allowed.Except(used);
    foreach (var typo in unused)
    {
        Console.WriteLine("Unused entry \"{0}\"", typo);
    }
#endif
    Assert.IsTrue((totalErrors == 0), "We have {0} typos!", totalErrors);
}
/// <summary>
/// Verifies that MemoryModel.OpenReadStep21 can skip unwanted entity types while
/// parsing: after loading the file with an ignore-list, no instance of an
/// unwanted type may appear in the model.
/// </summary>
public void Can_skip_entities_while_parsing()
{
    using (var strm = File.OpenRead(@"TestFiles\4walls1floorSite.ifc"))
    {
        var ifc2x3MetaData = ExpressMetaData.GetMetadata((new Xbim.Ifc2x3.EntityFactoryIfc2x3()).GetType().GetTypeInfo().Module);
        var ifc4MetaData = ExpressMetaData.GetMetadata((new Xbim.Ifc4.EntityFactoryIfc4()).GetType().GetTypeInfo().Module);

        // All concrete persistable entity names across both schemas.
        // BUGFIX: the second half of this union previously re-used ifc2x3MetaData
        // (leaving ifc4MetaData computed but unused); it now contributes the IFC4 names.
        Func<ExpressMetaData, IEnumerable<string>> concreteNames = md =>
            md.Types()
              .Where(et => typeof(IPersistEntity).IsAssignableFrom(et.Type) && !et.Type.IsAbstract)
              .Select(et => et.ExpressNameUpper);
        var allTypes = new HashSet<string>(concreteNames(ifc2x3MetaData).Concat(concreteNames(ifc4MetaData)));

        var requiredTypes = new HashSet<string>();
        foreach (var metadata in new[] { ifc2x3MetaData })
        {
            // Interfaces whose implementors are kept unconditionally.
            var keepInterfaces = new[]
            {
                typeof(IIfcProject), typeof(IIfcAsset), typeof(IIfcActor),
                typeof(IIfcSpatialElement), typeof(IIfcProxy), typeof(IIfcTypeProduct),
                typeof(IIfcPropertySetDefinitionSelect), typeof(IIfcRelDefinesByProperties),
                typeof(IIfcSimpleProperty), typeof(IIfcElementQuantity), typeof(IIfcPhysicalSimpleQuantity),
                typeof(IIfcRelDefinesByType), typeof(IIfcUnitAssignment), typeof(IIfcNamedUnit),
                typeof(IIfcMeasureWithUnit), typeof(IIfcDimensionalExponents),
                typeof(IIfcRelAssociatesClassification), typeof(IIfcClassificationReference),
                typeof(IIfcClassification), typeof(IIfcActorSelect), typeof(IIfcAddress),
                typeof(IIfcApplication), typeof(IIfcActorRole), typeof(IIfcDocumentSelect)
            };
            foreach (var iface in keepInterfaces)
            {
                foreach (var t in metadata.ExpressTypesImplementing(iface))
                {
                    requiredTypes.Add(t.ExpressNameUpper);
                }
            }

            // Systems are kept, except structural analysis models.
            foreach (var t in metadata.ExpressTypesImplementing(typeof(IIfcSystem))
                     .Where(t => !typeof(IIfcStructuralAnalysisModel).IsAssignableFrom(t.Type)))
            {
                requiredTypes.Add(t.ExpressNameUpper);
            }

            // Elements are kept, except feature and virtual elements.
            foreach (var t in metadata.ExpressTypesImplementing(typeof(IIfcElement))
                     .Where(t => !typeof(IIfcFeatureElement).IsAssignableFrom(t.Type) && !typeof(IIfcVirtualElement).IsAssignableFrom(t.Type)))
            {
                requiredTypes.Add(t.ExpressNameUpper);
            }
        }

        // Everything not required is skipped during parsing.
        var unwantedTypes = allTypes.Except(requiredTypes);
        var unwanted = new HashSet<string>(unwantedTypes);
        using (var mm = MemoryModel.OpenReadStep21(strm, null, null, unwantedTypes.ToList()))
        {
            foreach (var instance in mm.Instances)
            {
                Assert.IsFalse(unwanted.Contains(instance.ExpressType.ExpressNameUpper));
            }
        }
    }
}
/// <summary>
/// Resyncs the user's remote presences with our view of the neighbor regions:
/// computes which presences are no longer within draw distance (dead) and which
/// neighbor regions need a new presence, then hands both lists to ResyncRegions.
/// </summary>
/// <param name="newDrawDistance">The new DD for the user</param>
/// <param name="resyncDelay">Delay before executing the resync. We delay on a region crossing because the viewer locks up sometimes when freeing memory</param>
/// <returns></returns>
private async Task CalculateAndResyncNeighbors(uint newDrawDistance, int resyncDelay)
{
    uint xmin, xmax, ymin, ymax;
    // NOTE(review): the computed rectangle is never read below — presumably leftover;
    // confirm GetDrawDistanceBasedRegionRectangle has no side effects before removing.
    Util.GetDrawDistanceBasedRegionRectangle((uint)newDrawDistance, _scene.RegionInfo.RegionLocX, _scene.RegionInfo.RegionLocY, out xmin, out xmax, out ymin, out ymax);

    //get our current neighbor list
    List<SimpleRegionInfo> knownNeighborsList = _scene.SurroundingRegions.GetKnownNeighborsWithinClientDD(newDrawDistance);
    Dictionary<ulong, SimpleRegionInfo> knownNeighborsDict = new Dictionary<ulong, SimpleRegionInfo>();
    foreach (var neighbor in knownNeighborsList)
    {
        knownNeighborsDict.Add(neighbor.RegionHandle, neighbor);
    }
    HashSet<ulong> knownNeighbors = new HashSet<ulong>(knownNeighborsList.Select(x => x.RegionHandle));

    List<ulong> deadRegions;
    List<ulong> newRegions;
    lock (_remotePresences)
    {
        //check the list of what we have vs what we should have
        HashSet<ulong> usersRegions = new HashSet<ulong>();

        //add all regions from the presence
        foreach (var presence in _remotePresences.Values)
        {
            // Ensure every presence's region is known to the dict (may overwrite
            // an entry from the neighbor list with the presence's own info).
            knownNeighborsDict[presence.PresenceInfo.RegionInfo.RegionHandle] = presence.PresenceInfo.RegionInfo;

            //dont put far regions into this update, they shouldnt be dropped by DD changes
            if (!presence.IsFarPresence)
            {
                usersRegions.Add(presence.PresenceInfo.RegionInfo.RegionHandle);
            }
        }

        // regions that we have but that we shouldnt have anymore
        deadRegions = new List<ulong>(usersRegions.Except(knownNeighbors));

        // regions that we don't have that we need to add
        newRegions = new List<ulong>(knownNeighbors.Except(usersRegions));
    }

    try
    {
        // Serialize resync operations; the optional delay gives the viewer time
        // to settle after a region crossing (see resyncDelay doc above).
        await _operationSemaphore.WaitAsync();
        if (resyncDelay > 0)
            await Task.Delay(resyncDelay);
        await this.ResyncRegions(knownNeighborsDict, deadRegions, newRegions);
    }
    finally
    {
        _operationSemaphore.Release();
    }
}
/// <summary>
/// Recursively walks the bus network away from <paramref name="FromBus"/>,
/// accumulating every bus on a path that reconnects to <paramref name="busesOnPath"/>.
/// </summary>
/// <param name="FromBus">The bus we are stepping away from.</param>
/// <param name="BusThatStartedItAll">The bus the whole trace began at; excluded from path unions.</param>
/// <param name="busesInThisTrace">Buses already visited on the current branch.</param>
/// <param name="busesOnPath">Accumulator: buses known to lie on a route to the source.</param>
private static void TraceToSource(Bus FromBus, Bus BusThatStartedItAll, HashSet<Bus> busesInThisTrace, HashSet<Bus> busesOnPath)
{
    //start at FromBus, take a step away from FromBus.
    //If we're at the source, add the path that we took to BusesOnRouteToSource.
    //If we're at a dead-end, then stop tracing this branch.
    //If we hit a bus that's already on the route, then terminate and add the branch.

    // BUGFIX/perf: the original kept this as a deferred LINQ query and re-ran it on
    // every loop iteration via Count(), re-walking all neighbouring lines each time
    // (accidental O(n^2) and multiple enumeration). Materialize it exactly once.
    // Behavior is unchanged: busesInThisTrace is only mutated on the single-neighbour
    // path, where the loop runs exactly one iteration anyway.
    var connectedBuses = FromBus.ConnectedTo.OfType<Line>()
        .Select(line => line.ConnectedTo.OfType<Bus>().Except(FromBus.Yield()).Single()) //all the buses neighbouring this one
        .Except(busesInThisTrace) //exclude any buses we've already touched in this trace
        .ToList();

    foreach (var bus in connectedBuses)
    {
        if (busesOnPath.Contains(bus)) //we're connected to the target bus. no further processing required on this branch.
        {
            busesOnPath.UnionWith(busesInThisTrace.Except(BusThatStartedItAll.Yield()));
            continue;
        }
        else
        {
            //keep searching!
            HashSet<Bus> NextStepTraceList;
            if (connectedBuses.Count == 1) //if this is the only possible way forward, then just keep using the same thingy.
            {
                NextStepTraceList = busesInThisTrace;
            }
            else
            {
                NextStepTraceList = new HashSet<Bus>(busesInThisTrace);
            }
            NextStepTraceList.Add(bus);
            TraceToSource(bus, BusThatStartedItAll, NextStepTraceList, busesOnPath);
        }
    }
}
/// <summary>
/// Given a list of symbols, determine which are not recommended at the same position in linked documents.
/// </summary>
/// <param name="expectedSymbols">The symbols recommended in the active context.</param>
/// <param name="linkedContextSymbolLists">The symbols recommended in linked documents</param>
/// <returns>The list of projects each recommended symbol did NOT appear in.</returns>
protected Dictionary<ISymbol, List<ProjectId>> FindSymbolsMissingInLinkedContexts(HashSet<ISymbol> expectedSymbols, IEnumerable<Tuple<DocumentId, AbstractSyntaxContext, IEnumerable<ISymbol>>> linkedContextSymbolLists)
{
    var comparer = LinkedFilesSymbolEquivalenceComparer.IgnoreAssembliesInstance;
    var missingSymbols = new Dictionary<ISymbol, List<ProjectId>>(comparer);

    foreach (var contextSymbols in linkedContextSymbolLists)
    {
        var projectId = contextSymbols.Item1.ProjectId;

        // Symbols expected in the active context but absent from this linked context.
        foreach (var absentSymbol in expectedSymbols.Except(contextSymbols.Item3, comparer))
        {
            List<ProjectId> projects;
            if (!missingSymbols.TryGetValue(absentSymbol, out projects))
            {
                projects = new List<ProjectId>();
                missingSymbols.Add(absentSymbol, projects);
            }
            projects.Add(projectId);
        }
    }

    return missingSymbols;
}
/// <summary>
/// Call to find interpreters in the associated project. Separated from
/// the constructor to allow exceptions to be handled without causing
/// the project node to be invalid.
/// </summary>
/// <param name="projectInfo">The project whose &lt;Interpreter&gt; items are scanned.</param>
/// <returns>True if the set of factories changed since the previous scan.</returns>
private bool DiscoverInterpreters(ProjectInfo projectInfo) {
    // <Interpreter Include="InterpreterDirectory">
    //   <Id>factoryProviderId;interpreterFactoryId</Id>
    //   <BaseInterpreter>factoryProviderId;interpreterFactoryId</BaseInterpreter>
    //   <Version>...</Version>
    //   <InterpreterPath>...</InterpreterPath>
    //   <WindowsInterpreterPath>...</WindowsInterpreterPath>
    //   <LibraryPath>...</LibraryPath>
    //   <PathEnvironmentVariable>...</PathEnvironmentVariable>
    //   <Description>...</Description>
    // </Interpreter>
    var projectHome = PathUtils.GetAbsoluteDirectoryPath(
        Path.GetDirectoryName(projectInfo.FullPath),
        projectInfo.GetPropertyValue("ProjectHome")
    );
    var factories = new Dictionary<string, FactoryInfo>();
    foreach (var item in projectInfo.GetInterpreters()) {
        // Errors in these options are fatal, so we log and continue with the
        // next entry.
        var dir = GetValue(item, "EvaluatedInclude");
        if (!PathUtils.IsValidPath(dir)) {
            Log("Interpreter has invalid path: {0}", dir ?? "(null)");
            continue;
        }
        dir = PathUtils.GetAbsoluteDirectoryPath(projectHome, dir);

        var id = GetValue(item, MSBuildConstants.IdKey);
        if (string.IsNullOrEmpty(id)) {
            Log("Interpreter {0} has invalid value for '{1}': {2}", dir, MSBuildConstants.IdKey, id);
            continue;
        }
        if (factories.ContainsKey(id)) {
            Log("Interpreter {0} has a non-unique id: {1}", dir, id);
            continue;
        }

        var verStr = GetValue(item, MSBuildConstants.VersionKey);
        Version ver;
        if (string.IsNullOrEmpty(verStr) || !Version.TryParse(verStr, out ver)) {
            Log("Interpreter {0} has invalid value for '{1}': {2}", dir, MSBuildConstants.VersionKey, verStr);
            continue;
        }

        // The rest of the options are non-fatal. We create an instance
        // of NotFoundError with an amended description, which will
        // allow the user to remove the entry from the project file
        // later.
        bool hasError = false;
        bool hasDescription = true;
        var description = GetValue(item, MSBuildConstants.DescriptionKey);
        if (string.IsNullOrEmpty(description)) {
            hasDescription = false;
            description = PathUtils.CreateFriendlyDirectoryPath(projectHome, dir);
        }

        var baseInterpId = GetValue(item, MSBuildConstants.BaseInterpreterKey);

        var path = GetValue(item, MSBuildConstants.InterpreterPathKey);
        if (!PathUtils.IsValidPath(path)) {
            Log("Interpreter {0} has invalid value for '{1}': {2}", dir, MSBuildConstants.InterpreterPathKey, path);
            hasError = true;
        } else if (!hasError) {
            path = PathUtils.GetAbsoluteFilePath(dir, path);
        }

        var winPath = GetValue(item, MSBuildConstants.WindowsPathKey);
        if (!PathUtils.IsValidPath(winPath)) {
            Log("Interpreter {0} has invalid value for '{1}': {2}", dir, MSBuildConstants.WindowsPathKey, winPath);
            hasError = true;
        } else if (!hasError) {
            winPath = PathUtils.GetAbsoluteFilePath(dir, winPath);
        }

        var libPath = GetValue(item, MSBuildConstants.LibraryPathKey);
        if (string.IsNullOrEmpty(libPath)) {
            libPath = "lib";
        }
        if (!PathUtils.IsValidPath(libPath)) {
            Log("Interpreter {0} has invalid value for '{1}': {2}", dir, MSBuildConstants.LibraryPathKey, libPath);
            hasError = true;
        } else if (!hasError) {
            libPath = PathUtils.GetAbsoluteDirectoryPath(dir, libPath);
        }

        InterpreterConfiguration baseInterp = null;
        if (!string.IsNullOrEmpty(baseInterpId)) {
            // It's a valid GUID, so find a suitable base. If we
            // don't find one now, we'll try and figure it out from
            // the pyvenv.cfg/orig-prefix.txt files later.
            // Using an empty GUID will always go straight to the
            // later lookup.
            baseInterp = FindConfiguration(baseInterpId);
        }

        var pathVar = GetValue(item, MSBuildConstants.PathEnvVarKey);
        if (string.IsNullOrEmpty(pathVar)) {
            if (baseInterp != null) {
                pathVar = baseInterp.PathEnvironmentVariable;
            } else {
                pathVar = "PYTHONPATH";
            }
        }

        string arch = null;
        if (baseInterp == null) {
            arch = GetValue(item, MSBuildConstants.ArchitectureKey);
            if (string.IsNullOrEmpty(arch)) {
                arch = "x86";
            }
        }

        if (baseInterp == null && !hasError) {
            // Only thing missing is the base interpreter, so let's try
            // to find it using paths
            baseInterp = FindBaseInterpreterFromVirtualEnv(dir, libPath);
            if (baseInterp == null) {
                Log("Interpreter {0} has invalid value for '{1}': {2}", dir, MSBuildConstants.BaseInterpreterKey, baseInterpId ?? "(null)");
                hasError = true;
            }
        }

        string fullId = GetInterpreterId(projectInfo.FullPath, id);

        FactoryInfo info;
        if (hasError) {
            info = new ErrorFactoryInfo(fullId, ver, description, dir);
        } else {
            Debug.Assert(baseInterp != null, "we reported an error if we didn't have a base interpreter");
            if (!hasDescription) {
                description = string.Format("{0} ({1})", description, baseInterp.Description);
            }
            info = new ConfiguredFactoryInfo(this, baseInterp, new InterpreterConfiguration(
                fullId,
                description,
                dir,
                path,
                winPath,
                libPath,
                pathVar,
                baseInterp.Architecture,
                baseInterp.Version,
                InterpreterUIMode.CannotBeDefault | InterpreterUIMode.CannotBeConfigured | InterpreterUIMode.SupportsDatabase
            ));
        }
        MergeFactory(projectInfo, factories, info);
    }

    HashSet<FactoryInfo> previousFactories = new HashSet<FactoryInfo>();
    if (projectInfo.Factories != null) {
        previousFactories.UnionWith(projectInfo.Factories.Values);
    }
    HashSet<FactoryInfo> newFactories = new HashSet<FactoryInfo>(factories.Values);

    bool anyChange = !newFactories.SetEquals(previousFactories);
    if (anyChange || projectInfo.Factories == null) {
        // Lock here mainly to ensure that any searches complete before
        // we trigger the changed event.
        lock (projectInfo) {
            projectInfo.Factories = factories;
        }

        foreach (var removed in previousFactories.Except(newFactories)) {
            projectInfo.ContextProvider.InterpreterUnloaded(
                projectInfo.Context,
                removed.Config
            );
            IDisposable disp = removed as IDisposable;
            if (disp != null) {
                disp.Dispose();
            }
        }

        // BUG FIX: the original nested a loop over *all* factories here, which
        // raised InterpreterLoaded for every factory once per added factory.
        // Only the newly added factories should be announced, once each.
        foreach (var added in newFactories.Except(previousFactories)) {
            projectInfo.ContextProvider.InterpreterLoaded(
                projectInfo.Context,
                added.Config
            );
        }
    }

    return anyChange;
}
/// <summary>Gets entries in t1 that are missing from t2, sorted ascending.</summary>
List<string> FindMissing(HashSet<string> t1, HashSet<string> t2)
{
    // Keep only the entries the second set lacks, then sort in place.
    var missing = t1.Except(t2).ToList();
    missing.Sort();
    return missing;
}
/// <summary>
/// Returns the known properties that have not been modified through this <see cref="Delta"/> as an
/// <see cref="IEnumerable{T}" /> of property Names. Does not include the names of the changed dynamic
/// properties.
/// </summary>
public override IEnumerable<string> GetUnchangedPropertyNames()
{
    // Everything updatable minus whatever has been changed so far.
    var changedNames = GetChangedPropertyNames();
    return _updatableProperties.Except(changedNames);
}
/// <summary>
/// Finds AD groups whose uSNChanged is above <paramref name="prevHighUSN"/>, compares
/// their membership against the cached state, and reloads every user whose group
/// membership changed since the last synchronization.
/// </summary>
/// <param name="server">The AD server being synchronized.</param>
/// <param name="rootDSE">Root entry used both for searching and for per-user reloads.</param>
/// <param name="prevHighUSN">The highest uSNChanged value seen in the previous sync.</param>
private void LoadUsersByGroups(ADServer server, DirectoryEntry rootDSE, string prevHighUSN)
{
    List<string> usersToUpdate = new List<string>();
    using (DirectorySearcher ds = new DirectorySearcher(rootDSE))
    {
        // if '>=' then we get last update twice
        // Note that the operators "<" and ">" are not supported. See "LDAP syntax filter clause"
        ds.Filter = string.Format("(&(objectClass=group)(uSNChanged>={0})(!(uSNChanged={0})))", prevHighUSN);
        ds.SizeLimit = 0; // unlimited
        ds.PageSize = 1000;
        ds.PropertiesToLoad.Add("distinguishedname");
        ds.PropertiesToLoad.Add("member"); // looking for changed membership

        using (SearchResultCollection results = ds.FindAll())
        {
            // BUG FIX: the original read results.Count *before* the null check,
            // which would throw NullReferenceException if results could ever be
            // null; check first, dereference second.
            if (results != null && results.Count > 0)
            {
                foreach (SearchResult gr in results)
                {
                    var dn = (string)gr.Properties["distinguishedname"][0];
                    var membersGr = gr.Properties["member"];
                    ISet<string> cachedMembers;
                    if (groupCache.TryGetValue(dn, out cachedMembers))
                    {
                        var members = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
                        foreach (string m in membersGr)
                        {
                            members.Add(m);
                        }
                        if (!Utils.CheckEquals(cachedMembers, members))
                        {
                            // Symmetric difference: users either added to or removed from the group.
                            var usersDiff = cachedMembers.Except(members).Union(members.Except(cachedMembers)).ToArray();
                            log.LogInfo("Membership changed in Group '" + dn + "' DiffMembers: " + string.Join(";", usersDiff.Take(5)));
                            usersToUpdate.AddRange(usersDiff);
                        }
                        groupCache[dn] = members; // update cache
                    }
                }
            }
        }
    }

    foreach (var u in usersToUpdate.Distinct(StringComparer.OrdinalIgnoreCase))
    {
        LoadUsersByFilter(server, rootDSE, "(&(objectClass=user)(objectCategory=person)(distinguishedName=" + u + "))");
    }
}
/// <summary>
/// Refreshes the known terrain/object snapshots and sends the client only what
/// became visible since the previous update.
/// </summary>
void HandleNewTerrainsAndObjects()
{
    // Rotate the snapshots: current becomes old, then recollect.
    m_oldKnownLocations = m_newKnownLocations;
    m_newKnownLocations = CollectLocations();

    m_oldKnownObjects = m_newKnownObjects;
    m_newKnownObjects = CollectObjects(m_newKnownLocations);

    // Only the set difference (newly revealed items) is transmitted.
    SendNewTerrains(m_newKnownLocations.Except(m_oldKnownLocations));
    SendNewObjects(m_newKnownObjects.Except(m_oldKnownObjects));
}
/// <summary>
/// Adds the pending points to the triangulation, splitting the containing
/// triangle (or the pair of triangles sharing an edge) for each inserted point
/// and re-checking the Delaunay condition after every insertion.
/// </summary>
/// <param name="trianglesHashSet">The current set of triangles in the triangulation.</param>
private static void AddPoints(HashSet<Triangle> trianglesHashSet)
{
    var pointsToAdd = newPoints.Except(pivotPoints);
    foreach (Pixel curPoint in pointsToAdd)
    {
        int i;
        // Point-location loop (walk across triangles until the point falls
        // inside the current triangle or onto one of its edges)
        int prtCheck = Geometry.PointRelativelyTriangle(curPoint, curTriangle.points[0], curTriangle.points[1], curTriangle.points[2]);
        while (prtCheck == -1)
        {
            i = 0;
            while (Geometry.PointsRelativelyStraight(curTriangle.points[i], curPoint, edges[curTriangle.edges[i]]) != -1)
            {
                ++i;
            }
            curTriangle = curTriangle.triangles[i];
            prtCheck = Geometry.PointRelativelyTriangle(curPoint, curTriangle.points[0], curTriangle.points[1], curTriangle.points[2]);
        }
        // The point is strictly inside the triangle: split it into three
        if (prtCheck == 1)
        {
            Triangle newTriangle0 = new Triangle(curTriangle.points[1], curTriangle.points[2], curPoint);
            Triangle newTriangle1 = new Triangle(curTriangle.points[2], curTriangle.points[0], curPoint);
            Triangle newTriangle2 = new Triangle(curTriangle.points[0], curTriangle.points[1], curPoint);
            trianglesHashSet.Add(newTriangle0);
            trianglesHashSet.Add(newTriangle1);
            trianglesHashSet.Add(newTriangle2);
            trianglesHashSet.Remove(curTriangle);
            // Fill in triangle 0 (edges, then neighbour links)
            newTriangle0.edges[0] = new Edge();
            edges.Add(newTriangle0.edges[0], new Section(newTriangle0.points[1], newTriangle0.points[2]));
            newTriangle0.edges[1] = new Edge();
            edges.Add(newTriangle0.edges[1], new Section(newTriangle0.points[2], newTriangle0.points[0]));
            newTriangle0.edges[2] = curTriangle.edges[0];
            newTriangle0.triangles[0] = newTriangle1;
            newTriangle0.triangles[1] = newTriangle2;
            newTriangle0.triangles[2] = curTriangle.triangles[0];
            i = 0;
            if (newTriangle0.triangles[2] != null)
            {
                while (newTriangle0.triangles[2].edges[i] != newTriangle0.edges[2])
                {
                    ++i;
                }
                newTriangle0.triangles[2].triangles[i] = newTriangle0;
            }
            // Fill in triangle 1
            newTriangle1.edges[0] = new Edge();
            edges.Add(newTriangle1.edges[0], new Section(newTriangle1.points[1], newTriangle1.points[2]));
            newTriangle1.edges[1] = newTriangle0.edges[0];
            newTriangle1.edges[2] = curTriangle.edges[1];
            newTriangle1.triangles[0] = newTriangle2;
            newTriangle1.triangles[1] = newTriangle0;
            newTriangle1.triangles[2] = curTriangle.triangles[1];
            i = 0;
            if (newTriangle1.triangles[2] != null)
            {
                while (newTriangle1.triangles[2].edges[i] != newTriangle1.edges[2])
                {
                    ++i;
                }
                newTriangle1.triangles[2].triangles[i] = newTriangle1;
            }
            // Fill in triangle 2
            newTriangle2.edges[0] = newTriangle0.edges[1];
            newTriangle2.edges[1] = newTriangle1.edges[0];
            newTriangle2.edges[2] = curTriangle.edges[2];
            newTriangle2.triangles[0] = newTriangle0;
            newTriangle2.triangles[1] = newTriangle1;
            newTriangle2.triangles[2] = curTriangle.triangles[2];
            i = 0;
            if (newTriangle2.triangles[2] != null)
            {
                while (newTriangle2.triangles[2].edges[i] != newTriangle2.edges[2])
                {
                    ++i;
                }
                newTriangle2.triangles[2].triangles[i] = newTriangle2;
            }
            // Queue the three new triangles for the Delaunay condition check
            trianglesForDelaunayCheck.Add(newTriangle0);
            trianglesForDelaunayCheck.Add(newTriangle1);
            trianglesForDelaunayCheck.Add(newTriangle2);
            curTriangle = newTriangle2;
        }
        // The point lies on an edge
        else
        {
            int e = prtCheck / 10; // Index of the edge the point landed on
            switch (prtCheck)
            {
                case 0:
                    e = 0;
                    break;
                case 10:
                    e = 1;
                    break;
                case 20:
                    e = 2;
                    break;
                default:
                    e = 3; // Will fail further down
                    break;
            }
            // The point is on the image boundary: split this triangle into two
            if (curTriangle.triangles[e] == null)
            {
                Triangle newTriangle0 = new Triangle(curTriangle.points[(e + 2) % 3], curTriangle.points[e], curPoint);
                Triangle newTriangle1 = new Triangle(curTriangle.points[e], curTriangle.points[(e + 1) % 3], curPoint);
                trianglesHashSet.Add(newTriangle0);
                trianglesHashSet.Add(newTriangle1);
                trianglesHashSet.Remove(curTriangle);
                // Fill in triangle 0
                newTriangle0.edges[0] = new Edge();
                edges.Add(newTriangle0.edges[0], new Section(newTriangle0.points[1], newTriangle0.points[2]));
                newTriangle0.edges[1] = new Edge();
                edges.Add(newTriangle0.edges[1], new Section(newTriangle0.points[0], newTriangle0.points[2]));
                newTriangle0.edges[2] = curTriangle.edges[(e + 1) % 3];
                newTriangle0.triangles[0] = newTriangle1;
                newTriangle0.triangles[1] = null;
                newTriangle0.triangles[2] = curTriangle.triangles[(e + 1) % 3];
                i = 0;
                if (newTriangle0.triangles[2] != null)
                {
                    while (newTriangle0.edges[2] != newTriangle0.triangles[2].edges[i])
                    {
                        ++i;
                    }
                    newTriangle0.triangles[2].triangles[i] = newTriangle0;
                }
                // Fill in triangle 1
                newTriangle1.edges[0] = new Edge();
                edges.Add(newTriangle1.edges[0], new Section(newTriangle1.points[1], newTriangle1.points[2]));
                newTriangle1.edges[1] = newTriangle0.edges[0];
                newTriangle1.edges[2] = curTriangle.edges[(e + 2) % 3];
                newTriangle1.triangles[0] = null;
                newTriangle1.triangles[1] = newTriangle0;
                newTriangle1.triangles[2] = curTriangle.triangles[(e + 2) % 3];
                i = 0;
                if (newTriangle1.triangles[2] != null)
                {
                    while (newTriangle1.edges[2] != newTriangle1.triangles[2].edges[i])
                    {
                        ++i;
                    }
                    newTriangle1.triangles[2].triangles[i] = newTriangle1;
                }
                // Queue the new triangles for the Delaunay condition check
                trianglesForDelaunayCheck.Add(newTriangle0);
                trianglesForDelaunayCheck.Add(newTriangle1);
                edges.Remove(curTriangle.edges[e]);
                curTriangle = newTriangle1;
            }
            // The point is not on the image boundary: split both triangles
            // sharing the edge into four
            else
            {
                Triangle curTriangle2 = curTriangle.triangles[e];
                int nEI = 0; // Index, within the adjacent triangle, of the edge the point lies on
                while (curTriangle.edges[e] != curTriangle2.edges[nEI])
                {
                    ++nEI;
                }
                Triangle newTriangle0 = new Triangle(curTriangle.points[(e + 2) % 3], curTriangle.points[e], curPoint);
                Triangle newTriangle1 = new Triangle(curTriangle.points[e], curTriangle.points[(e + 1) % 3], curPoint);
                Triangle newTriangle2 = new Triangle(curTriangle2.points[(nEI + 2) % 3], curTriangle2.points[nEI], curPoint);
                Triangle newTriangle3 = new Triangle(curTriangle2.points[nEI], curTriangle2.points[(nEI + 1) % 3], curPoint);
                trianglesHashSet.Add(newTriangle0);
                trianglesHashSet.Add(newTriangle1);
                trianglesHashSet.Add(newTriangle2);
                trianglesHashSet.Add(newTriangle3);
                trianglesHashSet.Remove(curTriangle);
                trianglesHashSet.Remove(curTriangle2);
                // Fill in triangle 0
                newTriangle0.edges[0] = new Edge();
                edges.Add(newTriangle0.edges[0], new Section(newTriangle0.points[1], newTriangle0.points[2]));
                newTriangle0.edges[1] = new Edge();
                edges.Add(newTriangle0.edges[1], new Section(newTriangle0.points[0], newTriangle0.points[2]));
                newTriangle0.edges[2] = curTriangle.edges[(e + 1) % 3];
                newTriangle0.triangles[0] = newTriangle1;
                newTriangle0.triangles[1] = newTriangle3;
                newTriangle0.triangles[2] = curTriangle.triangles[(e + 1) % 3];
                i = 0;
                if (newTriangle0.triangles[2] != null)
                {
                    while (newTriangle0.edges[2] != newTriangle0.triangles[2].edges[i])
                    {
                        ++i;
                    }
                    newTriangle0.triangles[2].triangles[i] = newTriangle0;
                }
                // Fill in triangle 1
                newTriangle1.edges[0] = new Edge();
                edges.Add(newTriangle1.edges[0], new Section(newTriangle1.points[2], newTriangle1.points[1]));
                newTriangle1.edges[1] = newTriangle0.edges[0];
                newTriangle1.edges[2] = curTriangle.edges[(e + 2) % 3];
                newTriangle1.triangles[0] = newTriangle2;
                newTriangle1.triangles[1] = newTriangle0;
                newTriangle1.triangles[2] = curTriangle.triangles[(e + 2) % 3];
                i = 0;
                if (newTriangle1.triangles[2] != null)
                {
                    while (newTriangle1.edges[2] != newTriangle1.triangles[2].edges[i])
                    {
                        ++i;
                    }
                    newTriangle1.triangles[2].triangles[i] = newTriangle1;
                }
                // Fill in triangle 2
                newTriangle2.edges[0] = new Edge();
                edges.Add(newTriangle2.edges[0], new Section(newTriangle2.points[1], newTriangle2.points[2]));
                newTriangle2.edges[1] = newTriangle1.edges[0];
                newTriangle2.edges[2] = curTriangle2.edges[(nEI + 1) % 3];
                newTriangle2.triangles[0] = newTriangle3;
                newTriangle2.triangles[1] = newTriangle1;
                newTriangle2.triangles[2] = curTriangle2.triangles[(nEI + 1) % 3];
                i = 0;
                if (newTriangle2.triangles[2] != null)
                {
                    while (newTriangle2.edges[2] != newTriangle2.triangles[2].edges[i])
                    {
                        ++i;
                    }
                    newTriangle2.triangles[2].triangles[i] = newTriangle2;
                }
                // Fill in triangle 3
                newTriangle3.edges[0] = newTriangle0.edges[1];
                newTriangle3.edges[1] = newTriangle2.edges[0];
                newTriangle3.edges[2] = curTriangle2.edges[(nEI + 2) % 3];
                newTriangle3.triangles[0] = newTriangle0;
                newTriangle3.triangles[1] = newTriangle2;
                newTriangle3.triangles[2] = curTriangle2.triangles[(nEI + 2) % 3];
                i = 0;
                if (newTriangle3.triangles[2] != null)
                {
                    while (newTriangle3.edges[2] != newTriangle3.triangles[2].edges[i])
                    {
                        ++i;
                    }
                    newTriangle3.triangles[2].triangles[i] = newTriangle3;
                }
                // Queue the new triangles for the Delaunay condition check
                trianglesForDelaunayCheck.Add(newTriangle0);
                trianglesForDelaunayCheck.Add(newTriangle1);
                trianglesForDelaunayCheck.Add(newTriangle2);
                trianglesForDelaunayCheck.Add(newTriangle3);
                edges.Remove(curTriangle.edges[e]);
                curTriangle = newTriangle3;
            }
        }
        pivotPoints.Add(curPoint);
        DelaunayBuilder(trianglesHashSet);
    }
    newPoints.Clear();
}
/// <summary>
/// Compares the sets of .dbc files in oldDir and newDir and writes a report of
/// added, removed and changed files to <paramref name="writer"/>.
/// </summary>
/// <param name="writer">Destination for the comparison report.</param>
public void Compare(TextWriter writer)
{
    var newFiles = new HashSet<string>(Directory.GetFiles(newDir, "*.dbc").Select((file) => new FileInfo(file).Name), StringComparer.InvariantCultureIgnoreCase);
    var oldFiles = new HashSet<string>(Directory.GetFiles(oldDir, "*.dbc").Select((file) => new FileInfo(file).Name), StringComparer.InvariantCultureIgnoreCase);

    // Materialize once; the original kept these as deferred queries that were
    // re-evaluated by every Count() call and second enumeration.
    var addedFiles = newFiles.Except(oldFiles).ToList();
    var removedFiles = oldFiles.Except(newFiles).ToList();
    var existingFiles = newFiles.Except(addedFiles).ToList();

    if (addedFiles.Count > 0)
    {
        writer.WriteLine("{0} files have been added:", addedFiles.Count);
        foreach (var file in addedFiles)
        {
            writer.WriteLine("\t" + new FileInfo(file).Name);
            var reader = new DBCReader(Path.Combine(newDir, file));
            writer.WriteLine("\t\tColumn Count: " + reader.ColumnCount);
            writer.WriteLine("\t\tRow Count: " + reader.RecordCount);
            writer.WriteLine("\t\tHas Strings: " + reader.HasStrings);
            if (reader.IrregularColumnSize)
            {
                writer.WriteLine("\t\tColumn Size IRREGULAR!");
            }
        }
    }

    if (removedFiles.Count > 0)
    {
        writer.WriteLine("{0} files have been deprecated (not present anymore):", removedFiles.Count);
        // BUG FIX: the original iterated addedFiles here, so the "deprecated"
        // section listed the wrong files.
        foreach (var file in removedFiles)
        {
            writer.WriteLine("\t" + new FileInfo(file).Name);
        }
    }

    writer.WriteLine();
    writer.WriteLine("#######################################");
    writer.WriteLine("Changes in files{0}:", maxTestAmount != int.MaxValue ? " - Testing with first " + maxTestAmount + " rows" : "");
    writer.WriteLine();

    foreach (var file in existingFiles)
    {
        var comparer = new DBCFileComparer(Path.Combine(oldDir, file), Path.Combine(newDir, file), maxTestAmount, null);
        if (comparer.NewReader.IrregularColumnSize)
        {
            writer.WriteLine("Skipping {0} (IRREGULAR Column Size)", file);
        }
        else if (comparer.NewReader.ColumnCount != comparer.OldReader.ColumnCount
            && (comparer.NewReader.ColumnCount < 2 || comparer.OldReader.ColumnCount < 2))
        {
            writer.WriteLine("Skipping {0} (Only 1 column or less)", file);
        }
        else
        {
            comparer.Compare(writer);
        }
    }
}
/// <summary>
/// Create an error message to describe the primary issue in an invalid solution.
/// </summary>
/// <param name="solution">A partial solution from the resolver</param>
/// <param name="availablePackages">all packages that were available for the solution</param>
/// <param name="packagesConfig">packages already installed in the project</param>
/// <param name="newPackageIds">new packages that are not already installed</param>
/// <param name="packageSources">sources the packages came from</param>
/// <returns>A user friendly diagnostic message</returns>
public static string GetDiagnosticMessage(IEnumerable<ResolverPackage> solution, IEnumerable<PackageDependencyInfo> availablePackages, IEnumerable<PackageReference> packagesConfig, IEnumerable<string> newPackageIds, IEnumerable<PackageSource> packageSources)
{
    // Absent packages cannot have error messages, so drop them (and nulls) up front.
    solution = solution.Where(candidate => candidate != null && !candidate.Absent);

    var comparer = StringComparer.OrdinalIgnoreCase;
    var allPackageIds = new HashSet<string>(solution.Select(candidate => candidate.Id), comparer);
    var newPackageIdSet = new HashSet<string>(newPackageIds, comparer);
    var installedPackageIds = new HashSet<string>(packagesConfig.Select(reference => reference.PackageIdentity.Id), comparer);

    // "Required" = explicitly requested plus already installed.
    var requiredPackageIds = new HashSet<string>(comparer);
    requiredPackageIds.UnionWith(newPackageIdSet);
    requiredPackageIds.UnionWith(installedPackageIds);
    var requiredPackages = solution.Where(candidate => requiredPackageIds.Contains(candidate.Id)).ToList();

    // New packages pulled in purely as dependencies (neither requested nor installed).
    var newDependencyPackageIds = new HashSet<string>(allPackageIds.Except(requiredPackageIds), comparer);

    // Pass 1: a target package does not satisfy the dependency constraints of a required package.
    foreach (var targetId in newPackageIdSet.OrderBy(id => id, comparer))
    {
        var firstBroken = GetPackagesWithBrokenDependenciesOnId(targetId, requiredPackages)
            .OrderBy(candidate => candidate.Id, comparer)
            .FirstOrDefault();
        if (firstBroken != null)
        {
            return GetErrorMessage(targetId, solution, availablePackages, packagesConfig, packageSources);
        }
    }

    // Pass 2: a target package is itself missing dependencies.
    foreach (var target in solution.Where(candidate => newPackageIdSet.Contains(candidate.Id))
        .OrderBy(candidate => candidate.Id, comparer))
    {
        var missingDependency = GetBrokenDependencies(target, solution)
            .OrderBy(candidate => candidate.Id, comparer)
            .FirstOrDefault();
        if (missingDependency != null)
        {
            return GetErrorMessage(missingDependency.Id, solution, availablePackages, packagesConfig, packageSources);
        }
    }

    // Pass 3: an already installed package is missing a dependency; this may
    // happen if an installed package was upgraded by the resolver.
    foreach (var target in solution.Where(candidate => installedPackageIds.Contains(candidate.Id))
        .OrderBy(candidate => candidate.Id, comparer))
    {
        var missingDependency = GetBrokenDependencies(target, solution)
            .OrderBy(candidate => candidate.Id, comparer)
            .FirstOrDefault();
        if (missingDependency != null)
        {
            return GetErrorMessage(missingDependency.Id, solution, availablePackages, packagesConfig, packageSources);
        }
    }

    // Pass 4: a new dependency has a missing dependency. Sort by distance from a
    // required package so the most useful error is reported first.
    foreach (var target in solution.Where(candidate => newDependencyPackageIds.Contains(candidate.Id))
        .OrderBy(candidate => GetLowestDistanceFromTarget(candidate.Id, requiredPackageIds, solution))
        .ThenBy(candidate => candidate.Id, comparer))
    {
        var missingDependency = GetBrokenDependencies(target, solution)
            .OrderBy(candidate => candidate.Id, comparer)
            .FirstOrDefault();
        if (missingDependency != null)
        {
            return GetErrorMessage(missingDependency.Id, solution, availablePackages, packagesConfig, packageSources);
        }
    }

    // Only reached if the inputs are invalid, or the solution has no detectable problems.
    return Strings.NoSolution;
}
/// <summary>
/// Asserts that the unit-of-work service reports exactly the expected created
/// (newbie), modified (dirtball) and deleted (goner) objects — no extras, none missing.
/// </summary>
void VerifyChanges(ICmObject[] expectedNewbies, ICmObject[] expectedDirtballs, ICmObjectId[] expectedGoners, IUnitOfWorkService m_uowService)
{
    var actualNewbies = new HashSet<ICmObjectId>();
    var actualDirtballs = new HashSet<ICmObjectOrSurrogate>(new ObjectSurrogateEquater());
    var actualGoners = new HashSet<ICmObjectId>();
    m_uowService.GatherChanges(actualNewbies, actualDirtballs, actualGoners);

    var wantedNewbies = new HashSet<ICmObjectId>(expectedNewbies.Select(obj => obj.Id));
    Assert.That(actualNewbies.Except(wantedNewbies), Is.Empty, "some unexpected newbies were found");
    Assert.That(wantedNewbies.Except(actualNewbies), Is.Empty, "some expected newbies were not found");

    var wantedDirtballs = new HashSet<ICmObjectOrSurrogate>(expectedDirtballs.Cast<ICmObjectOrSurrogate>());
    Assert.That(actualDirtballs.Except(wantedDirtballs), Is.Empty, "some unexpected dirtballs were found");
    Assert.That(wantedDirtballs.Except(actualDirtballs), Is.Empty, "some expected dirtballs were not found");

    var wantedGoners = new HashSet<ICmObjectId>(expectedGoners);
    Assert.That(actualGoners.Except(wantedGoners), Is.Empty, "some unexpected goners were found");
    Assert.That(wantedGoners.Except(actualGoners), Is.Empty, "some expected goners were not found");
}
/// <summary>
/// Dynamically emits an NHibernate proxy type deriving from <paramref name="baseType"/>
/// (or from object when the base is an interface), implementing the given interfaces
/// plus <see cref="ISerializable"/>, with all proxiable methods redirected through the
/// lazy initializer.
/// </summary>
/// <param name="baseType">The entity class (or interface) to proxy.</param>
/// <param name="baseInterfaces">Additional interfaces the proxy must implement.</param>
/// <returns>The generated proxy <see cref="TypeInfo"/>.</returns>
public TypeInfo CreateProxyType(System.Type baseType, IReadOnlyCollection<System.Type> baseInterfaces)
{
    System.Type interfaceType = null;
    if (baseType == typeof(object))
    {
        // Mapping option "proxy" allows to ask for using an interface, which switches the base type to object
        // and adds the interface to base interfaces set.
        // Avoids using object for naming the proxy, as otherwise all entities using the "proxy" option for
        // specifying an interface would have their proxies sharing the same full name.
        interfaceType = baseInterfaces.FirstOrDefault(i => i != typeof(INHibernateProxy));
    }
    var typeName = $"{(interfaceType ?? baseType).Name}Proxy";
    var assemblyName = $"{typeName}Assembly";
    var moduleName = $"{typeName}Module";

    var name = new AssemblyName(assemblyName);

    var assemblyBuilder = ProxyBuilderHelper.DefineDynamicAssembly(AppDomain.CurrentDomain, name);
    var moduleBuilder = ProxyBuilderHelper.DefineDynamicModule(assemblyBuilder, moduleName);

    const TypeAttributes typeAttributes = TypeAttributes.AutoClass | TypeAttributes.Class | TypeAttributes.Public | TypeAttributes.BeforeFieldInit;

    var interfaces = new HashSet<System.Type>
    {
        // Add the ISerializable interface so that it can be implemented
        typeof(ISerializable)
    };
    interfaces.UnionWith(baseInterfaces);
    interfaces.UnionWith(baseInterfaces.SelectMany(i => i.GetInterfaces()));
    interfaces.UnionWith(baseType.GetInterfaces());

    // Use the object as the base type
    // since we're not inheriting from any class type
    var parentType = baseType;
    if (baseType.IsInterface)
    {
        parentType = typeof(object);
        interfaces.Add(baseType);
    }

#if NETFX || NETCOREAPP2_0_OR_GREATER
    // Non-visible types cannot normally be subclassed/implemented from another
    // assembly; emit IgnoresAccessChecksTo attributes for their assemblies instead.
    var assemblyNamesToIgnoreAccessCheck = new[] { baseType }
        .Concat(interfaces).Where(i => !i.IsVisible)
        .Select(i => i.Assembly.GetName().Name)
        .Distinct();
    foreach (var a in assemblyNamesToIgnoreAccessCheck)
    {
        ProxyBuilderHelper.GenerateInstanceOfIgnoresAccessChecksToAttribute(assemblyBuilder, a);
    }
#else
    // No access-check bypass available on this target: drop non-visible interfaces.
    interfaces.RemoveWhere(i => !i.IsVisible);
#endif

    var typeBuilder = moduleBuilder.DefineType(typeName, typeAttributes, parentType, interfaces.ToArray());

    var lazyInitializerField = typeBuilder.DefineField("__lazyInitializer", LazyInitializerType, FieldAttributes.Private);
    var proxyInfoField = typeBuilder.DefineField("__proxyInfo", typeof(NHibernateProxyFactoryInfo), FieldAttributes.Private);

    ImplementConstructor(typeBuilder, parentType, lazyInitializerField, proxyInfoField);

    // Provide a custom implementation of ISerializable instead of redirecting it back to the interceptor
    foreach (var method in ProxyBuilderHelper.GetProxiableMethods(baseType, interfaces.Except(new[] { typeof(ISerializable) })))
    {
        CreateProxiedMethod(typeBuilder, method, lazyInitializerField, parentType);
    }

    ProxyBuilderHelper.MakeProxySerializable(typeBuilder);
    ImplementDeserializationConstructor(typeBuilder, parentType);
    ImplementGetObjectData(typeBuilder, proxyInfoField, lazyInitializerField);

    var proxyType = typeBuilder.CreateTypeInfo();

    ProxyBuilderHelper.Save(assemblyBuilder);

    return (proxyType);
}
/// <summary>
/// Computes the modules the analysis database still needs, sorted case-insensitively.
/// </summary>
/// <param name="existingDatabase">Module names already present in the database.</param>
private string[] GetMissingModules(HashSet<string> existingDatabase)
{
    var searchPaths = PythonTypeDatabase.GetCachedDatabaseSearchPaths(DatabasePath);
    if (searchPaths == null)
    {
        // No cached search paths means our database is out of date: report the
        // whole existing database minus the required built-ins.
        var stale = existingDatabase.Except(RequiredBuiltinModules);
        return stale.OrderBy(name => name, StringComparer.InvariantCultureIgnoreCase).ToArray();
    }

    // Flatten every expected module from the cached search paths, always
    // including the required built-ins.
    var expectedModuleNames = PythonTypeDatabase
        .GetDatabaseExpectedModules(_config.Version, searchPaths)
        .SelectMany()
        .Select(mp => mp.ModuleName)
        .Concat(RequiredBuiltinModules);

    return expectedModuleNames
        .Where(name => !existingDatabase.Contains(name))
        .OrderBy(name => name, StringComparer.InvariantCultureIgnoreCase)
        .ToArray();
}
/// <summary>
/// Detects loops within the given control-flow scope, rebuilding them as
/// <c>ILWhileLoop</c> nodes (conditional when the header is a single brtrue,
/// while(true) otherwise) and recursing into each loop body.
/// </summary>
/// <param name="scope">Nodes to analyze; consumed nodes are removed from a local copy.</param>
/// <param name="entryPoint">Entry node of the scope.</param>
/// <param name="excludeEntryPoint">If true, the entry point itself is not treated as a loop header.</param>
List<ILNode> FindLoops(HashSet<ControlFlowNode> scope, ControlFlowNode entryPoint, bool excludeEntryPoint)
{
    List<ILNode> result = new List<ILNode>();

    // Do not modify entry data
    scope = new HashSet<ControlFlowNode>(scope);

    Queue<ControlFlowNode> agenda = new Queue<ControlFlowNode>();
    agenda.Enqueue(entryPoint);
    while (agenda.Count > 0)
    {
        ControlFlowNode node = agenda.Dequeue();

        // If the node is a loop header (it is in its own dominance frontier)
        if (scope.Contains(node)
            && node.DominanceFrontier.Contains(node)
            && (node != entryPoint || !excludeEntryPoint))
        {
            HashSet<ControlFlowNode> loopContents = FindLoopContent(scope, node);

            // If the first expression is a loop condition
            ILBasicBlock basicBlock = (ILBasicBlock)node.UserData;
            ILExpression condExpr;
            ILLabel trueLabel;
            ILLabel falseLabel;
            // It has to be just brtrue - any preceding code would introduce goto
            if (basicBlock.MatchSingleAndBr(ILCode.Brtrue, out trueLabel, out condExpr, out falseLabel))
            {
                ControlFlowNode trueTarget;
                labelToCfNode.TryGetValue(trueLabel, out trueTarget);
                ControlFlowNode falseTarget;
                labelToCfNode.TryGetValue(falseLabel, out falseTarget);

                // If one point inside the loop and the other outside
                if ((!loopContents.Contains(trueTarget) && loopContents.Contains(falseTarget))
                    || (loopContents.Contains(trueTarget) && !loopContents.Contains(falseTarget)))
                {
                    loopContents.RemoveOrThrow(node);
                    scope.RemoveOrThrow(node);

                    // If false means enter the loop
                    if (loopContents.Contains(falseTarget) || falseTarget == node)
                    {
                        // Negate the condition so that true always enters the loop
                        condExpr = new ILExpression(ILCode.LogicNot, null, condExpr);
                        ILLabel tmp = trueLabel;
                        trueLabel = falseLabel;
                        falseLabel = tmp;
                    }

                    ControlFlowNode postLoopTarget;
                    labelToCfNode.TryGetValue(falseLabel, out postLoopTarget);
                    if (postLoopTarget != null)
                    {
                        // Pull more nodes into the loop: everything the header
                        // dominates that is not dominated by the post-loop target
                        HashSet<ControlFlowNode> postLoopContents = FindDominatedNodes(scope, postLoopTarget);
                        var pullIn = scope.Except(postLoopContents).Where(n => node.Dominates(n));
                        loopContents.UnionWith(pullIn);
                    }

                    // Use loop to implement the brtrue
                    basicBlock.Body.RemoveTail(ILCode.Brtrue, ILCode.Br);
                    basicBlock.Body.Add(new ILWhileLoop()
                    {
                        Condition = condExpr,
                        BodyBlock = new ILBlock()
                        {
                            EntryGoto = new ILExpression(ILCode.Br, trueLabel),
                            Body = FindLoops(loopContents, node, false)
                        }
                    });
                    basicBlock.Body.Add(new ILExpression(ILCode.Br, falseLabel));
                    result.Add(basicBlock);

                    scope.ExceptWith(loopContents);
                }
            }

            // Fallback method: while(true) — used when the header could not be
            // turned into a conditional loop above
            if (scope.Contains(node))
            {
                result.Add(new ILBasicBlock()
                {
                    Body = new List<ILNode>()
                    {
                        new ILLabel() { Name = "Loop_" + (nextLabelIndex++) },
                        new ILWhileLoop()
                        {
                            BodyBlock = new ILBlock()
                            {
                                EntryGoto = new ILExpression(ILCode.Br, (ILLabel)basicBlock.Body.First()),
                                Body = FindLoops(loopContents, node, true)
                            }
                        },
                    },
                });
                scope.ExceptWith(loopContents);
            }
        }

        // Using the dominator tree should ensure we find the widest loop first
        foreach (var child in node.DominatorTreeChildren)
        {
            agenda.Enqueue(child);
        }
    }

    // Add whatever is left
    foreach (var node in scope)
    {
        result.Add((ILNode)node.UserData);
    }
    scope.Clear();

    return (result);
}
/// <summary>
/// Populates <c>_references</c> from the lock file: one item per compile-time asset of
/// every resolved package, plus (when enabled) framework-assembly references for assemblies
/// not already covered by a regular reference.
/// </summary>
private void GetReferences(JObject lockFile)
{
    var target = GetTargetOrAttemptFallback(lockFile, needsRuntimeIdentifier: false);

    // Case-insensitive bookkeeping: framework assemblies requested by packages, and the
    // file names (without extension) of the regular references already emitted.
    var frameworkAssemblyNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    var regularReferenceFileNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

    foreach (var package in target)
    {
        // Package keys have the form "name/version".
        var keyParts = package.Key.Split('/');
        var name = keyParts[0];
        var version = keyParts[1];

        Log.LogMessageFromResources(MessageImportance.Low, "ResolvedReferencesFromPackage", name);

        foreach (var item in CreateItems(name, version, package.Value, NuGetAssetTypeCompile))
        {
            _references.Add(item);
            regularReferenceFileNames.Add(Path.GetFileNameWithoutExtension(item.ItemSpec));
        }

        if (!IncludeFrameworkReferences)
        {
            continue;
        }

        var frameworkAssemblies = package.Value["frameworkAssemblies"] as JArray;
        if (frameworkAssemblies == null)
        {
            continue;
        }

        foreach (var frameworkAssembly in frameworkAssemblies.OfType<JToken>())
        {
            frameworkAssemblyNames.Add((string)frameworkAssembly);
        }
    }

    // Emit framework references only for assemblies not already referenced directly.
    foreach (var frameworkReference in frameworkAssemblyNames.Except(regularReferenceFileNames, StringComparer.OrdinalIgnoreCase))
    {
        _references.Add(new TaskItem(frameworkReference));
    }
}
// Second copy of the loop-detection pass: finds loops inside 'scope' (starting at
// 'entryPoint') and rewrites each into an ILWhileLoop node.  'scope' is copied, so the
// caller's set is left untouched.  'excludeEntryPoint' prevents the recursive calls from
// rediscovering the loop whose header is 'entryPoint'.
List<ILNode> FindLoops(HashSet<ControlFlowNode> scope, ControlFlowNode entryPoint, bool excludeEntryPoint)
{
    List<ILNode> result = new List<ILNode>();

    // Do not modify entry data
    scope = new HashSet<ControlFlowNode>(scope);

    Queue<ControlFlowNode> agenda = new Queue<ControlFlowNode>();
    agenda.Enqueue(entryPoint);
    while (agenda.Count > 0) {
        ControlFlowNode node = agenda.Dequeue();

        // Loop header test: the node is in its own dominance frontier (a back edge exists).
        if (scope.Contains(node)
            && node.DominanceFrontier.Contains(node)
            && (node != entryPoint || !excludeEntryPoint))
        {
            HashSet<ControlFlowNode> loopContents = FindLoopContent(scope, node);

            // If the first expression is a loop condition
            ILBasicBlock basicBlock = (ILBasicBlock)node.UserData;
            ILExpression condExpr;
            ILLabel trueLabel;
            ILLabel falseLabel;
            // It has to be just brtrue - any preceding code would introduce goto
            if (basicBlock.MatchSingleAndBr(ILCode.Brtrue, out trueLabel, out condExpr, out falseLabel)) {
                ControlFlowNode trueTarget;
                labelToCfNode.TryGetValue(trueLabel, out trueTarget);
                ControlFlowNode falseTarget;
                labelToCfNode.TryGetValue(falseLabel, out falseTarget);

                // Exactly one branch target inside the loop => header becomes while(cond).
                if ((!loopContents.Contains(trueTarget) && loopContents.Contains(falseTarget)) ||
                    (loopContents.Contains(trueTarget) && !loopContents.Contains(falseTarget))
                )
                {
                    loopContents.RemoveOrThrow(node);
                    scope.RemoveOrThrow(node);

                    // If the 'false' edge enters the loop, flip the condition and the labels
                    // so that 'true' always means "continue looping".
                    if (loopContents.Contains(falseTarget) || falseTarget == node) {
                        // Negate the condition
                        condExpr = new ILExpression(ILCode.LogicNot, null, condExpr);
                        ILLabel tmp = trueLabel;
                        trueLabel = falseLabel;
                        falseLabel = tmp;
                    }

                    ControlFlowNode postLoopTarget;
                    labelToCfNode.TryGetValue(falseLabel, out postLoopTarget);
                    if (postLoopTarget != null) {
                        // Pull more nodes into the loop: everything the header dominates
                        // that is not part of the post-loop code.
                        HashSet<ControlFlowNode> postLoopContents = FindDominatedNodes(scope, postLoopTarget);
                        var pullIn = scope.Except(postLoopContents).Where(n => node.Dominates(n));
                        loopContents.UnionWith(pullIn);
                    }

                    // Use loop to implement the brtrue
                    basicBlock.Body.RemoveTail(ILCode.Brtrue, ILCode.Br);
                    basicBlock.Body.Add(new ILWhileLoop() {
                        Condition = condExpr,
                        BodyBlock = new ILBlock() {
                            EntryGoto = new ILExpression(ILCode.Br, trueLabel),
                            Body = FindLoops(loopContents, node, false)   // nested loops
                        }
                    });
                    basicBlock.Body.Add(new ILExpression(ILCode.Br, falseLabel));
                    result.Add(basicBlock);

                    scope.ExceptWith(loopContents);
                }
            }

            // Fallback method: while(true) - used when the conditional rewrite above
            // did not apply (the node is then still present in scope).
            if (scope.Contains(node)) {
                result.Add(new ILBasicBlock() {
                    Body = new List<ILNode>() {
                        new ILLabel() { Name = "Loop_" + (nextLabelIndex++) },
                        new ILWhileLoop() {
                            BodyBlock = new ILBlock() {
                                EntryGoto = new ILExpression(ILCode.Br, (ILLabel)basicBlock.Body.First()),
                                // Exclude the entry point so the recursion does not refind this loop.
                                Body = FindLoops(loopContents, node, true)
                            }
                        },
                    },
                });
                scope.ExceptWith(loopContents);
            }
        }

        // Using the dominator tree should ensure we find the widest loop first
        foreach (var child in node.DominatorTreeChildren) {
            agenda.Enqueue(child);
        }
    }

    // Add whatever is left
    foreach (var node in scope) {
        result.Add((ILNode)node.UserData);
    }
    scope.Clear();

    return result;
}
// Runs FindReferences for 'entity' over every project in the solution and compares the
// references actually found against the expected set stored in 'referenceDict', printing
// any mismatch to the console.  Also accumulates per-entity-type timings and counts.
void TestFindReferences(IEntity entity)
{
    if (IgnoreEntity(entity)) {
        return;
    }
    FindReferences fr = new FindReferences();
    fr.FindTypeReferencesEvenIfAliased = true;
    Stopwatch w = new Stopwatch();
    var searchScopes = fr.GetSearchScopes(entity);
    foreach (var project in solution.Projects) {
        w.Restart();
        HashSet<AstNode> foundReferences = new HashSet<AstNode>();
        // Files that may contain a reference: "interesting" per the search scope, and
        // whose raw text actually contains the search term.
        var interestingFiles = new HashSet<CSharpFile>();
        foreach (var searchScope in searchScopes) {
            foreach (var unresolvedFile in fr.GetInterestingFiles(searchScope, project.Compilation)) {
                var file = project.Files.Single(f => f.FileName == unresolvedFile.FileName);
                Debug.Assert(file.UnresolvedTypeSystemForFile == unresolvedFile);
                // Skip file if it doesn't contain the search term
                if (searchScope.SearchTerm != null && file.OriginalText.IndexOf(searchScope.SearchTerm, StringComparison.Ordinal) < 0) {
                    continue;
                }
                interestingFiles.Add(file);
            }
        }
        foreach (var file in interestingFiles) {
            fr.FindReferencesInFile(searchScopes, file.UnresolvedTypeSystemForFile, file.SyntaxTree, project.Compilation,
                delegate(AstNode node, ResolveResult result) {
                    foundReferences.Add(node);   // collect every reference node reported
                }, CancellationToken.None);
        }
        w.Stop();
        // Accumulate elapsed time per entity type.
        if (timings.ContainsKey(entity.EntityType)) {
            timings[entity.EntityType] += w.Elapsed;
        } else {
            timings[entity.EntityType] = w.Elapsed;
        }
        IEntity importedEntity = project.Compilation.Import(entity);
        HashSet<AstNode> expectedReferences;
        if (importedEntity == null || !referenceDict.TryGetValue(importedEntity, out expectedReferences)) {
            if (foundReferences.Any()) {
                // There aren't any expected references stored, but we found some references anyways:
                Console.WriteLine();
                Console.WriteLine("Entity not in reference dictionary: " + entity);
            }
            return;
        }
        // Report (only) the first unexpected and the first missing reference.
        if (foundReferences.Except(expectedReferences).Any()) {
            Console.WriteLine();
            Console.WriteLine("Reference mismatch for " + entity + ":");
            var n = foundReferences.Except(expectedReferences).First();
            Console.WriteLine("Found unexpected reference " + n + " (" + n.GetRegion() + ")");
        }
        if (expectedReferences.Except(foundReferences).Any()) {
            Console.WriteLine();
            Console.WriteLine("Reference mismatch for " + entity + ":");
            var n = expectedReferences.Except(foundReferences).First();
            Console.WriteLine("Did not find expected reference " + n + " (" + n.GetRegion() + ")");
        }
    }
    // Count how many entities of each type were tested.
    if (entityCount.ContainsKey(entity.EntityType)) {
        entityCount[entity.EntityType]++;
    } else {
        entityCount[entity.EntityType] = 1;
    }
}
/// <summary>
/// Populates <c>_references</c> with one item per compile-time asset of every resolved
/// package, plus (when enabled) framework-assembly reference items, tagged with NuGet
/// metadata, for assemblies not already covered by a regular reference.
/// </summary>
private void GetReferences(JObject lockFile)
{
    var target = GetTargetOrAttemptFallback(lockFile, needsRuntimeIdentifier: false);

    // Case-insensitive bookkeeping: framework assemblies requested by packages, and the
    // file names (without extension) of the regular references already emitted.
    var frameworkAssemblyNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    var regularReferenceFileNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

    foreach (var package in GetPackagesFromTarget(lockFile, target))
    {
        foreach (var item in CreateItems(package, NuGetAssetTypeCompile, includePdbs: false))
        {
            _references.Add(item);
            regularReferenceFileNames.Add(Path.GetFileNameWithoutExtension(item.ItemSpec));
        }

        if (!IncludeFrameworkReferences)
        {
            continue;
        }

        var frameworkAssemblies = package.TargetObject["frameworkAssemblies"] as JArray;
        if (frameworkAssemblies == null)
        {
            continue;
        }

        foreach (var frameworkAssembly in frameworkAssemblies.OfType<JToken>())
        {
            frameworkAssemblyNames.Add((string)frameworkAssembly);
        }
    }

    // Framework references are only added for assemblies not referenced directly.
    foreach (var frameworkReference in frameworkAssemblyNames.Except(regularReferenceFileNames, StringComparer.OrdinalIgnoreCase))
    {
        var item = new TaskItem(frameworkReference);
        item.SetMetadata(NuGetIsFrameworkReference, "true");
        item.SetMetadata(NuGetSourceType, NuGetSourceType_Package);
        _references.Add(item);
    }
}
/// <summary>
/// Exercises FindReferences for the given entity across every project in the solution and
/// prints any difference between the references found and the expected set recorded in
/// referenceDict.  Per-entity-type timings and counts are accumulated as a side effect.
/// </summary>
void TestFindReferences(IEntity entity)
{
    if (IgnoreEntity(entity))
        return;
    var finder = new FindReferences();
    finder.FindTypeReferencesEvenIfAliased = true;
    var stopwatch = new Stopwatch();
    var searchScopes = finder.GetSearchScopes(entity);
    foreach (var project in solution.Projects) {
        stopwatch.Restart();
        var foundReferences = new HashSet<AstNode>();
        // Candidate files: deemed interesting by the search scope AND actually containing
        // the search term in their raw text.
        var candidateFiles = new HashSet<CSharpFile>();
        foreach (var searchScope in searchScopes) {
            foreach (var unresolvedFile in finder.GetInterestingFiles(searchScope, project.Compilation)) {
                var file = project.Files.Single(f => f.FileName == unresolvedFile.FileName);
                Debug.Assert(file.UnresolvedTypeSystemForFile == unresolvedFile);
                // Skip file if it doesn't contain the search term
                bool containsTerm = searchScope.SearchTerm == null
                    || file.OriginalText.IndexOf(searchScope.SearchTerm, StringComparison.Ordinal) >= 0;
                if (containsTerm)
                    candidateFiles.Add(file);
            }
        }
        foreach (var file in candidateFiles) {
            finder.FindReferencesInFile(searchScopes, file.UnresolvedTypeSystemForFile, file.SyntaxTree,
                project.Compilation,
                (AstNode node, ResolveResult result) => foundReferences.Add(node),
                CancellationToken.None);
        }
        stopwatch.Stop();
        // Accumulate elapsed time per entity type.
        TimeSpan existingTime;
        if (timings.TryGetValue(entity.EntityType, out existingTime)) {
            timings[entity.EntityType] = existingTime + stopwatch.Elapsed;
        } else {
            timings[entity.EntityType] = stopwatch.Elapsed;
        }
        IEntity importedEntity = project.Compilation.Import(entity);
        HashSet<AstNode> expectedReferences;
        if (importedEntity == null || !referenceDict.TryGetValue(importedEntity, out expectedReferences)) {
            if (foundReferences.Any()) {
                // There aren't any expected references stored, but we found some references anyways:
                Console.WriteLine();
                Console.WriteLine("Entity not in reference dictionary: " + entity);
            }
            return;
        }
        // Report only the first unexpected and the first missing reference.
        var unexpected = foundReferences.Except(expectedReferences).ToList();
        if (unexpected.Count > 0) {
            Console.WriteLine();
            Console.WriteLine("Reference mismatch for " + entity + ":");
            var n = unexpected[0];
            Console.WriteLine("Found unexpected reference " + n + " (" + n.GetRegion() + ")");
        }
        var missing = expectedReferences.Except(foundReferences).ToList();
        if (missing.Count > 0) {
            Console.WriteLine();
            Console.WriteLine("Reference mismatch for " + entity + ":");
            var n = missing[0];
            Console.WriteLine("Did not find expected reference " + n + " (" + n.GetRegion() + ")");
        }
    }
    // Count tested entities per type (defaults to 0 when the key is absent).
    int count;
    entityCount.TryGetValue(entity.EntityType, out count);
    entityCount[entity.EntityType] = count + 1;
}
/// <summary>
/// Enumerates the user tables of the SQLite database behind <paramref name="connection"/>,
/// restricted by <paramref name="tables"/> (selection logic is in AllowsTable).  Each yielded
/// DatabaseTable is populated with its columns, primary key, unique constraints and indexes.
/// Requested tables that were never matched are reported via MissingTableWarning.
/// Note: this is a lazy iterator - nothing runs until it is enumerated.
/// </summary>
private IEnumerable<DatabaseTable> GetTables(DbConnection connection, IEnumerable<string> tables)
{
    // Requested names vs. names actually matched; both compared case-insensitively.
    var tablesToSelect = new HashSet<string>(tables.ToList(), StringComparer.OrdinalIgnoreCase);
    var selectedTables = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    using (var command = connection.CreateCommand()) {
        // Select ordinary tables, excluding SQLite internals (sqlite_*), the migrations
        // history table and SpatiaLite bookkeeping tables.
        command.CommandText = new StringBuilder()
            .AppendLine("SELECT \"name\"")
            .AppendLine("FROM \"sqlite_master\"")
            .Append("WHERE \"type\" = 'table' AND instr(\"name\", 'sqlite_') <> 1 AND \"name\" NOT IN ('")
            .Append(HistoryRepository.DefaultTableName)
            .Append("', 'ElementaryGeometries', 'geometry_columns', 'geometry_columns_auth', ")
            .Append("'geometry_columns_field_infos', 'geometry_columns_statistics', 'geometry_columns_time', ")
            .Append("'spatial_ref_sys', 'spatial_ref_sys_aux', 'SpatialIndex', 'spatialite_history', ")
            .Append("'sql_statements_log', 'views_geometry_columns', 'views_geometry_columns_auth', ")
            .Append("'views_geometry_columns_field_infos', 'views_geometry_columns_statistics', ")
            .Append("'virts_geometry_columns', 'virts_geometry_columns_auth', ")
            .AppendLine("'virts_geometry_columns_field_infos', 'virts_geometry_columns_statistics');")
            .ToString();
        using (var reader = command.ExecuteReader()) {
            while (reader.Read()) {
                var name = reader.GetString(0);
                // AllowsTable also records the match into selectedTables (inferred from
                // the missing-table check below - confirm in AllowsTable).
                if (!AllowsTable(tablesToSelect, selectedTables, name)) {
                    continue;
                }
                _logger.TableFound(name);
                var table = new DatabaseTable {
                    Name = name
                };
                foreach (var column in GetColumns(connection, name)) {
                    column.Table = table;
                    table.Columns.Add(column);
                }
                var primaryKey = GetPrimaryKey(connection, name, table.Columns);
                if (primaryKey != null) {
                    primaryKey.Table = table;
                    table.PrimaryKey = primaryKey;
                }
                foreach (var uniqueConstraints in GetUniqueConstraints(connection, name, table.Columns)) {
                    uniqueConstraints.Table = table;
                    table.UniqueConstraints.Add(uniqueConstraints);
                }
                foreach (var index in GetIndexes(connection, name, table.Columns)) {
                    index.Table = table;
                    table.Indexes.Add(index);
                }
                yield return (table);
            }
        }
    }
    // Warn about every explicitly requested table that was never found.
    foreach (var table in tablesToSelect.Except(selectedTables, StringComparer.OrdinalIgnoreCase)) {
        _logger.MissingTableWarning(table);
    }
}
/// <summary>
/// Returns the first directory under the repo-holding folder that was not present before
/// cloning, or null when no new directory appeared.
/// </summary>
private string GetNewClonedDirectoryOrNulIfNoneCloned(IEnumerable<string> directoriesThatExistedBeforeCloning)
{
    var preExisting = new HashSet<string>(directoriesThatExistedBeforeCloning);
    foreach (var directory in Directory.GetDirectories(_repoHoldingFolder))
    {
        if (!preExisting.Contains(directory))
        {
            return directory;
        }
    }
    return null;
}
/// <summary>
/// Produces the final candidate ranking.  The structure - sort pairwise results by
/// majority, lock them in while skipping cycles, then repeatedly extract the source of the
/// resulting graph - follows a ranked-pairs (Tideman-style) method.
/// </summary>
public override T[] Rank()
{
    //Tally
    var d = CalcD();
    //Sort: strongest majorities first, ties broken by smallest opposition.
    var pairs = calcPairs(d);
    Tuple<T, T>[] keys = pairs.Keys.ToArray();
    var sorted = keys.OrderByDescending(x => pairs[x].majority).ThenBy(x => pairs[x].opposition).ToArray();
    //Lock
    List<Tuple<T, T>> locked = new List<Tuple<T, T>>();
    foreach (var pair in sorted) {
        //Check for circularity
        //This is done by seeing if the loser is already locked in as a winner.
        //If so, skip this record if this pair's majority is greater than the loser's already locked in.
        //NOTE(review): this is a local heuristic, not a full graph-cycle search - confirm it
        //rejects every cycle for the inputs this class accepts.
        bool circularity = false;
        foreach (var l in locked) {
            if (pair.Item2.CompareTo(l.Item1) == 0) {
                if (pairs[pair].majority > pairs[l].majority) {
                    circularity = true;
                    break;
                }
            }
        }
        if (!circularity) {
            locked.Add(pair);
        }
    }
    List<T> ranked = new List<T>();
    //Do the following until you've exhausted the list
    while (locked.Count > 0) {
        //Find source of graph
        //The source node has no entry points (find the winner who never lost)
        HashSet<T> winners = new HashSet<T>(locked.Select(x => x.Item1));
        HashSet<T> losers = new HashSet<T>(locked.Select(x => x.Item2));
        T[] sources = winners.Except(losers).ToArray();
        if (sources.Length != 1) {
            throw new InvalidOperationException("There was more than one source of the graph. This should never happen.");
        }
        T source = sources[0];
        //Add that winner to the rankings
        ranked.Add(source);
        //Now remove all entries from `locked` where `source` won and find the next source
        locked.RemoveAll(x => x.Item1.Equals(source));
    }
    //Check for missing candidates.
    //If found, add them to the bottom of the ranking in the order provided to the constructor.
    //Until I can expand the test cases, I'm restricting this to only missing one candidate.
    //This happens in unanimous cases, at least.
    Trace.Assert(candidates.Count - ranked.Count <= 1);
    if (ranked.Count < candidates.Count) {
        foreach (var c in candidates) {
            if (!ranked.Contains(c)) {
                ranked.Add(c);
            }
        }
    }
    return (ranked.ToArray());
}
/// <summary>
/// Computes the conflicts that would exist after applying the given change set,
/// without actually applying it.
/// </summary>
/// <param name="registry">Registry to query for installed modules and replacements.</param>
/// <param name="change_set">Pending install/remove/update/replace changes.</param>
/// <param name="ksp_version">Game version criteria for module compatibility.</param>
/// <returns>Map from each conflicting mod to a description of its conflict.</returns>
public static Dictionary<GUIMod, string> ComputeConflictsFromModList(IRegistryQuerier registry, IEnumerable<ModChange> change_set, KspVersionCriteria ksp_version)
{
    var modules_to_install = new HashSet<string>();
    var modules_to_remove = new HashSet<string>();
    // Resolve leniently: we want the full conflict list rather than an early failure.
    var options = new RelationshipResolverOptions {
        without_toomanyprovides_kraken = true,
        proceed_with_inconsistencies = true,
        without_enforce_consistency = true,
        with_recommends = false
    };
    // Partition the change set into identifiers to install and identifiers to remove.
    foreach (var change in change_set) {
        switch (change.ChangeType) {
            case GUIModChangeType.None:
                break;
            case GUIModChangeType.Install:
                modules_to_install.Add(change.Mod.identifier);
                break;
            case GUIModChangeType.Remove:
                modules_to_remove.Add(change.Mod.identifier);
                break;
            case GUIModChangeType.Update:
                break;
            case GUIModChangeType.Replace:
                // A replacement removes one module and installs another.
                ModuleReplacement repl = registry.GetReplacement(change.Mod, ksp_version);
                if (repl != null) {
                    modules_to_remove.Add(repl.ToReplace.identifier);
                    modules_to_install.Add(repl.ReplaceWith.identifier);
                }
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
    }
    // Only check mods that would exist after the changes are made.
    IEnumerable<CkanModule> installed = registry.InstalledModules.Where(
        im => !modules_to_remove.Contains(im.Module.identifier)
    ).Select(im => im.Module);
    // Convert ONLY modules_to_install with CkanModule.FromIDandVersion,
    // because it may not find already-installed modules.
    IEnumerable<CkanModule> mods_to_check = installed.Union(
        modules_to_install.Except(modules_to_remove).Select(
            name => CkanModule.FromIDandVersion(registry, name, ksp_version)
        )
    );
    var resolver = new RelationshipResolver(
        mods_to_check,
        change_set.Where(ch => ch.ChangeType == GUIModChangeType.Remove)
            .Select(ch => ch.Mod),
        options,
        registry,
        ksp_version
    );
    return (resolver.ConflictList.ToDictionary(
        item => new GUIMod(item.Key, registry, ksp_version),
        item => item.Value));
}
/// <summary>
/// Builds the asset bundles.
/// </summary>
/// <returns>True if the build succeeded; otherwise false.</returns>
public bool BuildAssetBundles()
{
    try {
        // Load the manifest of the previous build (if any) so we can diff against it.
        AssetBundleManifest oldManifest = null;
        var manifestPath = Path.Combine(BundleOutputPath, ActualBuildTarget.ToString());
        if (File.Exists(manifestPath)) {
            var manifestAssetBundle = AssetBundle.LoadFromFile(manifestPath);
            oldManifest = manifestAssetBundle ? manifestAssetBundle.LoadAsset<AssetBundleManifest>("assetbundlemanifest") : null;
        }
        UnityEngine.Debug.Log(kLogType + "BuildAssetBundles is started.");
        Directory.CreateDirectory(BundleOutputPath);
        // Deterministic builds keep bundle content/hashes stable between runs.
        var opt = (BuildAssetBundleOptions)_bundleOptions | BuildAssetBundleOptions.DeterministicAssetBundle;
        var newManifest = BuildPipeline.BuildAssetBundles(BundleOutputPath, opt, ActualBuildTarget);
        var sb = new StringBuilder(kLogType + "AssetBundle report");
        string[] array;
        if (oldManifest) {
            // Log the differences against the previous build.
            var oldBundles = new HashSet<string>(oldManifest ? oldManifest.GetAllAssetBundles() : new string[] { });
            var newBundles = new HashSet<string>(newManifest.GetAllAssetBundles());
            // Added bundles
            array = newBundles.Except(oldBundles).ToArray();
            sb.AppendFormat("\n[Added]: {0}\n", array.Length);
            foreach (string bundleName in array) {
                sb.AppendLine(" > " + bundleName);
            }
            // Deleted bundles
            array = oldBundles.Except(newBundles).ToArray();
            sb.AppendFormat("\n[Deleted]: {0}\n", array.Length);
            foreach (string bundleName in array) {
                sb.AppendLine(" > " + bundleName);
            }
            // Updated bundles: present in both builds but with a different hash.
            array = oldBundles
                .Intersect(newBundles)
                .Where(x => !Hash128.Equals(oldManifest.GetAssetBundleHash(x), newManifest.GetAssetBundleHash(x)))
                .ToArray();
            sb.AppendFormat("\n[Updated]: {0}\n", array.Length);
            foreach (string bundleName in array) {
                sb.AppendLine(" > " + bundleName);
            }
        } else {
            // No previous manifest: report every bundle as newly added.
            array = newManifest.GetAllAssetBundles();
            sb.AppendFormat("\n[Added]: {0}\n", array.Length);
            foreach (string bundleName in array) {
                sb.AppendLine(" > " + bundleName);
            }
        }
        UnityEngine.Debug.Log(sb);
        if (_copyToStreamingAssets) {
            string copyPath = Path.Combine(Application.streamingAssetsPath, BundleOutputPath);
            // CreateDirectory ensures the parent folders exist; the target itself is then
            // deleted so CopyFileOrDirectory does not hit an already-existing directory.
            Directory.CreateDirectory(copyPath);
            if (Directory.Exists(copyPath)) {
                FileUtil.DeleteFileOrDirectory(copyPath);
            }
            FileUtil.CopyFileOrDirectory(BundleOutputPath, copyPath);
        }
        UnityEngine.Debug.Log(kLogType + "BuildAssetBundles is finished successfuly.");
        return (true);
    } catch (System.Exception e) {
        UnityEngine.Debug.LogError(kLogType + "BuildAssetBundles is failed : " + e.Message);
        return (false);
    }
}