private static double FindMinimalCost(HashSet<ISet<int>> allTrees)
{
    // Kruskal's algorithm: take edges in ascending weight order and merge the
    // disjoint trees that each edge connects.
    Array.Sort(paths, (a, b) => a.Item3.CompareTo(b.Item3));

    double totalCost = 0;
    foreach (var edge in paths)
    {
        var firstTree = allTrees.First(t => t.Contains(edge.Item1));
        var secondTree = allTrees.First(t => t.Contains(edge.Item2));

        // Both endpoints already connected - taking this edge would form a cycle.
        if (firstTree.Equals(secondTree))
        {
            continue;
        }

        totalCost += edge.Item3;
        firstTree.UnionWith(secondTree);
        allTrees.Remove(secondTree);

        // Small optimization: a single remaining tree means we are done.
        if (allTrees.Count == 1)
        {
            break;
        }
    }

    return totalCost;
}
/// <summary>
/// Selects the action to execute for the current request. Candidate actions are
/// matched by subaction name, then action name, then by the HTTP method name
/// appearing in the action name; the final choice is narrowed by route/query
/// parameters.
/// </summary>
/// <param name="controllerContext">Context of the current request.</param>
/// <returns>The single matching action descriptor.</returns>
/// <exception cref="HttpResponseException">404 when no action matches, 300 when more than one does.</exception>
public override HttpActionDescriptor SelectAction(HttpControllerContext controllerContext)
{
    object actionName, subactionName;
    var hasActionName = controllerContext.RouteData.Values.TryGetValue("action", out actionName);
    var hasSubActionName = controllerContext.RouteData.Values.TryGetValue("subaction", out subactionName);
    var method = controllerContext.Request.Method;

    // Reflect over the controller's public instance methods and keep the valid actions.
    var allMethods = controllerContext.ControllerDescriptor.ControllerType.GetMethods(
        BindingFlags.Instance | BindingFlags.Public);
    var validMethods = Array.FindAll(allMethods, IsValidActionMethod);

    var actionDescriptors = new HashSet<ReflectedHttpActionDescriptor>();
    foreach (
        var actionDescriptor in
            validMethods.Select(m => new ReflectedHttpActionDescriptor(controllerContext.ControllerDescriptor, m)))
    {
        actionDescriptors.Add(actionDescriptor);

        // Remember each action's required primitive parameter names so they can
        // later be matched against route/query string values.
        this.actionParams.Add(
            actionDescriptor,
            actionDescriptor.ActionBinding.ParameterBindings
                .Where(b => !b.Descriptor.IsOptional && b.Descriptor.ParameterType.UnderlyingSystemType.IsPrimitive)
                .Select(b => b.Descriptor.Prefix ?? b.Descriptor.ParameterName)
                .ToArray());
    }

    // Fix: use StringComparison.OrdinalIgnoreCase instead of ToLowerInvariant()
    // round-trips - no throwaway string allocations and the intended
    // case-insensitive comparison is explicit.
    IEnumerable<ReflectedHttpActionDescriptor> actionsFoundSoFar;
    if (hasSubActionName)
    {
        actionsFoundSoFar = actionDescriptors.Where(
            i => string.Equals(i.ActionName, subactionName.ToString(), StringComparison.OrdinalIgnoreCase)
                 && i.SupportedHttpMethods.Contains(method)).ToArray();
    }
    else if (hasActionName)
    {
        actionsFoundSoFar = actionDescriptors.Where(
            i => string.Equals(i.ActionName, actionName.ToString(), StringComparison.OrdinalIgnoreCase)
                 && i.SupportedHttpMethods.Contains(method)).ToArray();
    }
    else
    {
        // No explicit name: match the HTTP method name inside the action name (e.g. "GetUsers").
        actionsFoundSoFar = actionDescriptors.Where(
            i => i.ActionName.IndexOf(method.ToString(), StringComparison.OrdinalIgnoreCase) >= 0
                 && i.SupportedHttpMethods.Contains(method)).ToArray();
    }

    // Fix: materialize once - the original re-enumerated the result for
    // Any(), Count() and FirstOrDefault().
    var actionsFound = FindActionUsingRouteAndQueryParameters(controllerContext, actionsFoundSoFar)?.ToArray();

    if (actionsFound == null || actionsFound.Length == 0)
        throw new HttpResponseException(
            controllerContext.Request.CreateErrorResponse(HttpStatusCode.NotFound, "Cannot find a matching action."));

    if (actionsFound.Length > 1)
        throw new HttpResponseException(
            controllerContext.Request.CreateErrorResponse(HttpStatusCode.Ambiguous, "Multiple matches found."));

    return actionsFound[0];
}
/// <summary>
/// Places the warriors, apples and pears on random free squares of the grid.
/// Warriors keep a distance of at least 2 from each other; fruit keeps a
/// distance of at least 1 from other fruit.
/// </summary>
/// <param name="warriorCharacters">Characters describing the warriors to create.</param>
public void ArrangeGameObjects(string[] warriorCharacters)
{
    InitGrid();
    Warrior[] warriors = GetWarriors(warriorCharacters);
    _warriorSquares = new Square<Warrior>[warriors.Length];
    HashSet<Square<GameObject>> squaresTried = new HashSet<Square<GameObject>>();

    // Fix: the original repeated the same random-placement do/while three
    // times; the search is now a single helper parameterized by distance and
    // conflict predicate.
    for (int i = 0; i < warriors.Length; i++)
    {
        Square<GameObject> square = PickFreeSquare(squaresTried, 2, o => o is Warrior);
        square.GameObject = warriors[i];
        _warriorSquares[i] = new Square<Warrior>(square.Row, square.Col, warriors[i]);
        squaresTried.Add(square);
    }

    for (int i = 0; i < ApplesCount; i++)
    {
        Square<GameObject> square = PickFreeSquare(squaresTried, 1, o => o is Fruit);
        square.GameObject = new Apple();
        squaresTried.Add(square);
    }

    for (int i = 0; i < PearsCount; i++)
    {
        Square<GameObject> square = PickFreeSquare(squaresTried, 1, o => o is Fruit);
        square.GameObject = new Pear();
        squaresTried.Add(square);
    }
}

/// <summary>
/// Picks a random grid square that has not been used yet and is not within
/// <paramref name="minDistance"/> of any occupied square whose game object
/// matches <paramref name="conflictsWith"/>.
/// NOTE(review): like the original, this loops forever when no such square
/// exists (e.g. a grid too small for the requested object counts).
/// </summary>
private Square<GameObject> PickFreeSquare(
    HashSet<Square<GameObject>> squaresTried,
    int minDistance,
    Func<GameObject, bool> conflictsWith)
{
    Square<GameObject> squareToTry;
    do
    {
        squareToTry = _grid[
            _randomGenerator.Next(0, GridSize),
            _randomGenerator.Next(0, GridSize)];
    }
    while (
        squaresTried.Contains(squareToTry)
        || IsWithin(minDistance, squareToTry, squaresTried.Where(s => conflictsWith(s.GameObject))));

    return squareToTry;
}
/// <summary>
/// Creates a <see cref="RoutingTablesAdaptersList"/> and partitions the current
/// adapters into new, existing and removed ones relative to the previous snapshot.
/// </summary>
/// <param name="previousAdapterList">A complete list of all the adapters that existed before.</param>
/// <param name="currentAdapterList">A complete list of all the adapters that exist now.</param>
public RoutingTablesAdaptersList(HashSet<IAdapter> previousAdapterList, HashSet<IAdapter> currentAdapterList)
{
    NewAdapter = new List<IAdapter>();
    ExistingAdapter = new List<IAdapter>();
    OldAdapter = new List<IAdapter>();
    NewAndExistingAdapters = new List<IAdapter>();

    // Everything that exists now is either brand new or still existing.
    NewAndExistingAdapters.AddRange(currentAdapterList);

    foreach (var adapter in currentAdapterList)
    {
        if (previousAdapterList.Contains(adapter))
        {
            ExistingAdapter.Add(adapter);
        }
        else
        {
            NewAdapter.Add(adapter);
        }
    }

    // Adapters that were present before but have since disappeared.
    OldAdapter.AddRange(previousAdapterList.Where(adapter => !currentAdapterList.Contains(adapter)));
}
static void Main()
{
    int N = int.Parse(Console.ReadLine());

    // Read the edge list; every endpoint becomes a node.
    var edges = new Tuple<int, int, int>[N];
    var nodes = new HashSet<int>();
    for (int i = 0; i < N; i++)
    {
        var parts = Console.ReadLine().Split(' ').Select(int.Parse).ToArray();
        edges[i] = Tuple.Create(parts[0], parts[1], parts[2]);
        nodes.Add(parts[0]);
        nodes.Add(parts[1]);
    }

    // Start with every node in its own single-element tree.
    var allTrees = new HashSet<HashSet<int>>();
    foreach (var node in nodes)
    {
        allTrees.Add(new HashSet<int> { node });
    }

    // Kruskal: consider edges from cheapest to most expensive.
    Array.Sort(edges, (a, b) => a.Item3.CompareTo(b.Item3));

    double result = 0;
    foreach (var edge in edges)
    {
        var treeA = allTrees.First(t => t.Contains(edge.Item1));
        var treeB = allTrees.First(t => t.Contains(edge.Item2));

        // Endpoints already connected - the edge would close a cycle.
        if (treeA.Equals(treeB))
        {
            continue;
        }

        result += edge.Item3;
        treeA.UnionWith(treeB);
        allTrees.Remove(treeB);

        // Everything merged into one tree - the spanning tree is complete.
        if (allTrees.Count == 1)
        {
            break;
        }
    }

    Console.WriteLine(result);
}
// Verifies that GitInstallation equality and hashing are case-insensitive on the
// installation path: 15 installations (5 distinct (path, version) pairs, each in
// original, lower-case and upper-case spelling) must collapse to 5 set entries.
public void CaseInsensitiveComparison()
{
    List<GitInstallation> list = new List<GitInstallation>
    {
        new GitInstallation(@"C:\Program Files (x86)\Git", KnownGitDistribution.GitForWindows32v1),
        new GitInstallation(@"C:\Program Files (x86)\Git", KnownGitDistribution.GitForWindows32v2),
        new GitInstallation(@"C:\Program Files\Git", KnownGitDistribution.GitForWindows32v1),
        new GitInstallation(@"C:\Program Files\Git", KnownGitDistribution.GitForWindows32v2),
        new GitInstallation(@"C:\Program Files\Git", KnownGitDistribution.GitForWindows64v2),
        // ToLower versions
        new GitInstallation(@"C:\Program Files (x86)\Git".ToLower(), KnownGitDistribution.GitForWindows32v1),
        new GitInstallation(@"C:\Program Files (x86)\Git".ToLower(), KnownGitDistribution.GitForWindows32v2),
        new GitInstallation(@"C:\Program Files\Git".ToLower(), KnownGitDistribution.GitForWindows32v1),
        new GitInstallation(@"C:\Program Files\Git".ToLower(), KnownGitDistribution.GitForWindows32v2),
        new GitInstallation(@"C:\Program Files\Git".ToLower(), KnownGitDistribution.GitForWindows64v2),
        // ToUpper versions
        new GitInstallation(@"C:\Program Files (x86)\Git".ToUpper(), KnownGitDistribution.GitForWindows32v1),
        new GitInstallation(@"C:\Program Files (x86)\Git".ToUpper(), KnownGitDistribution.GitForWindows32v2),
        new GitInstallation(@"C:\Program Files\Git".ToUpper(), KnownGitDistribution.GitForWindows32v1),
        new GitInstallation(@"C:\Program Files\Git".ToUpper(), KnownGitDistribution.GitForWindows32v2),
        new GitInstallation(@"C:\Program Files\Git".ToUpper(), KnownGitDistribution.GitForWindows64v2),
    };

    // The set must treat the differently-cased duplicates as equal.
    HashSet<GitInstallation> set = new HashSet<GitInstallation>(list);

    Assert.AreEqual(15, list.Count);
    Assert.AreEqual(5, set.Count);

    // Per-version counts before de-duplication: 6 / 6 / 3.
    Assert.AreEqual(6, list.Where(x => x.Version == KnownGitDistribution.GitForWindows32v1).Count());
    Assert.AreEqual(6, list.Where(x => x.Version == KnownGitDistribution.GitForWindows32v2).Count());
    Assert.AreEqual(3, list.Where(x => x.Version == KnownGitDistribution.GitForWindows64v2).Count());

    // After de-duplication: 2 distinct paths for each 32-bit version, 1 for 64-bit.
    Assert.AreEqual(2, set.Where(x => x.Version == KnownGitDistribution.GitForWindows32v1).Count());
    Assert.AreEqual(2, set.Where(x => x.Version == KnownGitDistribution.GitForWindows32v2).Count());
    Assert.AreEqual(1, set.Where(x => x.Version == KnownGitDistribution.GitForWindows64v2).Count());

    // NOTE(review): the `x != a.First() || ...` guard makes each assertion
    // trivially true for every element except a.First(), so only the first
    // element's paths are actually compared (to themselves). Possibly the
    // intent was `x == a.First() || ...` - confirm before relying on this.
    foreach (var v in Enum.GetValues(typeof(KnownGitDistribution)))
    {
        KnownGitDistribution kgd = (KnownGitDistribution)v;
        var a = list.Where(x => x.Version == kgd);
        Assert.IsTrue(a.All(x => x != a.First() || GitInstallation.PathComparer.Equals(x.Cmd, a.First().Cmd)));
        Assert.IsTrue(a.All(x => x != a.First() || GitInstallation.PathComparer.Equals(x.Config, a.First().Config)));
        Assert.IsTrue(a.All(x => x != a.First() || GitInstallation.PathComparer.Equals(x.Git, a.First().Git)));
        Assert.IsTrue(a.All(x => x != a.First() || GitInstallation.PathComparer.Equals(x.Libexec, a.First().Libexec)));
        Assert.IsTrue(a.All(x => x != a.First() || GitInstallation.PathComparer.Equals(x.Sh, a.First().Sh)));
    }
}
private void UpdatePrefix(HashSet<string> items, string oldPrefix, string newPrefix)
{
    // Materialize the matching entries first; the set must not be mutated
    // while it is being enumerated.
    var matches = items.Where(entry => entry.StartsWithIgnoreCase(oldPrefix)).ToList();

    foreach (var match in matches)
    {
        items.Remove(match);

        // Swap the old prefix for the new one, keeping the remainder intact.
        items.Add(newPrefix + match.Substring(oldPrefix.Length));
    }
}
public IEnumerable<string> GetNames(SyntaxNode node)
{
    // Reset the collected state, walk the tree, then report every left-hand
    // side that also occurs as a full name.
    _leftSideOfNames = new HashSet<string>();
    _fullNames = new HashSet<string>();

    Visit(node);

    var result = new List<string>();
    foreach (var name in _leftSideOfNames)
    {
        if (_fullNames.Contains(name))
        {
            result.Add(name);
        }
    }

    return result.ToArray();
}
public void hash()
{
    // Two decimals that are numerically equal but carry different scales:
    // 11.5 vs the result of 11.5 / 3 * 3.
    decimal amount1 = 11.5M;
    decimal amount2 = amount1 / 3M;
    amount2 *= 3M;

    // Expected output (per the original demo):
    //   True       (A)  - the values compare equal
    //   40270000   (B)
    //   BFD9000F   (C)  - equal decimals can still hash differently
    //   True       (D)  - List.Contains compares with Equals
    //   False      (E)  - HashSet.Contains goes through GetHashCode first
    //   False      (F)  - AsEnumerable().Contains still dispatches to the set
    //   True       (G)  - Where() forces a sequential scan using Equals
    Console.WriteLine(amount1 == amount2);                      // (A)
    Console.WriteLine(amount1.GetHashCode().ToString("X8"));    // (B)
    Console.WriteLine(amount2.GetHashCode().ToString("X8"));    // (C)

    var list = new List<decimal> { amount1 };
    Console.WriteLine(list.Contains(amount2));                  // (D)

    var set = new HashSet<decimal> { amount1 };
    Console.WriteLine(set.Contains(amount2));                   // (E)
    Console.WriteLine(set.AsEnumerable().Contains(amount2));    // (F)
    Console.WriteLine(set.Where(d => true).Contains(amount2));  // (G)
}
// Imports a single Nop category into the CMS: creates a Category webpage under
// `parent`, registers it with the import context, then recurses into the
// category's children.
private void UpdateCategory(ISession session, CategoryData categoryData, Webpage parent, HashSet<CategoryData> allData, NopImportContext nopImportContext)
{
    CategoryData data = categoryData;

    // Used to suggest a URL when the imported data does not carry one.
    var suggestParams = new SuggestParams
    {
        DocumentType = typeof(Category).FullName,
        PageName = data.Name,
        UseHierarchy = true
    };

    var category = new Category
    {
        Name = data.Name,
        // Prefer the imported URL; otherwise ask the URL service for a suggestion.
        UrlSegment = string.IsNullOrWhiteSpace(data.Url)
            ? _webpageUrlService.Suggest(parent, suggestParams)
            : data.Url,
        Parent = parent,
        // Abstracts are capped at 500 characters.
        CategoryAbstract = data.Abstract.LimitCharacters(500),
        // Published categories go live immediately (today); others stay unpublished.
        PublishOn = data.Published ? CurrentRequestData.Now.Date : (DateTime?)null,
        RevealInNavigation = true
    };

    // Carry over the category image if it was already imported as a media file.
    var mediaFile = nopImportContext.FindNew<MediaFile>(data.PictureId);
    if (mediaFile != null)
    {
        category.FeatureImage = mediaFile.FileUrl;
    }

    session.Save(category);
    nopImportContext.AddEntry(data.Id, category);

    // Depth-first import of the subtree rooted at this category.
    List<CategoryData> children = allData.Where(d => d.ParentId == data.Id).ToList();
    foreach (CategoryData child in children)
    {
        UpdateCategory(session, child, category, allData, nopImportContext);
    }
}
// Receives up to `maximumCount` pending messages from the directory-backed queue.
// A message file counts as "not yet received" while no companion file with the
// same name minus its extension exists, and it only becomes visible once
// _visibilityDelayMilliseconds have elapsed since its last write.
public IEnumerable<IReceipt> Receive(int maximumCount)
{
    if (Directory.Exists(_directoryName))
    {
        HashSet<string> fileNames = new HashSet<string>(Directory.GetFiles(_directoryName));

        // Keep files that have an extension and whose extension-less companion is absent.
        string[] notReceivedFileNames = fileNames.Where(
            (n) => Path.HasExtension(n)
                   && !fileNames.Contains(Path.Combine(Path.GetDirectoryName(n), Path.GetFileNameWithoutExtension(n)))).ToArray();

        // NOTE(review): only the first `maximumCount` candidates are examined;
        // candidates skipped for visibility are not replaced by later ones.
        for (int i = 0; i < Math.Min(maximumCount, notReceivedFileNames.Length); i++)
        {
            TimeSpan span = DateTime.UtcNow.Subtract(File.GetLastWriteTimeUtc(notReceivedFileNames[i]));
            if (span.TotalMilliseconds > _visibilityDelayMilliseconds)
            {
                string message;
                using (Stream stream = new FileStream(notReceivedFileNames[i], FileMode.Open, FileAccess.Read))
                {
                    using (StreamReader reader = new StreamReader(stream, Encoding.UTF8))
                    {
                        message = reader.ReadToEnd();
                    }
                }

                yield return new Receipt(message, notReceivedFileNames[i]);
            }
        }
    }
}
/// <summary>
/// - Removes empty xmlns attributes.
/// - Rewrites unclear namespace declarations (qXX) into a "_preNamespace"
///   attribute and strips matching "qXX:" prefixes from attribute values.
/// </summary>
/// <param name="rootElement">Root whose descendants are cleaned (the root's own attributes are untouched).</param>
public static void RemoveEmptyNamespaces(XElement rootElement)
{
    var replacements = new List<Tuple<XAttribute, XAttribute>>();
    var prefixNames = new HashSet<string>();

    // Deliberately deferred: the query reflects the tree mutations made below.
    var allAttributes = rootElement.Descendants().SelectMany(element => element.Attributes());

    // Drop empty default-namespace declarations outright.
    allAttributes.Where(a => a.Name.LocalName == "xmlns" && string.IsNullOrEmpty(a.Value)).Remove();

    // Collect the auto-generated q-prefixed namespace declarations.
    foreach (var declaration in allAttributes.Where(a => a.IsNamespaceDeclaration && !string.IsNullOrEmpty(a.Value)))
    {
        var isQPrefix = declaration.Name.Namespace == "http://www.w3.org/2000/xmlns/"
                        && declaration.Name.LocalName.StartsWith("q");
        if (!isQPrefix)
        {
            continue;
        }

        prefixNames.Add(declaration.Name.LocalName);
        replacements.Add(new Tuple<XAttribute, XAttribute>(
            declaration, new XAttribute("_preNamespace", declaration.Value)));
    }

    // Swap each q-declaration for its "_preNamespace" marker on the same element.
    foreach (var replacement in replacements)
    {
        var parent = replacement.Item1.Parent;
        if (parent == null)
        {
            continue;
        }

        replacement.Item1.Remove();
        parent.Add(replacement.Item2);
    }

    // Strip the first matching "qXX:" prefix from attribute values.
    foreach (var attribute in allAttributes.Where(a => a.Value.Contains(":")))
    {
        foreach (var prefix in prefixNames.Where(p => attribute.Value.Contains(p)))
        {
            attribute.Value = attribute.Value.Replace(prefix + ":", "");
            break;
        }
    }
}
/// <summary>
/// Retrieves all events across all aggregates that are related to the specified aggregate ids, in the order in which they were recorded.
/// </summary>
/// <param name="events">The events.</param>
/// <param name="relatedToAggregateIds">The aggregate ids to which the events relate.</param>
/// <returns>The transitive closure of related events, ordered by record id.</returns>
public static async Task<IEnumerable<StorableEvent>> RelatedEvents(
    this IQueryable<StorableEvent> events,
    params Guid[] relatedToAggregateIds)
{
    var ids = new HashSet<Guid>(relatedToAggregateIds);
    var relatedEvents = new HashSet<StorableEvent>();

    // Fixed-point iteration: keep querying until a pass adds no new events.
    int currentCount;
    do
    {
        currentCount = relatedEvents.Count;

        // Ids we have not yet fetched events for. Deliberately deferred so the
        // query below sees the current contents of relatedEvents.
        var unqueriedIds = ids.Where(id => ! relatedEvents.Select(e => e.AggregateId).Contains(id));

        var newEvents = await events.Where(e => unqueriedIds.Any(id => id == e.AggregateId)).ToArrayAsync();
        relatedEvents.UnionWith(newEvents);

        // Any guid mentioned in an event body is treated as a potentially
        // related aggregate and queued for the next pass.
        var moreIds = newEvents
            .SelectMany(e => e.Body.ExtractGuids())
            .Distinct()
            .ToArray();

        if (!moreIds.Any())
        {
            break;
        }

        ids.UnionWith(moreIds);
    } while (currentCount != relatedEvents.Count);

    return relatedEvents.OrderBy(e => e.Id);
}
public static void Main(string[] args)
{
    // Build the test fixture: a single platform that is both ceiling and
    // floor for two men.
    var PlatformSet = new HashSet<Platform>();
    Platform Platform0 = new Platform();
    PlatformSet.Add(Platform0);

    var ManSet = new HashSet<Man>();
    Man Man0 = new Man();
    ManSet.Add(Man0);
    Man Man1 = new Man();
    ManSet.Add(Man1);

    Man0.ceiling = Platform0;
    Man1.ceiling = Platform0;
    Man0.floor = Platform0;
    Man1.floor = Platform0;

    var DateSet = new HashSet<Date>();

    var PersonSet = new HashSet<Person>();
    Eve Eve0 = Eve.Instance;
    PersonSet.Add(Eve0);

    var WomanSet = new HashSet<Woman>();
    WomanSet.Add(Eve0);

    var EveSet = new HashSet<Eve>();
    EveSet.Add(Eve0);

    // check test data: every man must be above exactly one man.
    Contract.Assert(
        Contract.ForAll(ManSet, m => ManSet.Count(n => FuncClass.Above(m, n)) == 1),
        "BelowToo");
}
public override ValueType Solve()
{
    const int ROWS = 51;

    // Collect every interior value of Pascal's triangle for the first ROWS rows
    // (1 is always a member).
    var numbers = new HashSet<ulong> { 1 };
    ulong[] previousRow;
    ulong[] currentRow = { 1 };
    for (int size = 2; size <= ROWS; size++)
    {
        previousRow = currentRow;
        currentRow = new ulong[size];
        currentRow[0] = currentRow[size - 1] = 1;
        for (int i = 1; i < size - 1; i++)
        {
            currentRow[i] = previousRow[i - 1] + previousRow[i];
            numbers.Add(currentRow[i]);
        }
    }

    // A number is squarefree iff no prime square divides it; multiples of 4
    // are removed first, then each remaining prime square is sieved out.
    var candidates = numbers.Where(n => n % 4 != 0).OrderBy(n => n).ToList();
    foreach (ulong prime in PrimeExtensions.Primes((int)Math.Floor(Math.Sqrt(candidates.Last()))))
    {
        candidates = candidates.Where(n => n % (prime * prime) != 0).ToList();
    }

    return candidates.Sum(n => (long)n);
}
/// <summary>
/// Builds a human-readable summary of the given objects, one line per tech type
/// ("Name xCount"), showing at most four groups followed by "And More.....".
/// </summary>
/// <param name="items">Objects to summarize; may be null or contain nulls.</param>
/// <returns>The formatted summary, or an empty string when there is nothing to format.</returns>
private static string FormatData(HashSet<ObjectData> items)
{
    var lookup = items?.Where(x => x != null).ToLookup(x => x.TechType).ToArray();

    // Fix: the original dereferenced `lookup` unconditionally, throwing a
    // NullReferenceException when `items` was null.
    if (lookup == null)
    {
        return string.Empty;
    }

    var sb = new StringBuilder();
    for (int i = 0; i < lookup.Length; i++)
    {
        if (i < 4)
        {
            // NOTE(review): with ToLookup every element's TechType equals its
            // group key, so this guard should never trigger; kept for
            // behavioral parity with the original.
            if (lookup[i].All(objectData => objectData.TechType != lookup[i].Key))
            {
                continue;
            }

            sb.Append($"{Language.main.Get(lookup[i].Key)} x{lookup[i].Count()}");
            sb.Append(Environment.NewLine);
        }
        else
        {
            sb.Append($"And More.....");
            break;
        }
    }

    return(sb.ToString());
}
/// <summary>
/// Initializes a new instance of the <see cref="BootstrapConventions" /> class,
/// wiring up the default convention predicates used for automatic container registration.
/// </summary>
public BootstrapConventions()
{
    // A registrable type: a non-abstract, non-generic class with a namespace.
    this.IsConcreteType = t => !t.IsInterface && !t.IsAbstract && !t.IsGenericType && t.Namespace != null;

    // Services live in a "*.Services" namespace.
    this.IsService = t => this.IsConcreteType( t ) && Topics.Radical.StringExtensions.IsLike( t.Namespace, "*.Services" );

    this.SelectServiceContracts = type =>
    {
        var interfaces = type.GetInterfaces();
        var types = new HashSet<Type>( interfaces ) { type };

        // Prefer types/interfaces explicitly marked with [Contract]...
        var contracts = types.Where( t => t.IsAttributeDefined<ContractAttribute>() );
        if ( contracts.Any() )
        {
            return contracts;
        }

        // ...then any implemented interface...
        if ( interfaces.Any() )
        {
            return interfaces;
        }

        // ...otherwise fall back to the type itself.
        return types;
    };

    // Factories: nested concrete types whose name ends in "Factory"; registered as themselves.
    this.IsFactory = t => this.IsConcreteType( t ) && t.IsNested && t.Name.EndsWith( "Factory" );
    this.SelectFactoryContracts = type => new[] { type };

    // Validators: concrete IValidator<> implementations in a "*.Validators" namespace,
    // registered against their [Contract]-marked interfaces.
    this.IsValidator = t => this.IsConcreteType( t ) && t.Namespace.EndsWith( ".Validators" ) && t.Is( typeof( IValidator<> ) );
    this.SelectValidatorContracts = t =>
    {
        var contracts = t.GetInterfaces()
            .Where( i => i.IsAttributeDefined<ContractAttribute>() )
            .ToArray();
        return contracts;
    };

    // Types can opt out of automatic registration via attribute.
    this.IsExcluded = t =>
    {
        return t.IsAttributeDefined<DisableAutomaticRegistrationAttribute>();
    };

    //this.AssemblyFileScanPatterns = entryAssembly =>
    //{
    //    var name = entryAssembly.GetName().Name;
    //    var dllPattern = String.Format( "{0}*.dll", name );
    //    var radical = "Radical.*.dll";
    //    return new[] { dllPattern, radical };
    //};

    //this.IncludeAssemblyInContainerScan = assembly => true;
}
public static IUnityContainer CreateAndRegisterAllDependencies()
{
    // Referencing data so dll will be copied. Not really ideal.
    var container = new UnityContainer();

    // Every type declared in the application's own ("Foodies") assemblies.
    var foodiesTypes = new HashSet<Type>(
        AppDomain.CurrentDomain.GetAssemblies()
            .Where(assembly => assembly.FullName.StartsWith("Foodies"))
            .SelectMany(assembly => assembly.GetTypes()));

    // Register everything assignable to each interface against that interface.
    foreach (var contract in foodiesTypes.Where(type => type.IsInterface))
    {
        var implementations = foodiesTypes.Where(
            type => contract.IsAssignableFrom(type) && type != contract);
        foreach (var implementation in implementations)
        {
            container.RegisterType(contract, implementation);
        }
    }

    return container;
}
public static IFeedResult AppendError(this IFeedResult self, TransportMessage message, FeedError error)
{
    var failed = new HashSet<TransportMessage>(self.FailedMessages);

    // Reuse the already-failed instance when present, otherwise the incoming one.
    var existing = failed.SingleOrDefault(x => x == message) ?? message;
    failed.Add(new TransportMessage(existing, error));

    return new MessageProcessor.FeedResult(self.SuccessfulMessages, failed);
}
public static string[] GetProgramFilesPaths()
{
    // The three variables can alias each other; the set removes duplicates and
    // collapses unset variables (null) into a single slot that is filtered out below.
    var candidates = new[]
    {
        Environment.GetEnvironmentVariable("ProgramFiles(x86)"),
        Environment.GetEnvironmentVariable("ProgramFiles"),
        Environment.GetEnvironmentVariable("ProgramW6432"),
    };

    var paths = new HashSet<string>(candidates);
    return paths.Where(p => p != null).ToArray();
}
/// <summary>
/// Builds the set intersection of <paramref name="left"/> and <paramref name="right"/>.
/// </summary>
/// <param name="left">Items to test for membership.</param>
/// <param name="right">Set the membership is tested against.</param>
/// <returns>A new set containing the items present in both inputs.</returns>
private static HashSet<string> CreateIndex(HashSet<string> left, HashSet<string> right)
{
    // Idiom: HashSet<T>.IntersectWith replaces the manual filter-and-add loop.
    var idx = new HashSet<string>(left);
    idx.IntersectWith(right);
    return idx;
}
public IEnumerable<IStylingRule> GetUnusedRules()
{
    lock (_sync)
    {
        // Start from every known rule, drop those with recorded usages,
        // then drop the protected classes.
        var unused = new HashSet<IStylingRule>(GetAllRules());
        unused.ExceptWith(_ruleUsages.Select(usage => usage.Rule).Distinct());

        return unused
            .Where(rule => !UsageRegistry.IsAProtectedClass(rule))
            .ToList();
    }
}
/// <summary>
/// Load a setting from the default setting path.
/// </summary>
/// <param name="key">Key of the setting to look up.</param>
/// <returns>The first matching setting pair, or null when the key is absent.</returns>
public SettingPair GetValue(string key)
{
    // Lazily load the settings on first access.
    if (!settingsLoaded)
    {
        Reload();
    }

    return settings?.FirstOrDefault(pair => pair.Key == key);
}
// Skeleton of a Kruskal minimal-spanning-tree solution. The input-reading part
// ("// ...") is intentionally elided; this does not compile until it is filled in.
static void Main()
{
    int N = // ...

    var allTrees = new HashSet<HashSet<int>>();
    var edges = new Tuple<int, int, int>[N];
    var nodes = new HashSet<int>();

    // Initialization -> edges / nodes

    // Represent each node as its own single-element tree
    foreach (var node in nodes)
    {
        var tree = new HashSet<int>();
        tree.Add(node);
        allTrees.Add(tree);
    }

    // Sorting edges by their weight
    Array.Sort(edges, (a, b) => a.Item3.CompareTo(b.Item3));

    double result = 0;

    // Minimal spanning tree distance
    foreach (var edge in edges)
    {
        var tree1 = allTrees.Where(tree => tree.Contains(edge.Item1)).First();
        var tree2 = allTrees.Where(tree => tree.Contains(edge.Item2)).First();

        // Elements are in same tree - the edge would close a cycle, skip it
        if (tree1.Equals(tree2)) continue;

        result += edge.Item3;

        // Combine trees
        tree1.UnionWith(tree2);
        allTrees.Remove(tree2);

        // Small optimization: a single remaining tree means the MST is complete
        if (allTrees.Count == 1) break;
    }

    Console.WriteLine(result);
}
/// <summary>
/// Determines whether the interpreter factory contains the specified
/// modules. Sources are consulted in order: the package manager, the
/// completion database (when current), the database's expected modules,
/// and finally a scan of the interpreter's Lib directory.
/// </summary>
/// <returns>The names of the modules that were found.</returns>
public static async Task<HashSet<string>> FindModulesAsync(this IPythonInterpreterFactory factory, params string[] moduleNames)
{
    var finding = new HashSet<string>(moduleNames);
    var found = new HashSet<string>();

    // 1) Ask the package manager about each requested module.
    var withPackages = factory.PackageManager;
    if (withPackages != null)
    {
        foreach (var m in finding)
        {
            if ((await withPackages.GetInstalledPackageAsync(new PackageSpec(m), CancellationToken.None)).IsValid)
            {
                found.Add(m);
            }
        }

        finding.ExceptWith(found);
        if (!finding.Any())
        {
            // Found all of them, so stop searching
            return found;
        }
    }

    // 2) Consult the completion database when it is up to date.
    var withDb = factory as PythonInterpreterFactoryWithDatabase;
    if (withDb != null && withDb.IsCurrent)
    {
        var db = withDb.GetCurrentDatabase();
        found.UnionWith(finding.Where(m => db.GetModule(m) != null));

        // Always stop searching after this step
        return found;
    }

    // 3) Database exists but is stale: check the modules it is expected to contain.
    if (withDb != null)
    {
        try
        {
            var paths = await PythonTypeDatabase.GetDatabaseSearchPathsAsync(withDb);
            found.UnionWith(PythonTypeDatabase.GetDatabaseExpectedModules(withDb.Configuration.Version, paths)
                .SelectMany()
                .Select(g => g.ModuleName)
                .Where(m => finding.Contains(m)));
        }
        catch (InvalidOperationException)
        {
            // Expected-module lookup can fail; fall through to the Lib scan.
        }

        finding.ExceptWith(found);
        if (!finding.Any())
        {
            // Found all of them, so stop searching
            return found;
        }
    }

    // 4) Last resort: enumerate the interpreter's Lib directory on a worker thread.
    return await Task.Run(() =>
    {
        foreach (var mp in ModulePath.GetModulesInLib(factory.Configuration))
        {
            if (finding.Remove(mp.ModuleName))
            {
                found.Add(mp.ModuleName);
            }

            if (!finding.Any())
            {
                break;
            }
        }

        return found;
    });
}
// Kruskal's minimal spanning tree over n weighted edges read from stdin
// (one edge per line: "from to weight"); prints the MST's total weight.
static void Main()
{
    int n = int.Parse(Console.ReadLine());
    var edges = new Tuple<int, int, int>[n];
    var nodes = new HashSet<int>();
    for (int i = 0; i < n; i++)
    {
        var input = Console.ReadLine().Split(' ').Select(int.Parse).ToArray();
        edges[i] = new Tuple<int, int, int>(input[0], input[1], input[2]);
        nodes.Add(input[0]);
        nodes.Add(input[1]);
    }

    // Disjoint-set forest, naively represented: one HashSet per component.
    var allTrees = new HashSet<HashSet<int>>();
    foreach (var node in nodes)
    {
        var tree = new HashSet<int>();
        tree.Add(node);
        allTrees.Add(tree);
    }

    // Greedily take edges from cheapest to most expensive.
    Array.Sort(edges, (a, b) => a.Item3.CompareTo(b.Item3));

    long result = 0;
    foreach (var edge in edges)
    {
        var tree1 = allTrees.Where(tree => tree.Contains(edge.Item1)).First();
        var tree2 = allTrees.Where(tree => tree.Contains(edge.Item2)).First();

        // Same component: the edge would close a cycle, skip it.
        if (tree1.Equals(tree2)) continue;

        result += edge.Item3;
        tree1.UnionWith(tree2);
        allTrees.Remove(tree2);

        // One component left - the spanning tree is complete.
        if (allTrees.Count == 1) break;
    }

    Console.WriteLine(result);
}
public static IFeedResult AppendError(this IFeedResult self, IEnumerable<TransportMessage> messages, FeedError error)
{
    var failed = new HashSet<TransportMessage>(self.FailedMessages);

    foreach (var message in messages)
    {
        // Reuse the already-failed instance when present, otherwise the incoming one.
        var existing = failed.SingleOrDefault(x => x == message) ?? message;
        failed.Add(new TransportMessage(existing, error));
    }

    return new MessageProcessor.FeedResult(self.SuccessfulMessages, failed);
}
// Project Euler 62-style search: find a cube that has exactly five digit
// permutations which are also cubes. Prints progress plus the final answer.
string Main()
{
    // We don't necessarily need to look at every permutation -
    // just need to find every cube that contains the exact same characters.
    // There are still ways to improve the speed of this by keeping track of permutations.
    HashSet<long> cubes = new HashSet<long>();
    int max = 20000;
    int maxCount = 0;
    long finalCube = 0;

    for (long i = 1; i < max; i++)
        cubes.Add(i * i * i);

    foreach (long cube in cubes)
    {
        // The cube is a permutation of itself.
        int validPermutations = 1;
        var currentCubeLength = cube.ToString().Length;
        var currentCubeChars = cube.ToString().ToCharArray().OrderBy(c => c).ToList();

        // Only cubes with the same digit count can be permutations of this one.
        var cubesOfSameLength = cubes.Where(c => c.ToString().Length == currentCubeLength && c != cube).ToList();
        foreach (long similarCube in cubesOfSameLength)
        {
            var similarCubeChars = similarCube.ToString().ToCharArray().OrderBy(c => c).ToList();

            // Same sorted digit sequence => similarCube is a permutation of cube.
            bool isMatch = true;
            for (int i = 0; i < currentCubeLength; i++)
            {
                if (currentCubeChars[i] != similarCubeChars[i])
                {
                    isMatch = false;
                    break;
                }
            }

            if (isMatch)
                validPermutations++;
        }

        if (validPermutations > maxCount)
        {
            maxCount = validPermutations;
            finalCube = cube;
            Console.WriteLine(cube + ", " + validPermutations);
        }

        // NOTE(review): stops at the first cube reaching five permutations. This
        // relies on HashSet<long> enumerating in insertion order to yield the
        // smallest such cube - the BCL does not guarantee that; confirm.
        if (maxCount == 5)
            break;
    }

    Console.WriteLine("Cube permutations: " + maxCount);
    Console.WriteLine("Cube: " + finalCube);
    Console.WriteLine("Cubic: " + Math.Pow(finalCube, (1.0 / 3.0)));
    return "";
}
/// <summary>
/// This method calls the underlying collectors and initiates a flush of any pending data.
/// </summary>
public async Task Flush()
{
    try
    {
        if (mCollectors != null)
        {
            foreach (var collector in mCollectors.Where(c => c.CanFlush))
            {
                collector.Flush();
            }
        }
    }
    catch (Exception ex)
    {
        this.LogException("DataCollectionContainer/Flush failed.", ex);
    }
}
public IEnumerable<Actor> UnitsInRange(CPos xy)
{
    var range = ((ChronoshiftPowerInfo)Info).Range;

    // Gather every actor standing on a tile inside the chronoshift circle.
    var units = new HashSet<Actor>();
    foreach (var tile in Self.World.Map.FindTilesInCircle(xy, range))
    {
        units.UnionWith(Self.World.ActorMap.GetActorsAt(tile));
    }

    // Keep only chronoshiftable actors that are not teleport-blocked.
    return units.Where(actor =>
        actor.Info.HasTraitInfo<ChronoshiftableInfo>()
        && !actor.TraitsImplementing<IPreventsTeleport>().Any(trait => trait.PreventsTeleport(actor)));
}
/// <summary>
/// Gives all packages for an Id, and all dependencies recursively.
/// </summary>
public override async Task<IEnumerable<PackageDependencyInfo>> ResolvePackages(IEnumerable<string> packageIds, NuGetFramework projectFramework, bool includePrerelease, CancellationToken token)
{
    var result = new HashSet<PackageDependencyInfo>(PackageIdentityComparer.Default);

    // Union the registration info of every requested package id.
    foreach (var packageId in packageIds)
    {
        var packages = await GetPackagesFromRegistration(packageId, AllVersions, projectFramework, token);
        result.UnionWith(packages);
    }

    // Optionally filter out prerelease versions.
    return result.Where(package => includePrerelease || !package.Version.IsPrerelease);
}
/// <summary>
/// Builds the evolution plan that moves the schema from its currently applied
/// version to <paramref name="target"/>.
/// </summary>
/// <param name="target">Version to migrate to.</param>
/// <param name="applied">Versions already applied to the schema.</param>
/// <param name="availableEvos">All evolution versions available on disk.</param>
/// <returns>The ordered plan: descending for downgrades, ascending for upgrades.</returns>
/// <exception cref="ArgumentException">More than one old evolution step was never applied.</exception>
public static SPEvolutionPlan BuildMigrationPlan(long target, IEnumerable<long> applied, IEnumerable<long> availableEvos)
{
    // Fix: materialize the inputs once; the original re-enumerated `applied`
    // (Count, Max, HashSet ctor) and `availableEvos` (Where, UnionWith)
    // multiple times.
    var set = new HashSet<long>(applied);
    var available = availableEvos.ToList();

    long startVersion = set.Count == 0 ? 0 : set.Max();

    // проверки: older evolutions that were skipped indicate a broken history.
    // NOTE(review): the original threshold is "> 1", so a single skipped step
    // is tolerated; preserved as-is.
    var skipped = available.Where(x => x < startVersion && !set.Contains(x)).ToList();
    if (skipped.Count > 1)
    {
        throw new ArgumentException
            ("Current Evolution Schema Version Error: There is not applied evolutions step");//, list);
    }

    set.UnionWith(available);

    // Downgrade: walk back from startVersion (inclusive) down to target (exclusive);
    // upgrade: walk forward from startVersion (exclusive) up to target (inclusive).
    var versions = target < startVersion
        ? set.Where(n => n <= startVersion && n > target).OrderByDescending(x => x).ToList()
        : set.Where(n => n > startVersion && n <= target).OrderBy(x => x).ToList();

    return new SPEvolutionPlan(versions, startVersion);
}
public IEnumerable<PhonebookEntry> Find(string name, string town)
{
    // Union of the entries matching the name in any of the four name indexes.
    var nameMatches = new HashSet<PhonebookEntry>();
    nameMatches.UnionWith(this.byFirstName[name]);
    nameMatches.UnionWith(this.byMiddleName[name]);
    nameMatches.UnionWith(this.byLastName[name]);
    nameMatches.UnionWith(this.byNickname[name]);

    // Narrow the name matches down to the requested town.
    return nameMatches.Where(entry => entry.Town == town);
}
// Captures the packet-filter configuration. A null collection means "no
// constraint"; non-null collections are copied into hash sets for O(1)
// membership tests.
public FilterData(
    int? minPacketNumber,
    int? maxPacketNumber,
    ICollection<uint>? excludedEntries,
    ICollection<uint>? includedEntries,
    ICollection<UniversalGuid>? excludedGuids,
    ICollection<UniversalGuid>? includedGuids,
    ICollection<string>? excludedOpcodes,
    ICollection<string>? includedOpcodes,
    ICollection<int>? includedPacketNumbers)
{
    this.MinPacketNumber = minPacketNumber;
    this.MaxPacketNumber = maxPacketNumber;
    this.excludedEntries = excludedEntries == null ? null : new HashSet<uint>(excludedEntries);
    this.includedEntries = includedEntries == null ? null : new HashSet<uint>(includedEntries);
    this.excludedGuids = excludedGuids == null ? null : new HashSet<UniversalGuid>(excludedGuids);
    this.includedGuids = includedGuids == null ? null : new HashSet<UniversalGuid>(includedGuids);
    this.excludedOpcodes = excludedOpcodes == null ? null : new HashSet<string>(excludedOpcodes);
    this.includedOpcodes = includedOpcodes == null ? null : new HashSet<string>(includedOpcodes);
    this.includedPacketNumbers = includedPacketNumbers == null ? null : new HashSet<int>(includedPacketNumbers);

    // Opcodes ending in '*' are prefix wildcards; store them with the '*' stripped.
    includedOpcodesWildcards = includedOpcodes?.Where(c => c.EndsWith("*")).Select(c => c.Substring(0, c.Length - 1)).ToList();
    excludedOpcodesWildcards = excludedOpcodes?.Where(c => c.EndsWith("*")).Select(c => c.Substring(0, c.Length - 1)).ToList();
}
// Generates the F# DTO source for the service metadata: emits a commented
// options header, "open" statements, then every request/response/model type in
// dependency order under a single global namespace.
public string GetCode(MetadataTypes metadata, IRequest request)
{
    var namespaces = Config.GetDefaultNamespaces(metadata);

    // Collect every namespace used by the DTO types and operations.
    var typeNamespaces = new HashSet<string>();
    metadata.RemoveIgnoredTypesForNet(Config);
    metadata.Types.Each(x => typeNamespaces.Add(x.Namespace));
    metadata.Operations.Each(x =>
    {
        typeNamespaces.Add(x.Request.Namespace);
        if (x.Response != null)
        {
            typeNamespaces.Add(x.Response.Namespace);
        }
    });

    // Look first for shortest Namespace ending with `ServiceModel` convention, else shortest ns
    var globalNamespace = Config.GlobalNamespace
        ?? typeNamespaces.Where(x => x.EndsWith("ServiceModel"))
            .OrderBy(x => x).FirstOrDefault()
        ?? typeNamespaces.OrderBy(x => x).First();

    // Prefixes an option line with "//" when the option was not explicitly supplied.
    Func<string, string> defaultValue = k => request.QueryString[k].IsNullOrEmpty() ? "//" : "";

    var sbInner = new StringBuilder();
    var sb = new StringBuilderWrapper(sbInner);

    // Header: every generation option, commented out unless overridden by the request.
    sb.AppendLine("(* Options:");
    sb.AppendLine("Date: {0}".Fmt(DateTime.Now.ToString("s").Replace("T", " ")));
    sb.AppendLine("Version: {0}".Fmt(Env.VersionString));
    sb.AppendLine("Tip: {0}".Fmt(HelpMessages.NativeTypesDtoOptionsTip.Fmt("//")));
    sb.AppendLine("BaseUrl: {0}".Fmt(Config.BaseUrl));
    if (Config.UsePath != null)
    {
        sb.AppendLine("UsePath: {0}".Fmt(Config.UsePath));
    }

    sb.AppendLine();
    sb.AppendLine("{0}GlobalNamespace: {1}".Fmt(defaultValue("GlobalNamespace"), Config.GlobalNamespace));
    sb.AppendLine("{0}MakeDataContractsExtensible: {1}".Fmt(defaultValue("MakeDataContractsExtensible"), Config.MakeDataContractsExtensible));
    sb.AppendLine("{0}AddReturnMarker: {1}".Fmt(defaultValue("AddReturnMarker"), Config.AddReturnMarker));
    sb.AppendLine("{0}AddDescriptionAsComments: {1}".Fmt(defaultValue("AddDescriptionAsComments"), Config.AddDescriptionAsComments));
    sb.AppendLine("{0}AddDataContractAttributes: {1}".Fmt(defaultValue("AddDataContractAttributes"), Config.AddDataContractAttributes));
    sb.AppendLine("{0}AddIndexesToDataMembers: {1}".Fmt(defaultValue("AddIndexesToDataMembers"), Config.AddIndexesToDataMembers));
    sb.AppendLine("{0}AddGeneratedCodeAttributes: {1}".Fmt(defaultValue("AddGeneratedCodeAttributes"), Config.AddGeneratedCodeAttributes));
    sb.AppendLine("{0}AddResponseStatus: {1}".Fmt(defaultValue("AddResponseStatus"), Config.AddResponseStatus));
    sb.AppendLine("{0}AddImplicitVersion: {1}".Fmt(defaultValue("AddImplicitVersion"), Config.AddImplicitVersion));
    sb.AppendLine("{0}ExportValueTypes: {1}".Fmt(defaultValue("ExportValueTypes"), Config.ExportValueTypes));
    sb.AppendLine("{0}IncludeTypes: {1}".Fmt(defaultValue("IncludeTypes"), Config.IncludeTypes.Safe().ToArray().Join(",")));
    sb.AppendLine("{0}ExcludeTypes: {1}".Fmt(defaultValue("ExcludeTypes"), Config.ExcludeTypes.Safe().ToArray().Join(",")));
    sb.AppendLine("{0}InitializeCollections: {1}".Fmt(defaultValue("InitializeCollections"), Config.InitializeCollections));
    //sb.AppendLine("{0}AddDefaultXmlNamespace: {1}".Fmt(defaultValue("AddDefaultXmlNamespace"), Config.AddDefaultXmlNamespace));
    sb.AppendLine("{0}AddNamespaces: {1}".Fmt(defaultValue("AddNamespaces"), Config.AddNamespaces.Safe().ToArray().Join(",")));
    sb.AppendLine("*)");
    sb.AppendLine();

    string lastNS = null;

    var existingTypes = new HashSet<string>();

    // Partition the metadata into request, response and plain model types.
    var requestTypes = metadata.Operations.Select(x => x.Request).ToSet();
    var requestTypesMap = metadata.Operations.ToSafeDictionary(x => x.Request);
    var responseTypes = metadata.Operations
        .Where(x => x.Response != null)
        .Select(x => x.Response).ToSet();
    var types = metadata.Types.ToSet();

    allTypes = new List<MetadataType>();
    allTypes.AddRange(types);
    allTypes.AddRange(responseTypes);
    allTypes.AddRange(requestTypes);

    var orderedTypes = FilterTypes(allTypes);

    sb.AppendLine("namespace {0}".Fmt(globalNamespace.SafeToken()));
    sb.AppendLine();
    foreach (var ns in namespaces.Where(x => !string.IsNullOrEmpty(x)))
    {
        sb.AppendLine("open " + ns);
    }

    if (Config.AddGeneratedCodeAttributes)
    {
        sb.AppendLine("open System.CodeDom.Compiler");
    }

    var insertCode = InsertCodeFilter?.Invoke(allTypes, Config);
    if (insertCode != null)
    {
        sb.AppendLine(insertCode);
    }

    // Emit each type exactly once; request types get routes and IReturn markers,
    // response types are skipped when their namespace is ignored.
    foreach (var type in orderedTypes)
    {
        var fullTypeName = type.GetFullName();
        if (requestTypes.Contains(type))
        {
            if (!existingTypes.Contains(fullTypeName))
            {
                MetadataType response = null;
                if (requestTypesMap.TryGetValue(type, out var operation))
                {
                    response = operation.Response;
                }

                lastNS = AppendType(ref sb, type, lastNS,
                    new CreateTypeOptions
                    {
                        Routes = metadata.Operations.GetRoutes(type),
                        // Decides which IReturn marker (if any) the request type implements.
                        ImplementsFn = () =>
                        {
                            if (!Config.AddReturnMarker
                                && operation?.ReturnsVoid != true
                                && operation?.ReturnType == null)
                            {
                                return(null);
                            }

                            if (operation?.ReturnsVoid == true)
                            {
                                return(nameof(IReturnVoid));
                            }

                            if (operation?.ReturnType != null)
                            {
                                return(Type("IReturn`1", new[] { Type(operation.ReturnType) }));
                            }

                            return(response != null
                                ? Type("IReturn`1", new[] { Type(response.Name, response.GenericArgs) })
                                : null);
                        },
                        IsRequest = true,
                        Op = operation,
                    });

                existingTypes.Add(fullTypeName);
            }
        }
        else if (responseTypes.Contains(type))
        {
            if (!existingTypes.Contains(fullTypeName)
                && !Config.IgnoreTypesInNamespaces.Contains(type.Namespace))
            {
                lastNS = AppendType(ref sb, type, lastNS,
                    new CreateTypeOptions
                    {
                        IsResponse = true,
                    });

                existingTypes.Add(fullTypeName);
            }
        }
        else if (types.Contains(type) && !existingTypes.Contains(fullTypeName))
        {
            lastNS = AppendType(ref sb, type, lastNS,
                new CreateTypeOptions { IsType = true });

            existingTypes.Add(fullTypeName);
        }
    }

    var addCode = AddCodeFilter?.Invoke(allTypes, Config);
    if (addCode != null)
    {
        sb.AppendLine(addCode);
    }

    sb.AppendLine();

    return(StringBuilderCache.ReturnAndFree(sbInner));
}
static void Main(string[] args)
{
    // Finds duplicate files under a directory: candidates are first narrowed
    // by file length (two files can only be identical if they are the same
    // size), then confirmed by comparing content hashes.
    var stopwatch = new Stopwatch();
    Console.WriteLine("Enter the path to the directory");
    var directory = args != null && args.Length >= 1 ? args[0] : Console.ReadLine();
    var takeFileTime = new TimeSpan();
    var filteredFilesTime = new TimeSpan();
    if (!Directory.Exists(directory))
    {
        Console.WriteLine("Directory not found");
    }
    else
    {
        stopwatch.Start();
        var directoryInfo = new DirectoryInfo(directory);
        try
        {
            GetAllFileFromDirectory(directoryInfo);
            takeFileTime = stopwatch.Elapsed;
        }
        catch (UnauthorizedAccessException e)
        {
            Console.WriteLine(e.Message);
            Console.WriteLine("Press any key");
            Console.ReadKey();
            Environment.Exit(0);
        }

        // Keep only files that share their length with at least one other
        // file. Grouping by length is O(n); the previous implementation
        // re-scanned the whole list for every file (O(n^2)).
        var sorted = _files
            .GroupBy(f => f.Length)
            .Where(g => g.Skip(1).Any())
            .SelectMany(g => g)
            .Select(x => new { x.FullName })
            .ToList();
        filteredFilesTime = stopwatch.Elapsed;

        _list = new File[sorted.Count];
        // Hashing is the expensive step, so do it in parallel; each index is
        // written by exactly one task, so no synchronization is needed.
        Parallel.For(0, sorted.Count, i =>
        {
            _list[i] = new File() { FileInfo = sorted[i].FullName, Hash = GetCode(sorted[i].FullName) };
        });
        time = stopwatch.Elapsed;

        // Files with identical content hashes are reported as duplicates.
        var group = _list.AsParallel().GroupBy(x => x.Hash);
        foreach (var groupItem in group)
        {
            if (groupItem.Skip(1).Any())
            {
                _builder.AppendLine("Same files:");
                foreach (var item in groupItem)
                {
                    _builder.AppendLine(item.FileInfo);
                }
            }
        }
    }
    Console.WriteLine(_builder);
    stopwatch.Stop();
    Console.WriteLine("Take all files " + takeFileTime);
    Console.WriteLine("All files " + _files.Count);
    Console.WriteLine("Filtered files " + filteredFilesTime);
    Console.WriteLine("Time without output " + time);
    Console.WriteLine("Time all " + stopwatch.Elapsed);
    Console.ReadKey();
}
/// <summary>
/// The initialize method. Registers a one-time game-load callback that
/// snapshots every relevant game object (heroes, minions, turrets, wards,
/// shops, spawn points, ...) into this class's static lists, then subscribes
/// to object create/delete and game update events so the lists stay current.
/// </summary>
public static void Initialize()
{
    try
    {
        // Guard against double initialization; everything below must run once.
        if (_initialized)
        {
            return;
        }
        _initialized = true;
        CustomEvents.Game.OnGameLoad += delegate
        {
            Player = ObjectManager.Player;
            // Nexus references for each team (null if not found).
            AllyNexus = ObjectManager.Get <Obj_HQ>().FirstOrDefault(o => o.IsAlly);
            EnemyNexus = ObjectManager.Get <Obj_HQ>().FirstOrDefault(o => o.IsEnemy);
            // Base lists. Wards and trinkets are technically minion objects, so
            // they are excluded from MinionsList and collected into WardsList.
            HeroesList.UnionWith(ObjectManager.Get <Obj_AI_Hero>());
            MinionsList.UnionWith(
                ObjectManager.Get <Obj_AI_Minion>()
                    .Where(
                        o => o.Team != GameObjectTeam.Neutral
                             && !o.CharData.BaseSkinName.ToLower().Contains("ward")
                             && !o.CharData.BaseSkinName.ToLower().Contains("trinket")));
            InhibitorsList.UnionWith(ObjectManager.Get <Obj_BarracksDampener>());
            TurretsList.UnionWith(ObjectManager.Get <Obj_AI_Turret>());
            // Neutral-team minions are the jungle camps.
            JungleList.UnionWith(
                ObjectManager.Get <Obj_AI_Minion>().Where(o => o.Team == GameObjectTeam.Neutral));
            WardsList.UnionWith(
                ObjectManager.Get <Obj_AI_Minion>()
                    .Where(
                        o => o.CharData.BaseSkinName.ToLower().Contains("ward")
                             || o.CharData.BaseSkinName.ToLower().Contains("trinket")));
            ShopsList.UnionWith(ObjectManager.Get <Obj_Shop>());
            SpawnPointsList.UnionWith(ObjectManager.Get <Obj_SpawnPoint>());
            GameObjectsList.UnionWith(ObjectManager.Get <GameObject>());
            // Enemy-side filtered views of the base lists.
            EnemyHeroesList.UnionWith(HeroesList.Where(o => o.IsEnemy));
            EnemyMinionsList.UnionWith(MinionsList.Where(o => o.IsEnemy));
            EnemyInhibitorsList.UnionWith(InhibitorsList.Where(o => o.IsEnemy));
            EnemyTurretsList.UnionWith(TurretsList.Where(o => o.IsEnemy));
            // Combined enemy list: heroes + minions + turrets.
            EnemyList.UnionWith(
                EnemyHeroesList.Concat(EnemyMinionsList.Cast <Obj_AI_Base>()).Concat(EnemyTurretsList));
            // Ally-side filtered views of the base lists.
            AllyHeroesList.UnionWith(HeroesList.Where(o => o.IsAlly));
            AllyMinionsList.UnionWith(MinionsList.Where(o => o.IsAlly));
            AllyInhibitorsList.UnionWith(InhibitorsList.Where(o => o.IsAlly));
            AllyTurretsList.UnionWith(TurretsList.Where(o => o.IsAlly));
            // Combined ally list: heroes + minions + turrets.
            AllyList.UnionWith(
                AllyHeroesList.Concat(AllyMinionsList.Cast <Obj_AI_Base>()).Concat(AllyTurretsList));
            AllyWardsList.UnionWith(WardsList.Where(o => o.IsAlly));
            EnemyWardsList.UnionWith(WardsList.Where(o => o.IsEnemy));
            AllyShopsList.UnionWith(ShopsList.Where(o => o.IsAlly));
            EnemyShopsList.UnionWith(ShopsList.Where(o => o.IsEnemy));
            AllySpawnPointsList.UnionWith(SpawnPointsList.Where(o => o.IsAlly));
            EnemySpawnPointsList.UnionWith(SpawnPointsList.Where(o => o.IsEnemy));
            // Keep the snapshots current from here on.
            GameObject.OnCreate += OnGameObjectCreate;
            GameObject.OnDelete += OnGameObjectDelete;
            Game.OnUpdate += OnGameUpdate;
        };
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
}
/// <summary>
/// Hook invoked when resources are about to be returned from the pipeline.
/// Lazily yields every todo item except those whose description marks them
/// as excluded.
/// </summary>
public override IEnumerable<TodoItem> OnReturn(HashSet<TodoItem> resources, ResourcePipeline pipeline)
{
    foreach (var item in resources)
    {
        if (item.Description != "This should not be included")
        {
            yield return item;
        }
    }
}
/// <summary>
/// Drawing-event handler: renders the target-selector overlays (selected
/// target, last target, assassin-mode circles, and per-enemy weight
/// read-outs) according to the current menu configuration.
/// </summary>
private static void OnDrawingDraw(EventArgs args)
{
    try
    {
        // Nothing to draw without a menu or while the local player is dead.
        if (_menu == null || ObjectManager.Player.IsDead)
        {
            return;
        }
        // Pull the current drawing/weight configuration from the menu.
        var weightsRange = _menu.Item(_menu.Name + ".drawing.weights-range").GetValue <bool>();
        var weightsSimple = _menu.Item(_menu.Name + ".drawing.weights-simple").GetValue <bool>();
        var weightsAdvanced = _menu.Item(_menu.Name + ".drawing.weights-advanced").GetValue <bool>();
        var weightMultiplicator = _menu.Item(_menu.Name + ".weights.heroes.weight-multiplicator").GetValue <Slider>().Value;
        var lastTarget = _menu.Item(_menu.Name + ".drawing.last-target").GetValue <Circle>();
        var assassin = _menu.Item(_menu.Name + ".assassin-mode.enabled").GetValue <bool>();
        var assassinColor = _menu.Item(_menu.Name + ".drawing.assassin-color").GetValue <Circle>();
        var assassinRange = _menu.Item(_menu.Name + ".assassin-mode.range").GetValue <Slider>().Value;
        var circleThickness = _menu.Item(_menu.Name + ".drawing.circle-thickness").GetValue <Slider>().Value;
        var focusSelected = _menu.Item(_menu.Name + ".focus-selected").GetValue <bool>();
        var selected = _menu.Item(_menu.Name + ".drawing.selected-color").GetValue <Circle>();
        // Circle around the manually selected (focused) target.
        if (_selectedTarget != null && _selectedTarget.IsValidTarget() && focusSelected && selected.Active)
        {
            Render.Circle.DrawCircle(
                _selectedTarget.Position,
                _selectedTarget.BoundingRadius + SelectClickBuffer,
                selected.Color,
                circleThickness);
        }
        // Assassin mode: circle every enabled enemy hero within range, plus a
        // circle showing the assassin range around the player.
        if (assassin && assassinColor.Active)
        {
            foreach (var target in GameObjects.EnemyHeroes.Where(
                h => _menu.Item(_menu.Name + ".assassin-mode.heroes." + h.ChampionName).GetValue <bool>()
                     && h.IsValidTarget(assassinRange) && h.Position.IsOnScreen()))
            {
                Render.Circle.DrawCircle(
                    target.Position,
                    target.BoundingRadius + SelectClickBuffer,
                    assassinColor.Color,
                    circleThickness);
            }
            Render.Circle.DrawCircle(
                ObjectManager.Player.Position, assassinRange, assassinColor.Color, circleThickness);
        }
        // Circle around the most recently chosen target, if still visible.
        if (lastTarget.Active)
        {
            if (_lastTarget != null && !_lastTarget.IsDead && _lastTarget.IsVisible && _lastTarget.Position.IsOnScreen())
            {
                Render.Circle.DrawCircle(
                    _lastTarget.Position,
                    _lastTarget.BoundingRadius + SelectClickBuffer,
                    lastTarget.Color,
                    circleThickness);
            }
        }
        // Weight read-outs are only drawn in "weights" target-selector mode.
        if ((weightsSimple || weightsAdvanced) && _tsMode == TargetSelectorModeType.Weights)
        {
            var enemies =
                GameObjects.EnemyHeroes.Where(
                    h => h.IsValidTarget(weightsRange ? _debugRange : float.MaxValue) && h.Position.IsOnScreen())
                    .ToList();
            // Refresh each active weight's simulated maximum for this enemy set
            // before per-target values are computed.
            foreach (var weight in WeightedItems.Where(w => w.Weight > 0))
            {
                weight.UpdateSimulatedMaxValue(enemies);
            }
            foreach (var target in enemies)
            {
                var position = Drawing.WorldToScreen(target.Position);
                var totalWeight = 0f;
                var offset = 0f;
                foreach (var weight in WeightedItems.Where(w => w.Weight > 0))
                {
                    var lastWeight = weight.SimulatedCalculatedWeight(target);
                    if (lastWeight > 0)
                    {
                        if (_menu != null)
                        {
                            // Per-hero multiplicator adds a share of the average
                            // weight; the global multiplicator scales the result.
                            var heroMultiplicator =
                                _menu.Item(_menu.Name + ".weights.heroes." + target.ChampionName)
                                    .GetValue <Slider>()
                                    .Value;
                            if (heroMultiplicator > 1)
                            {
                                lastWeight += _averageWeight * heroMultiplicator;
                            }
                            if (weightMultiplicator > 1)
                            {
                                lastWeight *= weightMultiplicator;
                            }
                        }
                        if (weightsAdvanced)
                        {
                            // Advanced mode: list each contributing weight beside
                            // the hero, one line per weight.
                            Drawing.DrawText(
                                position.X + target.BoundingRadius,
                                position.Y - 100 + offset,
                                Color.White,
                                lastWeight.ToString("0.0").Replace(",", ".") + " - " + weight.DisplayName);
                            offset += 17f;
                        }
                        totalWeight += lastWeight;
                    }
                }
                if (weightsSimple)
                {
                    // Simple mode: a single total drawn next to the HP bar.
                    Drawing.DrawText(
                        target.HPBarPosition.X + 55f,
                        target.HPBarPosition.Y - 20f,
                        Color.White,
                        totalWeight.ToString("0.0").Replace(",", "."));
                }
            }
        }
    }
    catch (Exception ex)
    {
        Global.Logger.AddItem(new LogItem(ex));
    }
}
/// <summary>
/// Generates F# DTO source for the supplied service metadata: an options
/// header comment, "open" statements for every referenced namespace, optional
/// assembly-level ContractNamespace attributes, then every request, response
/// and plain type in namespace/name order.
/// </summary>
/// <param name="metadata">The service metadata (types and operations) to emit.</param>
/// <returns>The generated F# source as a single string.</returns>
public string GetCode(MetadataTypes metadata)
{
    // Collect every namespace referenced by configuration and metadata.
    var namespaces = new HashSet<string>();
    Config.DefaultNamespaces.Each(x => namespaces.Add(x));
    metadata.Types.Each(x => namespaces.Add(x.Namespace));
    metadata.Operations.Each(x => namespaces.Add(x.Request.Namespace));

    var sb = new StringBuilderWrapper(new StringBuilder());
    // Emit the active configuration as an F# block comment header.
    sb.AppendLine("(* Options:");
    sb.AppendLine("Version: {0}".Fmt(Version));
    sb.AppendLine("BaseUrl: {0}".Fmt(Config.BaseUrl));
    sb.AppendLine();
    sb.AppendLine("ServerVersion: {0}".Fmt(metadata.Version));
    sb.AppendLine("MakePartial: {0}".Fmt(Config.MakePartial));
    sb.AppendLine("MakeVirtual: {0}".Fmt(Config.MakeVirtual));
    sb.AppendLine("MakeDataContractsExtensible: {0}".Fmt(Config.MakeDataContractsExtensible));
    sb.AppendLine("AddReturnMarker: {0}".Fmt(Config.AddReturnMarker));
    sb.AppendLine("AddDescriptionAsComments: {0}".Fmt(Config.AddDescriptionAsComments));
    sb.AppendLine("AddDataContractAttributes: {0}".Fmt(Config.AddDataContractAttributes));
    sb.AppendLine("AddIndexesToDataMembers: {0}".Fmt(Config.AddIndexesToDataMembers));
    sb.AppendLine("AddResponseStatus: {0}".Fmt(Config.AddResponseStatus));
    sb.AppendLine("AddImplicitVersion: {0}".Fmt(Config.AddImplicitVersion));
    sb.AppendLine("InitializeCollections: {0}".Fmt(Config.InitializeCollections));
    sb.AppendLine("AddDefaultXmlNamespace: {0}".Fmt(Config.AddDefaultXmlNamespace));
    //sb.AppendLine("DefaultNamespaces: {0}".Fmt(Config.DefaultNamespaces.ToArray().Join(", ")));
    sb.AppendLine("*)");
    sb.AppendLine();

    namespaces.Each(x => sb.AppendLine("open {0}".Fmt(x)));

    if (Config.AddDataContractAttributes && Config.AddDefaultXmlNamespace != null)
    {
        sb.AppendLine();
        var list = namespaces.Where(x => !Config.DefaultNamespaces.Contains(x)).ToList();
        list.ForEach(x =>
            sb.AppendLine("[<assembly: ContractNamespace(\"{0}\", ClrNamespace=\"{1}\")>]"
                .Fmt(Config.AddDefaultXmlNamespace, x)));
        if (list.Count > 0)
        {
            // F# requires a "do" binding after assembly-level attributes.
            sb.AppendLine("do()"); //http://scottseely.com/2009/01/23/f-assembly-level-attributes-assemblyinfo-fs-and-do/
        }
    }
    sb.AppendLine();

    string lastNS = null;

    // Tracks emitted types by full name so each type is only written once even
    // when it appears in more than one of the request/response/plain sets.
    var existingOps = new HashSet<string>();

    var requestTypes = metadata.Operations.Select(x => x.Request).ToHashSet();
    var requestTypesMap = metadata.Operations.ToSafeDictionary(x => x.Request);
    var responseTypes = metadata.Operations
        .Where(x => x.Response != null)
        .Select(x => x.Response).ToHashSet();
    var types = metadata.Types.ToHashSet();

    var allTypes = new List<MetadataType>();
    allTypes.AddRange(requestTypes);
    allTypes.AddRange(responseTypes);
    allTypes.AddRange(types);

    var orderedTypes = allTypes
        .OrderBy(x => x.Namespace)
        .ThenBy(x => x.Name);

    foreach (var type in orderedTypes)
    {
        var fullTypeName = type.GetFullName();
        if (requestTypes.Contains(type))
        {
            if (!existingOps.Contains(fullTypeName))
            {
                MetadataType response = null;
                MetadataOperationType operation;
                if (requestTypesMap.TryGetValue(type, out operation))
                {
                    response = operation.Response;
                }

                lastNS = AppendType(ref sb, type, lastNS,
                    new CreateTypeOptions
                    {
                        // Decide which IReturn marker interface (if any) the
                        // request DTO should implement.
                        ImplementsFn = () =>
                        {
                            if (!Config.AddReturnMarker
                                && !type.ReturnVoidMarker
                                && type.ReturnMarkerTypeName == null)
                            {
                                return null;
                            }

                            if (type.ReturnVoidMarker)
                            {
                                return "IReturnVoid";
                            }
                            if (type.ReturnMarkerTypeName != null)
                            {
                                return Type("IReturn`1", new[] { Type(type.ReturnMarkerTypeName) });
                            }
                            return response != null
                                ? Type("IReturn`1", new[] { Type(type.Name, type.GenericArgs) })
                                : null;
                        },
                        IsRequest = true,
                    });

                existingOps.Add(fullTypeName);
            }
        }
        else if (responseTypes.Contains(type))
        {
            if (!existingOps.Contains(fullTypeName)
                && !Config.IgnoreTypesInNamespaces.Contains(type.Namespace))
            {
                lastNS = AppendType(ref sb, type, lastNS,
                    new CreateTypeOptions
                    {
                        IsResponse = true,
                    });

                existingOps.Add(fullTypeName);
            }
        }
        else if (types.Contains(type) && !existingOps.Contains(fullTypeName))
        {
            lastNS = AppendType(ref sb, type, lastNS,
                new CreateTypeOptions { IsType = true });

            // Fix: record plain types too; without this a type appearing more
            // than once in allTypes would be emitted repeatedly (the request
            // and response branches already mark types as emitted).
            existingOps.Add(fullTypeName);
        }
    }

    if (lastNS != null)
    {
        sb.AppendLine("}");
    }

    sb.AppendLine();

    return sb.ToString();
}
/// <summary>
/// Converts a flat list of byte codes into an AST, nesting try/catch/finally
/// blocks according to the given exception handlers. Repeatedly takes the
/// first, widest try scope, recursively converts its try body and handler
/// bodies (with the handlers nested inside them), and appends the remainder.
/// Note: <paramref name="body"/> and <paramref name="ehs"/> are consumed
/// (cut/emptied) in the process.
/// </summary>
List<ILNode> ConvertToAst(List<ByteCode> body, HashSet<ExceptionHandler> ehs)
{
    List<ILNode> ast = new List<ILNode>();

    while (ehs.Any()) {
        ILTryCatchBlock tryCatchBlock = new ILTryCatchBlock();

        // Find the first and widest scope
        int tryStart = ehs.Min(eh => eh.TryStart.Offset);
        int tryEnd = ehs.Where(eh => eh.TryStart.Offset == tryStart).Max(eh => eh.TryEnd.Offset);
        var handlers = ehs.Where(eh => eh.TryStart.Offset == tryStart && eh.TryEnd.Offset == tryEnd).OrderBy(eh => eh.TryStart.Offset).ToList();

        // Remember that any part of the body might have been removed due to unreachability

        // Cut all instructions up to the try block
        {
            int tryStartIdx = 0;
            while (tryStartIdx < body.Count && body[tryStartIdx].Offset < tryStart) {
                tryStartIdx++;
            }
            ast.AddRange(ConvertToAst(body.CutRange(0, tryStartIdx)));
        }

        // Cut the try block
        {
            // Handlers fully contained in (but not equal to) this try scope are
            // nested and handled by the recursive call.
            HashSet<ExceptionHandler> nestedEHs = new HashSet<ExceptionHandler>(ehs.Where(eh => (tryStart <= eh.TryStart.Offset && eh.TryEnd.Offset < tryEnd) || (tryStart < eh.TryStart.Offset && eh.TryEnd.Offset <= tryEnd)));
            ehs.ExceptWith(nestedEHs);
            int tryEndIdx = 0;
            while (tryEndIdx < body.Count && body[tryEndIdx].Offset < tryEnd) {
                tryEndIdx++;
            }
            tryCatchBlock.TryBlock = new ILBlock(ConvertToAst(body.CutRange(0, tryEndIdx), nestedEHs));
        }

        // Cut all handlers
        tryCatchBlock.CatchBlocks = new List<ILTryCatchBlock.CatchBlock>();
        foreach (ExceptionHandler eh in handlers) {
            // A null HandlerEnd means the handler extends to the end of the method.
            int handlerEndOffset = eh.HandlerEnd == null ? methodDef.Body.CodeSize : eh.HandlerEnd.Offset;
            int startIdx = 0;
            while (startIdx < body.Count && body[startIdx].Offset < eh.HandlerStart.Offset) {
                startIdx++;
            }
            int endIdx = 0;
            while (endIdx < body.Count && body[endIdx].Offset < handlerEndOffset) {
                endIdx++;
            }
            HashSet<ExceptionHandler> nestedEHs = new HashSet<ExceptionHandler>(ehs.Where(e => (eh.HandlerStart.Offset <= e.TryStart.Offset && e.TryEnd.Offset < handlerEndOffset) || (eh.HandlerStart.Offset < e.TryStart.Offset && e.TryEnd.Offset <= handlerEndOffset)));
            ehs.ExceptWith(nestedEHs);
            List<ILNode> handlerAst = ConvertToAst(body.CutRange(startIdx, endIdx - startIdx), nestedEHs);
            if (eh.HandlerType == ExceptionHandlerType.Catch) {
                ILTryCatchBlock.CatchBlock catchBlock = new ILTryCatchBlock.CatchBlock() {
                    ExceptionType = eh.CatchType,
                    Body = handlerAst
                };
                // Handle the automatically pushed exception on the stack
                ByteCode ldexception = ldexceptions[eh];
                if (ldexception.StoreTo == null || ldexception.StoreTo.Count == 0) {
                    // Exception is not used
                    catchBlock.ExceptionVariable = null;
                } else if (ldexception.StoreTo.Count == 1) {
                    ILExpression first = catchBlock.Body[0] as ILExpression;
                    if (first != null &&
                        first.Code == ILCode.Pop &&
                        first.Arguments[0].Code == ILCode.Ldloc &&
                        first.Arguments[0].Operand == ldexception.StoreTo[0])
                    {
                        // The exception is just popped - optimize it all away;
                        // Fix: the stray "} } ; else" token sequence (an empty
                        // statement between the if-block and "else") was a
                        // syntax error; the object initializer's terminating
                        // semicolon belongs before the closing brace.
                        if (context.Settings.AlwaysGenerateExceptionVariableForCatchBlocks) {
                            catchBlock.ExceptionVariable = new ILVariable() { Name = "ex_" + eh.HandlerStart.Offset.ToString("X2"), IsGenerated = true };
                        } else {
                            catchBlock.ExceptionVariable = null;
                        }
                        catchBlock.Body.RemoveAt(0);
                    } else {
                        catchBlock.ExceptionVariable = ldexception.StoreTo[0];
                    }
                } else {
                    // The exception is stored to multiple locals: introduce one
                    // generated variable and copy it into each of them.
                    ILVariable exTemp = new ILVariable() { Name = "ex_" + eh.HandlerStart.Offset.ToString("X2"), IsGenerated = true };
                    catchBlock.ExceptionVariable = exTemp;
                    foreach (ILVariable storeTo in ldexception.StoreTo) {
                        catchBlock.Body.Insert(0, new ILExpression(ILCode.Stloc, storeTo, new ILExpression(ILCode.Ldloc, exTemp)));
                    }
                }
                tryCatchBlock.CatchBlocks.Add(catchBlock);
            } else if (eh.HandlerType == ExceptionHandlerType.Finally) {
                tryCatchBlock.FinallyBlock = new ILBlock(handlerAst);
            } else if (eh.HandlerType == ExceptionHandlerType.Fault) {
                tryCatchBlock.FaultBlock = new ILBlock(handlerAst);
            } else {
                // TODO: ExceptionHandlerType.Filter
            }
        }

        ehs.ExceptWith(handlers);
        ast.Add(tryCatchBlock);
    }

    // Add whatever is left
    ast.AddRange(ConvertToAst(body));

    return ast;
}
/// <summary>
/// Returns the dimension items for a repository, first inserting any new
/// refinement values found in <paramref name="list"/> into the appropriate
/// dimension-value table (own or parent, for inherited repositories) and
/// refreshing the cache when inserts occurred.
/// </summary>
/// <param name="context">The datastore context passed through to the cache lookup.</param>
/// <param name="schema">The repository schema; must not be null.</param>
/// <param name="id">The repository identifier.</param>
/// <param name="list">Optional field updates that may introduce new refinement values.</param>
/// <returns>The (possibly refreshed) cached dimension items.</returns>
public List <DimensionItem> Get(DatastoreEntities context, RepositorySchema schema, int id, IEnumerable <DataFieldUpdate> list = null)
{
    if (schema == null)
    {
        throw new Exception("The schema is null");
    }
    try
    {
        var dimensionValueTableName = SqlHelper.GetDimensionValueTableName(schema.ID);
        var dimensionValueTableNameParent = string.Empty;
        var parameters = new List <SqlParameter>();
        var didxParam = 0;
        // NOTE(review): _cache guards the whole read-modify-write; long SQL
        // calls below run while holding this lock.
        lock (_cache)
        {
            var retval = GetCache(context, id, schema);

            #region Do this after "GetCache" call as it will flush the cache if need be
            //If there is a parent repository then get parent schema as we will need to know which dimension table to use for different fields
            RepositorySchema diff = null;
            if (schema.ParentID != null)
            {
                if (!_parentSchemaCache.ContainsKey(schema.ID))
                {
                    var parentSchema = RepositoryManager.GetSchema(schema.ParentID.Value);
                    _parentSchemaCache.Add(schema.ID, schema.Subtract(parentSchema));
                }
                diff = _parentSchemaCache[schema.ID];
                dimensionValueTableNameParent = SqlHelper.GetDimensionValueTableName(schema.ParentID.Value);
            }
            #endregion

            var sb = new StringBuilder();
            parameters = new List <SqlParameter>();
            didxParam = 0;
            var dvidxParam = 0;

            #region Find new refinements in list
            //Create a cache of all next keys (next free DVIdx per dimension)
            var _nextKeys = new Dictionary <DimensionItem, long>();
            //TODO: this is taking too long on every request (~1%)
            retval.Results.ForEach(z => _nextKeys.Add(z, z.RefinementList.OrderByDescending(x => x.DVIdx).Select(x => x.DVIdx).FirstOrDefault() + 1));
            var paramIndex = 0;
            var needSave = false;
            if (list != null)
            {
                foreach (var item in list.Where(x => x.FieldValue != null))
                {
                    // Normalize the incoming field value(s) into a set of
                    // distinct string refinement values.
                    var values = new HashSet <string>();
                    var dimension = schema.FieldList.FirstOrDefault(x => x.Name == item.FieldName) as DimensionDefinition;
                    if (dimension != null)
                    {
                        if (dimension.DataType == RepositorySchema.DataTypeConstants.List)
                        {
                            var l = (string[])item.FieldValue;
                            foreach (var v in l)
                            {
                                if (!values.Contains(v))
                                {
                                    values.Add(v);
                                }
                            }
                        }
                        else
                        {
                            if ((dimension.DataType == RepositorySchema.DataTypeConstants.Int || dimension.DataType == RepositorySchema.DataTypeConstants.Int64) && dimension.NumericBreak != null && dimension.NumericBreak > 0)
                            {
                                // Bucket numeric values to the nearest NumericBreak multiple.
                                var v = Convert.ToInt64(item.FieldValue);
                                var scaled = ((v / dimension.NumericBreak) * dimension.NumericBreak).ToString();
                                if (!values.Contains(scaled))
                                {
                                    values.Add(scaled);
                                }
                            }
                            else
                            {
                                var v = SqlHelper.GetTypedDimValue(dimension.DataType, item.FieldValue);
                                if (!values.Contains(v))
                                {
                                    values.Add(v);
                                }
                            }
                        }
                    }
                    //for unique values if not exist then insert
                    foreach (var v in values?.Where(x => x != null).ToList())
                    {
                        // Dimension-value keys live in per-dimension ranges; child
                        // (inherited) repositories use the extended range constants.
                        long baseDVIdx;
                        if (schema.ParentID != null && diff.DimensionList.Any(x => x.DIdx == dimension.DIdx))
                        {
                            baseDVIdx = ((dimension.DIdx - Constants.DGROUPEXT) + 1) * Constants.DVALUEGROUPEXT; //Child Repository
                        }
                        else
                        {
                            baseDVIdx = ((dimension.DIdx - Constants.DGROUP) + 1) * Constants.DVALUEGROUP; //Normal
                        }
                        // NOTE(review): dbDimension may be null if the dimension is
                        // not present in the cached results — confirm this cannot
                        // happen before the dereference below.
                        var dbDimension = retval.Results.FirstOrDefault(x => x.DIdx == dimension.DIdx);
                        if (!dbDimension.RefinementList.Any(x => x.FieldValue == v))
                        {
                            if (!_nextKeys.ContainsKey(dbDimension)) //If was empty then default to base index
                            {
                                _nextKeys.Add(dbDimension, baseDVIdx);
                            }
                            if (_nextKeys[dbDimension] == 1) //If was empty then default to base index
                            {
                                _nextKeys[dbDimension] = baseDVIdx;
                            }
                            var nextDVIdx = _nextKeys[dbDimension];
                            _nextKeys[dbDimension]++;
                            var newParam = new SqlParameter
                            {
                                DbType = DbType.String,
                                IsNullable = false,
                                ParameterName = $"@__z{paramIndex}",
                                Value = v
                            };
                            parameters.Add(newParam);
                            paramIndex++;
                            if (diff == null)
                            {
                                //This is for stand-alone tables. There is only one dimension table
                                var paramDIdx = new SqlParameter
                                {
                                    DbType = DbType.Int64,
                                    IsNullable = false,
                                    ParameterName = $"@__didx{didxParam}",
                                    Value = dimension.DIdx
                                };
                                parameters.Add(paramDIdx);
                                var paramDVIdx = new SqlParameter
                                {
                                    DbType = DbType.Int64,
                                    IsNullable = false,
                                    ParameterName = $"@__dvidx{dvidxParam}",
                                    Value = nextDVIdx
                                };
                                parameters.Add(paramDVIdx);
                                didxParam++;
                                dvidxParam++;
                                sb.AppendLine($"if not exists(select * from [{dimensionValueTableName}] where [DIdx] = {paramDIdx.ParameterName} and [DVIdx] = {paramDVIdx.ParameterName})");
                                sb.AppendLine($"insert into [{dimensionValueTableName}] ([DIdx], [DVIdx], [Value]) values ({paramDIdx.ParameterName}, {paramDVIdx.ParameterName}, {newParam.ParameterName})");
                            }
                            else
                            {
                                //This is for inherited tables. Figure out which dimension table to use
                                var tempTable = dimensionValueTableNameParent;
                                if (diff.DimensionList.Any(x => x.DIdx == dimension.DIdx))
                                {
                                    tempTable = dimensionValueTableName;
                                }
                                var paramDIdx = new SqlParameter
                                {
                                    DbType = DbType.Int64,
                                    IsNullable = false,
                                    ParameterName = $"@__didx{didxParam}",
                                    Value = dimension.DIdx
                                };
                                parameters.Add(paramDIdx);
                                var paramDVIdx = new SqlParameter
                                {
                                    DbType = DbType.Int64,
                                    IsNullable = false,
                                    ParameterName = $"@__dvidx{dvidxParam}",
                                    Value = nextDVIdx
                                };
                                parameters.Add(paramDVIdx);
                                didxParam++;
                                dvidxParam++;
                                sb.AppendLine($"if not exists(select * from [{tempTable}] where [DIdx] = {paramDIdx.ParameterName} and [DVIdx] = {paramDVIdx.ParameterName})");
                                sb.AppendLine($"insert into [{tempTable}] ([DIdx], [DVIdx], [Value]) values ({paramDIdx.ParameterName}, {paramDVIdx.ParameterName}, {newParam.ParameterName})");
                            }
                            needSave = true;
                        }
                    }
                }
                if (needSave)
                {
                    // Persist all new refinement values in one batch, then
                    // invalidate and reload the cache.
                    SqlHelper.ExecuteSql(ConfigHelper.ConnectionString, sb.ToString(), parameters, false);
                    Clear(id);
                    SqlHelper.MarkDimensionsChanged(id);
                    retval = GetCache(context, id, schema);
                    needSave = false;
                }
            }
            #endregion

            return(retval.Results);
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): pure rethrow — the stack trace is preserved, but the
        // catch adds nothing; "ex" is unused.
        throw;
    }
}
/// <summary>
/// Returns the internal tags that begin with the given marker, with that
/// marker stripped from each result; empty array when there are no tags.
/// NOTE(review): despite its name, <paramref name="suffix"/> is matched as a
/// leading marker (StartsWith) — confirm the intended semantics with callers.
/// </summary>
public string[] GetInternalTags(string suffix)
{
    if (InternalTags == null)
    {
        return Array.Empty<string>();
    }

    return InternalTags
           .Where(t => t.StartsWith(suffix, StringComparison.InvariantCulture))
           .Select(t => t.Substring(suffix.Length))
           .ToArray();
}
static void Main(string[] args)
{
    // Reads reservation codes until "PARTY", removes guests who arrive
    // (until "END"), then prints how many invitees never showed up —
    // VIP codes (starting with a digit) first, regular codes after.
    HashSet<string> partyInvited = new HashSet<string>();
    string input = Console.ReadLine();
    while (input.Equals("PARTY") == false)
    {
        // Only well-formed 8-character reservation codes are accepted.
        if (input.Length == 8)
        {
            partyInvited.Add(input);
        }
        input = Console.ReadLine();
    }

    input = Console.ReadLine();
    while (input.Equals("END") == false)
    {
        partyInvited.Remove(input);
        input = Console.ReadLine();
    }

    int count = partyInvited.Count;
    Console.WriteLine(count);

    // VIP codes start with a digit and are listed before regular codes.
    // Materialize each query once (avoids double enumeration by Any + Join),
    // and use Any() instead of Count() != 0 (CA1827).
    var vipInvited = partyInvited
        .Where(p => char.IsDigit(p[0]))
        .ToList();
    if (vipInvited.Any())
    {
        Console.WriteLine(
            string.Join(Environment.NewLine, vipInvited));
    }

    var regularInvited = partyInvited
        .Where(p => !char.IsDigit(p[0]))
        .ToList();
    if (regularInvited.Any())
    {
        Console.WriteLine(
            string.Join(Environment.NewLine, regularInvited));
    }
}
/// <summary>
/// Parses the chip definition file into a ComputerModel: pin declarations
/// ("input"/"output"/"pin") and truth tables ("truth"), then assigns each pin
/// an evaluation offset (inputs first, then intermediates in dependency
/// order, then outputs). On any parse error the partially built model is
/// returned after logging to stderr.
/// </summary>
ComputerModel GenerateModel()
{
    ComputerModel model = new ComputerModel { Name = ChipName };
    foreach (ReadLine line in ReadFile(ChipName, "", new Dictionary <string, string>()))
    {
        Console.WriteLine(string.Join(" ", line.Tokens));
        switch (line.Tokens[0])
        {
        case "input":
        case "output":
        case "pin":
            // Pin declaration: <kind> <name> <width>.
            if (line.Tokens.Length != 3)
            {
                Console.Error.WriteLine("Invalid {0} command: invalid number of arguments ({1}:{2})", line.Tokens[0], line.FileName, line.LineNumber);
                return(model);
            }
            PinType type;
            switch (line.Tokens[0])
            {
            case "input":
                type = PinType.Input;
                break;

            case "output":
                type = PinType.Output;
                break;

            default:
                type = PinType.Intermediate;
                break;
            }
            int width;
            if (!int.TryParse(line.Tokens[2], out width) || width < 1)
            {
                Console.Error.WriteLine("Invalid {0} command: invalid width '{1}' ({2}:{3})", line.Tokens[0], line.Tokens[2], line.FileName, line.LineNumber);
                return(model);
            }
            if (width == 1)
            {
                model.Pins.Add(new Pin { Type = type, Name = line.Tokens[1], NameOfType = line.Tokens[1], FirstOfType = true });
            }
            else
            {
                // Multi-bit pins expand to one Pin per bit, named
                // "<name><bit>" with the highest bit first.
                model.Pins.AddRange(Enumerable.Range(1, width).Select(i => new Pin { Type = type, Name = string.Format("{0}{1}", line.Tokens[1], width - i), NameOfType = line.Tokens[1], FirstOfType = i == 1 }));
            }
            break;

        case "truth":
            // Truth table: truth <in1,in2,...> <out> <2^n values>.
            if (line.Tokens.Length < 5)
            {
                Console.Error.WriteLine("Invalid truth command: invalid number of arguments ({0}:{1})", line.FileName, line.LineNumber);
                return(model);
            }
            TruthTable table = new TruthTable
            {
                Inputs = line.Tokens[1].Split(',').Select(s => model.Pins.FirstOrDefault(p => p.Name == s)).ToList(),
                Output = model.Pins.FirstOrDefault(p => p.Name == line.Tokens[2] && p.Type != PinType.Input)
            };
            if (table.Inputs.Any(p => p == null) || table.Output == null)
            {
                Console.Error.WriteLine("Unknown pin reference in truth table ({0}:{1})", line.FileName, line.LineNumber);
                return(model);
            }
            if (line.Tokens.Length != 3 + Math.Pow(2, table.Inputs.Count))
            {
                Console.Error.WriteLine("Invalid truth command: invalid number of arguments ({0}:{1})",
                                        line.FileName, line.LineNumber);
                return(model);
            }
            // "key" is a binary counter over the input combinations; each
            // token fills one row of the table in counting order.
            // NOTE(review): table.Table is keyed by bool[] — with the default
            // comparer array keys compare by reference, so lookups presumably
            // use a custom comparer elsewhere; confirm.
            bool[] key = new bool[table.Inputs.Count];
            Array.Fill(key, false);
            for (int i = 3; i < line.Tokens.Length; ++i)
            {
                bool value;
                switch (line.Tokens[i])
                {
                case "0":
                    value = false;
                    break;

                case "1":
                    value = true;
                    break;

                default:
                    Console.Error.WriteLine("Invalid truth command: invalid value ({0}:{1})", line.FileName, line.LineNumber);
                    return(model);
                }
                // Copy the current key so the stored array is not mutated by
                // the increment below.
                bool[] copy = new bool[key.Length];
                Array.Copy(key, copy, key.Length);
                table.Table[copy] = value;
                // Increment the binary counter (least-significant bit last).
                for (int j = key.Length - 1; j >= 0; --j)
                {
                    if (key[j])
                    {
                        key[j] = false;
                    }
                    else
                    {
                        key[j] = true;
                        break;
                    }
                }
            }
            model.TruthTables.Add(table);
            break;

        default:
            Console.Error.WriteLine("Invalid command '{0}' ({1}:{2})", line.Tokens[0], line.FileName, line.LineNumber);
            return(model);
        }
    }
    // Assign evaluation offsets: inputs first.
    int offset = 0;
    foreach (Pin pin in model.Pins.Where(p => p.Type == PinType.Input))
    {
        pin.Offset = offset++;
    }
    // Then intermediates, in dependency order: a pin is mapped only once all
    // inputs of every truth table driving it are already mapped.
    HashSet <Pin> unmapped = model.Pins.Where(p => p.Type == PinType.Intermediate).ToHashSet();
    while (unmapped.Count > 0)
    {
        Pin[] toMap = unmapped.Where(p => model.TruthTables.Where(t => t.Output == p).All(t => t.Inputs.All(p2 => !unmapped.Contains(p2)))).ToArray();
        foreach (Pin pin in toMap)
        {
            pin.Offset = offset++;
            unmapped.Remove(pin);
        }
        // No progress means a dependency cycle among intermediate pins.
        if (toMap.Length == 0)
        {
            Console.Error.WriteLine("Unable to map pins without recursion.");
            Console.Error.WriteLine("The following pins failed:");
            foreach (Pin pin in unmapped)
            {
                Console.Error.WriteLine("  {0}", pin.Name);
            }
            return(model);
        }
    }
    // Outputs are mapped last.
    foreach (Pin pin in model.Pins.Where(p => p.Type == PinType.Output))
    {
        pin.Offset = offset++;
    }
    return(model);
}
/// <summary>
/// Returns the set of people strictly older than 30.
/// (The OrderBy mirrors the original call chain; a HashSet does not
/// guarantee any enumeration order, so the sort presumably has no lasting
/// effect — kept for behavioral parity.)
/// </summary>
public HashSet<Person> GetPeopleAbove30()
{
    var above30 = people.Where(p => p.Age > 30);
    var orderedByName = above30.OrderBy(p => p.Name);
    return orderedByName.ToHashSet();
}
/// <summary>
/// Computes the list of diff-grams that describe how <paramref name="current"/>
/// differs from <paramref name="priorVersion"/> (another version of the same
/// node): removals first, then property changes, then additions. Nodes that
/// merely moved inside an added/removed subtree are not reported separately.
/// </summary>
/// <param name="current">The current version of the node; must share its identity with <paramref name="priorVersion"/>.</param>
/// <param name="priorVersion">The earlier version to diff against.</param>
/// <returns>The ordered change history; empty when both references are the same object.</returns>
/// <exception cref="System.ArgumentException">Thrown when the two nodes do not share the same identity.</exception>
public static IReadOnlyList <TDiffGram> ChangesSince <TPropertiesEnum, TDiffGram>(this IRecursiveDiffingType <TPropertiesEnum, TDiffGram> current, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> priorVersion)
    where TPropertiesEnum : struct
    where TDiffGram : struct
{
    Requires.NotNull(current, "current");
    Requires.NotNull(priorVersion, "priorVersion");

    // Same object reference: nothing changed.
    if (current == priorVersion)
    {
        return(System.Collections.Immutable.ImmutableList.Create <TDiffGram>());
    }

    if (priorVersion.Identity != current.Identity)
    {
        throw new System.ArgumentException("Not another version of the same node.", "priorVersion");
    }

    var currentAsParent = current as IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >;
    var currentAsRecursiveType = (IRecursiveDiffingType <TPropertiesEnum, TDiffGram>)current;

    // Flatten both versions into identity-keyed sets of (node, parent) pairs;
    // leaf nodes contribute just themselves.
    var before = new HashSet <ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> > >(Comparers.Parented <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >());
    var after = new HashSet <ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> > >(Comparers.Parented <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >());

    var priorVersionAsParent = priorVersion as IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >;
    if (priorVersionAsParent != null)
    {
        before.UnionWith(priorVersionAsParent.GetSelfAndDescendentsWithParents <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >());
    }
    else
    {
        before.Add(priorVersion.WithParent());
    }

    if (currentAsParent != null)
    {
        after.UnionWith(currentAsParent.GetSelfAndDescendentsWithParents <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >());
    }
    else
    {
        after.Add(current.WithParent());
    }

    // Classify every node as added, removed, or changed (same identity but a
    // different instance or a different parent).
    var added = new HashSet <ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> > >(Comparers.Parented <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >());
    var removed = new HashSet <ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> > >(Comparers.Parented <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >());
    var changed = new Dictionary <ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> > >(Comparers.Parented <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >());

    var descendentsOfAddOrRemove = new HashSet <IRecursiveType>(Comparers.Identity);

    foreach (var fromBefore in before)
    {
        if (after.Contains(fromBefore))
        {
            // Present in both versions: locate the current counterpart so the
            // two instances can be compared.
            ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> > fromAfter;
            if (currentAsParent != null)
            {
                var parent = currentAsParent.GetParentedNode(fromBefore.Value.Identity);
                fromAfter = new ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >(
                    (IRecursiveDiffingType <TPropertiesEnum, TDiffGram>)parent.Value,
                    (IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >)parent.Parent);
            }
            else
            {
                fromAfter = new ParentedRecursiveType <IRecursiveParent <IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >, IRecursiveDiffingType <TPropertiesEnum, TDiffGram> >(
                    fromBefore.Value.Identity == current.Identity ? (IRecursiveDiffingType <TPropertiesEnum, TDiffGram>)current : null);
            }

            // Changed when the instance or the parent identity differs.
            if (!object.ReferenceEquals(fromBefore.Value, fromAfter.Value) || fromBefore.Parent.Identity != fromAfter.Parent.Identity)
            {
                changed.Add(fromBefore, fromAfter);
            }
        }
        else
        {
            removed.Add(fromBefore);
        }
    }

    foreach (var fromAfter in after)
    {
        if (!before.Contains(fromAfter))
        {
            added.Add(fromAfter);
        }
    }

    // Collect descendants of added/removed subtrees so they are not reported
    // as separate add/remove operations below.
    foreach (var topLevelOperation in added.Concat(removed))
    {
        descendentsOfAddOrRemove.UnionWith(topLevelOperation.Value.GetSelfAndDescendents().Skip(1));
    }

    // Assemble the history: removals, then property changes, then additions.
    var history = new List <TDiffGram>();
    history.AddRange(removed.Where(r => !descendentsOfAddOrRemove.Contains(r.Value)).Select(r => currentAsRecursiveType.Remove(r.Value)));

    foreach (var changedNode in changed)
    {
        var oldNode = changedNode.Key;
        var newNode = changedNode.Value;

        var diff = newNode.DiffProperties(oldNode);
        // Only record a change when at least one property flag is set.
        if (!currentAsRecursiveType.Equals(diff, default(TPropertiesEnum)))
        {
            history.Add(currentAsRecursiveType.Change(oldNode.Value, newNode.Value, diff));
        }
    }

    history.AddRange(added.Where(a => !descendentsOfAddOrRemove.Contains(a.Value)).Select(a => currentAsRecursiveType.Add(a.Value)));
    return(history);
}
/// <summary>
/// Writes all pending in-memory changes (changed shadow files, deletions, and new
/// explicit directories) back into the underlying zip archive, then optionally
/// refreshes the cached node infos.
/// </summary>
/// <param name="refresh">When true, re-reads node infos from the archive after committing.</param>
private void Flush(bool refresh)
{
    // NOTE(review): lock(this) is discouraged (external code could lock the same
    // instance); a private lock object would be safer — confirm before changing.
    lock (this)
    {
        // Files with a shadow copy have pending content changes; files marked
        // not-existing but still backed by a zip entry are pending deletions.
        var filesChanged = zipFileInfos.Values.Where(c => c.ShadowFile != null).ToList();
        var filesDeleted = zipFileInfos.Values.Where(c => !c.Exists && c.ZipEntry != null).ToList();

        // Collect the set of directories currently present in the archive.
        // Entry names have no leading '/' so one is prepended to normalize.
        var setOfPreviousDirectories = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
        foreach (ZLib.ZipEntry zipEntry in zipFile)
        {
            if (zipEntry.IsDirectory)
            {
                // Directory entries end with '/'; strip it before normalizing.
                setOfPreviousDirectories.Add("/" + zipEntry.Name.Substring(0, zipEntry.Name.Length - 1));
            }
            else
            {
                // A file entry implies its parent directory exists implicitly.
                var x = zipEntry.Name.LastIndexOf('/');
                if (x > 0)
                {
                    var path = zipEntry.Name.Substring(0, x);
                    setOfPreviousDirectories.Add("/" + path);
                }
            }
        }

        // Directories implied by current files vs. all current directories
        // (implicit ones need no explicit directory entry in the archive).
        // AbsolutePath presumably already carries a leading '/' — TODO confirm.
        var setOfCurrentImplicitDirectories = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
        var setOfCurrentDirectories = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
        foreach (var zipFileInfo in zipFileInfos.Values)
        {
            if (zipFileInfo.Exists)
            {
                var x = zipFileInfo.AbsolutePath.LastIndexOf('/');
                if (x > 0)
                {
                    var path = zipFileInfo.AbsolutePath.Substring(0, x);
                    setOfCurrentDirectories.Add(path);
                    setOfCurrentImplicitDirectories.Add(path);
                }
            }
        }
        foreach (var zipDirectoryInfo in zipDirectoryInfos.Values.Where(c => c.Exists))
        {
            setOfCurrentDirectories.Add(zipDirectoryInfo.AbsolutePath);
        }

        // Diff previous vs. current directory sets.
        var setOfNewDirectories = new HashSet<string>(setOfCurrentDirectories.Where(c => !setOfPreviousDirectories.Contains(c)), StringComparer.InvariantCultureIgnoreCase);
        var setOfDeletedDirectories = new HashSet<string>(setOfPreviousDirectories.Where(c => !setOfCurrentDirectories.Contains(c)), StringComparer.InvariantCultureIgnoreCase);
        // Only directories not already implied by a file need explicit entries.
        var setOfDirectoriesToCreate = new HashSet<string>(setOfNewDirectories.Where(c => !setOfCurrentImplicitDirectories.Contains(c)), StringComparer.InvariantCultureIgnoreCase);
        setOfDirectoriesToCreate.Remove("/");

        // NOTE(review): directory creations are only flushed when at least one
        // file changed or was deleted; a directory-only change appears to be
        // skipped here — confirm whether that is intentional.
        if (filesChanged.Count > 0 || filesDeleted.Count > 0)
        {
            zipFile.BeginUpdate();
            try
            {
                foreach (var zipFileInfo in filesChanged)
                {
                    var shadowFile = zipFileInfo.ShadowFile;
                    var name = zipFileInfo.AbsolutePath;
                    try
                    {
                        zipFile.Add(new StreamDataSource(shadowFile.GetContent().GetInputStream()), name);
                    }
                    catch (FileNodeNotFoundException)
                    {
                        // Shadow file vanished before flush; skip this entry (best effort).
                    }
                }
                foreach (var zipFileInfo in filesDeleted)
                {
                    zipFile.Delete(zipFileInfo.ZipEntry);
                }
                foreach (var directoryToCreate in setOfDirectoriesToCreate)
                {
                    zipFile.AddDirectory(directoryToCreate);
                }
                foreach (var directory in setOfDeletedDirectories)
                {
                    // SharpZipLib currently doesn't support removing explicit directories
                }
            }
            finally
            {
                // Commit even on failure so the archive is left in a consistent state.
                zipFile.CommitUpdate();
            }
        }
        if (refresh)
        {
            this.RefreshNodeInfos();
        }
    }
}
/// <summary>
/// Returns the names of all known PresentMon processes, excluding any name
/// contained in <paramref name="filter"/>.
/// Returns null when the underlying process list has not been populated yet
/// (preserves the original null contract).
/// </summary>
/// <param name="filter">Set of process names to exclude from the result.</param>
public IEnumerable<string> GetAllFilteredProcesses(HashSet<string> filter)
{
    lock (_listLock)
    {
        // Materialize inside the lock: the original returned a deferred LINQ
        // query, so enumeration (and the read of _presentMonProcesses) ran
        // only after the lock had been released, defeating its purpose.
        return _presentMonProcesses?.Where(processName => !filter.Contains(processName)).ToList();
    }
}
/// <summary>
/// Returns every stored market matching the given currency pair,
/// ordered by highest bid first (lazily evaluated).
/// </summary>
public IEnumerable<IMarketData> GetMarketsFor(string marketCurrency, string baseCurrency) =>
    from market in _storage
    where market.MarketCurrency == marketCurrency && market.BaseCurrency == baseCurrency
    orderby market.Bid descending
    select market;
/// <summary>
/// Retrieves the bodyparts satisfying the given predicate.
/// The query is deferred; it is evaluated when enumerated.
/// </summary>
/// <param name="predicate">Filter applied to each bodypart.</param>
public IEnumerable<Bodypart> GetBodyparts(Func<Bodypart, bool> predicate) => bodyparts.Where(predicate);
/// <summary>
/// Pipeline hook: filters out locked passports before they are returned.
/// Result is materialized as an array so it is stable for the caller.
/// </summary>
public override IEnumerable<Passport> OnReturn(HashSet<Passport> resources, ResourcePipeline pipeline)
{
    var unlocked = new List<Passport>();
    foreach (var passport in resources)
    {
        if (!passport.IsLocked)
        {
            unlocked.Add(passport);
        }
    }
    return unlocked.ToArray();
}
/// <summary>
/// Returns all metadata types marked as tables, or an empty list when the
/// type registry has not been initialized. Never returns null.
/// </summary>
public static List<DynamoMetadataType> GetTables()
{
    var registeredTypes = Types;
    if (registeredTypes == null)
    {
        return new List<DynamoMetadataType>();
    }
    return registeredTypes.Where(x => x.IsTable).ToList();
}
/// <summary>
/// Wires up connection-state bookkeeping and starts a WinRT device watcher
/// that tracks paired Bluetooth association endpoints, auto-(re)connecting to
/// the target device when it comes into range.
/// </summary>
/// <exception cref="PlatformNotSupportedException">
/// Thrown when the device watcher cannot be created (e.g. missing/incompatible
/// Bluetooth driver).
/// </exception>
public BluetoothService()
{
    // Keep IsConnecting/IsStreamConnected in sync with our own events.
    Connecting += (sender, args) => IsConnecting = true;
    Connected += (sender, args) => IsConnecting = false;
    BluetoothErrorAsync += (sender, exception) => { IsConnecting = false; IsStreamConnected = false; };
    Disconnected += (sender, exception) => { IsConnecting = false; IsStreamConnected = false; };
    // AEP properties requested from the watcher for each discovered device.
    string[] requestedProperties = { "System.Devices.Aep.DeviceAddress", "System.Devices.Aep.IsConnected", "System.Devices.Aep.IsPaired" };
    try
    {
        // GUID filter selects Bluetooth RFCOMM association endpoints.
        _deviceWatcher = DeviceInformation.CreateWatcher(
            "(System.Devices.Aep.ProtocolId:=\"{e0cbf06c-cd8b-4647-bb8a-263b43f0f974}\")",
            requestedProperties,
            DeviceInformationKind.AssociationEndpoint);
        _deviceWatcher.Added += (watcher, deviceInfo) =>
        {
            Log.Debug($"WindowsRT.BluetoothService: Device added: {deviceInfo.Id}");
            // Unnamed endpoints are ignored; only named devices enter the cache.
            if (deviceInfo.Name != string.Empty)
            {
                _deviceCache?.Add(new BluetoothDeviceRT(deviceInfo));
            }
        };
        _deviceWatcher.Updated += (watcher, deviceInfoUpdate) =>
        {
            Log.Debug($"WindowsRT.BluetoothService: Device updated: {deviceInfoUpdate?.Id}");
            // If the updated device is our target and its IsConnected property
            // changed, connect or raise Disconnected accordingly.
            // NOTE(review): the async lambda passed to ForEach is effectively
            // async void — exceptions thrown inside it are unobservable.
            _deviceCache?.Where(x => x?.Id == deviceInfoUpdate?.Id).ToList().ForEach(async x =>
            {
                if (string.Equals(x.Address, _bluetoothDevice?.BluetoothAddressAsString(), StringComparison.CurrentCultureIgnoreCase)
                    && (deviceInfoUpdate?.Properties?.ContainsKey("System.Devices.Aep.IsConnected") ?? false))
                {
                    if ((bool)deviceInfoUpdate.Properties["System.Devices.Aep.IsConnected"])
                    {
                        Log.Debug($"WindowsRT.BluetoothService: Target device connected");
                        // Fallback GUID is the standard SerialPort (SPP) service UUID.
                        await ConnectAsync(x.Address, _service?.ServiceId?.AsString() ?? "{00001101-0000-1000-8000-00805F9B34FB}");
                    }
                    else
                    {
                        Log.Debug($"WindowsRT.BluetoothService: Target device disconnected");
                        Disconnected?.Invoke(this, "Device disconnected");
                    }
                }
            });
            // Propagate the property update into the cached device objects.
            // NOTE(review): `deviceInfoUpdate ?? null` is a no-op coalesce.
            _deviceCache?.Where(x => deviceInfoUpdate?.Id == x?.Id)
                .ToList()
                .ForEach(x => x?.Update(deviceInfoUpdate ?? null));
        };
        _deviceWatcher.Removed += (watcher, deviceInfoUpdate) =>
        {
            Log.Debug($"WindowsRT.BluetoothService: Device removed: {deviceInfoUpdate.Id}");
            // If the removed endpoint is our target device, notify listeners
            // before evicting it from the cache.
            _deviceCache?.Where(x => x?.Id == deviceInfoUpdate?.Id).ToList().ForEach(x =>
            {
                if (string.Equals(x?.Address, _bluetoothDevice?.BluetoothAddressAsString(), StringComparison.CurrentCultureIgnoreCase))
                {
                    Log.Debug($"WindowsRT.BluetoothService: Target device removed");
                    Disconnected?.Invoke(this, "Device was removed/un-paired from Windows. Please check your computer's bluetooth settings.");
                }
            });
            _deviceCache?.RemoveWhere(x => x?.Id == deviceInfoUpdate?.Id);
        };
        _deviceWatcher.EnumerationCompleted += (watcher, obj) => { Log.Debug("WindowsRT.BluetoothService: Device enumeration complete"); };
        _deviceWatcher.Stopped += (watcher, obj) => { Log.Warning("WindowsRT.BluetoothService: Device watcher stopped"); };
        _deviceWatcher.Start();
        Log.Debug("WindowsRT.BluetoothService: Device watcher launched");
    }
    catch (ArgumentException ex)
    {
        // CreateWatcher throws ArgumentException when the AQS filter / protocol
        // GUID is not recognized (typically a driver problem).
        Log.Error(
            $"WindowsRT.BluetoothService: Failed to set up device watcher. Protocol GUID probably not found. Details: {ex}");
        throw new PlatformNotSupportedException("Failed to set up device watcher. Make sure you have a compatible Bluetooth driver installed.");
    }
}
/// <summary>
/// Updating the repo:
/// 1. load saved share meta data (meta data will be as current as last time online user was online)
/// 2. load current state of share folder
/// 3. compare the two sets for adds/mods/deletes
/// 4. push changes to the server, updating meta data on success
///    a. if there is a conflicted file, copy it locally using name server gives
/// 5. ask server for current file list
/// 6. compare this list against local share
/// 7. download out-of-date and new files, updating meta data as we go
/// 8. write out meta data
/// </summary>
public void SyncWithTheCloud()
{
    // 1. load saved share meta data (meta data will be as current as last time online user was online)
    ShareMetaData = this.loadMetaData();
    // 2. load current state of share folder
    LocalShare = this.GetFiles();
    // 3. compare the two sets for adds/mods/deletes
    HashSet<File> changeSet = this.GetChangeSet(this.LocalShare, this.ShareMetaData, new StartupComparer());
    // 4. push changes to the server, updating meta data on success
    foreach (File f in changeSet)
    {
        // differentiate deletes (null content) from adds
        if (f.content == null) // delete
        {
            File deleteResult = OrangeCloudServer.DeleteFile(f);
            // if the file is null, the server didn't respond;
            // do nothing now and try to push again later
            if (deleteResult == null)
            {
                continue;
            }
            // if server returns -1, we have a conflict
            else if (deleteResult.version == -1)
            {
                // do something
            }
            // operation was successful with server
            else
            {
                // BUG FIX: was Where(...).First(), which throws when no metadata
                // entry matches, making the null check below dead code.
                File metaFile = ShareMetaData.FirstOrDefault(file => file.fullPath == f.fullPath);
                if (metaFile != null)
                {
                    ShareMetaData.Remove(metaFile);
                }
            }
        }
        else // add
        {
            File addResult = OrangeCloudServer.Add(f);
            // if the file is null, the server didn't respond;
            // do nothing now and try to push again later
            if (addResult == null)
            {
                continue;
            }
            // if server returns -1, we have a conflict
            else if (addResult.version == -1)
            {
                // do something
            }
            // operation was successful with server
            else
            {
                // Update the metadata entry matched by fullPath (consistent with
                // the delete branch); previously Contains(f) used the set's own
                // equality, which could diverge from the fullPath match and make
                // SingleOrDefault(...).version throw a NullReferenceException.
                File metaFile = ShareMetaData.SingleOrDefault(file => file.fullPath == f.fullPath);
                if (metaFile != null)
                {
                    // update the version
                    metaFile.version = addResult.version;
                }
                // add it to the meta data
                else
                {
                    f.version = addResult.version;
                    ShareMetaData.Add(f);
                }
            }
        }
    }
    // drop the delete markers; only real files remain in the change set
    changeSet.RemoveWhere(files => files.content == null);
    // 5. ask server for current file list
    HashSet<File> ServerList = OrangeCloudServer.GetFiles();
    // 6. compare this list against local share
    HashSet<File> newFiles = this.GetChangeSet(ServerList, LocalShare, new DefaultComparer());
    // 7. download out-of-date and new files, updating meta data as we go
    this.DownloadNewFiles(newFiles);
    // 8. write out meta data
    System.IO.File.WriteAllText(Environment.CurrentDirectory + "\\metadata.json", JsonConvert.SerializeObject(ShareMetaData));
}
/// <summary>
/// Finds all cached players matching <paramref name="filter"/>.
/// </summary>
/// <param name="filter">Predicate applied to each cached player.</param>
/// <returns>A completed task holding the matching players as an array.</returns>
public Task<Player[]> FindPlayers(Func<Player, bool> filter)
{
    // The lookup is fully synchronous; return a completed task directly
    // instead of paying for an async state machine around Task.FromResult.
    // Callers are unaffected: the signature still returns Task<Player[]>.
    return Task.FromResult(cache.Where(filter).ToArray());
}
/// <summary>
/// Handles one play-info sync round-trip for a client: returns requested player
/// infos, persists save data, merges the client's world-object adds/updates/
/// deletes into the shared store, and sends back other players' changes plus
/// pending mail and server commands.
/// </summary>
/// <param name="packet">Client state: requested logins, save data, world objects.</param>
/// <param name="context">Per-connection context holding the authenticated player.</param>
/// <returns>The response packet for this client.</returns>
public ModelPlayToClient playInfo(ModelPlayToServer packet, ServiceContext context)
{
    lock (context.Player)
    {
        var data = Repository.GetData;
        var timeNow = DateTime.UtcNow;
        var toClient = new ModelPlayToClient();
        toClient.UpdateTime = timeNow;
        // Return public info for requested players, limited to those this
        // player is allowed to see.
        if (packet.GetPlayersInfo != null && packet.GetPlayersInfo.Count > 0)
        {
            var pSee = StaticHelper.PartyLoginSee(context.Player);
            var pGet = new HashSet<string>(packet.GetPlayersInfo);
            pGet.IntersectWith(pSee);
            toClient.PlayersInfo = pGet
                .Where(l => Repository.GetData.PlayersAllDic.ContainsKey(l))
                .Select(l => Repository.GetData.PlayersAllDic[l].Public)
                .ToList();
        }
        // Persist uploaded save data, if any.
        if (packet.SaveFileData != null && packet.SaveFileData.Length > 0)
        {
            Repository.GetSaveData.SavePlayerData(context.Player.Public.Login, packet.SaveFileData, packet.SingleSave);
            context.Player.Public.LastSaveTime = timeNow;
            Repository.Get.ChangeData = true;
        }
        context.Player.Public.LastTick = packet.LastTick;
        if (context.Player.GetKeyReconnect())
        {
            toClient.KeyReconnect = context.Player.KeyReconnect1;
        }
        var pLogin = context.Player.Public.Login;
        // packet.WObjects: all of this player's world objects; we add any we lack.
        var pWOs = packet.WObjects ?? new List<WorldObjectEntry>();
        // packet.WObjectsToDelete: this player's world objects that must be removed.
        var pDs = packet.WObjectsToDelete ?? new List<WorldObjectEntry>();
        // Send back objects that had no ServerId (new to the server) plus all changes.
        var outWO = new List<WorldObjectEntry>();
        var outWOD = new List<WorldObjectEntry>();
        // First request if the client sent none of its own objects.
        var first = pWOs.Count == 0;
        lock (data)
        {
            // Apply the client's deletions (only for objects it owns).
            for (int i = 0; i < pDs.Count; i++)
            {
                if (pDs[i].LoginOwner != context.Player.Public.Login)
                {
                    continue;
                }
                var sid = pDs[i].ServerId;
                var pD = data.WorldObjects.FirstOrDefault(p => p.ServerId == sid);
                if (pD != null)
                {
                    // Move from the live store to the tombstone list.
                    pD.UpdateTime = timeNow;
                    data.WorldObjects.Remove(pD);
                    data.WorldObjectsDeleted.Add(pD);
                }
            }
            // Apply the client's adds/updates (only for objects it owns).
            for (int i = 0; i < pWOs.Count; i++)
            {
                if (pWOs[i].LoginOwner != context.Player.Public.Login)
                {
                    continue; // just in case
                }
                var sid = pWOs[i].ServerId;
                if (sid == 0)
                {
                    // New object: assign a server id, store it, and echo it back.
                    pWOs[i].UpdateTime = timeNow;
                    pWOs[i].ServerId = data.GetWorldObjectEntryId();
                    data.WorldObjects.Add(pWOs[i]);
                    outWO.Add(pWOs[i]);
                    continue;
                }
                var WO = data.WorldObjects.FirstOrDefault(p => p.ServerId == sid);
                if (WO != null)
                {
                    // Object already known: refresh any changed fields, bumping
                    // UpdateTime only when something actually differs.
                    if (WO.Name != pWOs[i].Name) { WO.UpdateTime = timeNow; WO.Name = pWOs[i].Name; }
                    if (WO.FreeWeight != pWOs[i].FreeWeight) { WO.UpdateTime = timeNow; WO.FreeWeight = pWOs[i].FreeWeight; }
                    if (WO.MarketValue != pWOs[i].MarketValue) { WO.UpdateTime = timeNow; WO.MarketValue = pWOs[i].MarketValue; }
                    if (WO.MarketValuePawn != pWOs[i].MarketValuePawn) { WO.UpdateTime = timeNow; WO.MarketValuePawn = pWOs[i].MarketValuePawn; }
                    if (WO.Tile != pWOs[i].Tile) { WO.UpdateTime = timeNow; WO.Tile = pWOs[i].Tile; }
                }
                else
                {
                    Loger.Log("PlayInfo find error add WO: " + pWOs[i].Name + " sid=" + sid);
                }
            }
            // Send other players' objects (plus everything on the first request).
            for (int i = 0; i < data.WorldObjects.Count; i++)
            {
                if (data.WorldObjects[i].UpdateTime < packet.UpdateTime)
                {
                    continue;
                }
                if (!first && data.WorldObjects[i].LoginOwner == pLogin)
                {
                    continue;
                }
                outWO.Add(data.WorldObjects[i]);
            }
            // Send other players' deleted objects (not for the first request).
            if (packet.UpdateTime > DateTime.MinValue)
            {
                for (int i = 0; i < data.WorldObjectsDeleted.Count; i++)
                {
                    if (data.WorldObjectsDeleted[i].UpdateTime < packet.UpdateTime)
                    {
                        // Purge tombstones older than 2 minutes (they only need to
                        // survive the window between the deleting player's upload
                        // and other online players' next sync, which runs ~5 s).
                        // BUG FIX: TotalSeconds was compared against 120000
                        // (~33 hours); two minutes is 120 seconds.
                        if ((timeNow - data.WorldObjectsDeleted[i].UpdateTime).TotalSeconds > 120)
                        {
                            data.WorldObjectsDeleted.RemoveAt(i--);
                        }
                        continue;
                    }
                    if (data.WorldObjectsDeleted[i].LoginOwner == pLogin)
                    {
                        continue;
                    }
                    outWOD.Add(data.WorldObjectsDeleted[i]);
                }
            }
            // Finished gathering world-object info for the client.
            toClient.WObjects = outWO;
            toClient.WObjectsToDelete = outWOD;
            context.Player.LastUpdateTime = timeNow;
        }
        // Attach mail. If there is a disconnect-without-save command pending,
        // send only that single letter.
        var md = context.Player.Mails.FirstOrDefault(m => m.Type == ModelMailTradeType.AttackCancel);
        if (md == null)
        {
            toClient.Mails = context.Player.Mails;
            context.Player.Mails = new List<ModelMailTrade>();
        }
        else
        {
            toClient.Mails = new List<ModelMailTrade>() { md };
            context.Player.Mails.Remove(md);
        }
        // Command: save and disconnect (admins are exempt).
        toClient.NeedSaveAndExit = !context.Player.IsAdmin && data.EverybodyLogoff;
        // Flag: someone is attacking this client; it must request the details.
        toClient.AreAttacking = context.Player.AttackData != null
            && context.Player.AttackData.Host == context.Player
            && context.Player.AttackData.State == 1;
        return toClient;
    }
}
/// <summary>
/// Gets the security database names, optionally restricted to one named server.
/// Returns null when the server collection has not been initialized.
/// </summary>
/// <param name="ServerName">Server to filter by; null/blank means all servers.</param>
public IEnumerable<String> GetSecurityDBNames(String ServerName = null)
{
    if (_ICP4SecurityServers == null)
    {
        return null;
    }
    if (String.IsNullOrWhiteSpace(ServerName))
    {
        return _ICP4SecurityServers.Select(x => x.SecurityDB.Name);
    }
    return _ICP4SecurityServers.Where(x => x.Name == ServerName).Select(x => x.SecurityDB.Name);
}
/// <summary>
/// Constructor. Computes the reduced set of includes for an output file by
/// expanding every include to the fragments it uniquely supplies, then keeping
/// only those includes required to satisfy this file's fragment and symbol
/// dependencies.
/// </summary>
/// <param name="InputFile">The input file that this output file corresponds to</param>
/// <param name="HeaderFile">Matching header for a translation unit, or null</param>
/// <param name="PreviousFiles">Output files generated before this one (candidate includes)</param>
/// <param name="Includes">Includes appearing in the input file; mutated in place</param>
/// <param name="InputFileStack">Chain of files whose inclusion led to this file</param>
/// <param name="FwdSymbolToHeader">Map of symbols to the headers that forward-declare them</param>
/// <param name="bMakeStandalone">Whether to inject includes so the file compiles standalone</param>
/// <param name="Log">Writer for diagnostic warnings</param>
public OutputFile(SourceFile InputFile, OutputFile HeaderFile, IEnumerable<OutputFile> PreviousFiles, List<OutputFileInclude> Includes, List<SourceFile> InputFileStack, Dictionary<Symbol, OutputFile> FwdSymbolToHeader, bool bMakeStandalone, TextWriter Log)
{
    this.InputFile = InputFile;
    this.Includes = Includes;
    // Only translation units have a matching header.
    Debug.Assert(HeaderFile == null || (InputFile.Flags & SourceFileFlags.TranslationUnit) != 0);

    // Traverse through all the included headers, figuring out the first unique include for each file and fragment
    HashSet<OutputFile> VisitedFiles = new HashSet<OutputFile>();
    HashSet<SourceFragment> VisitedFragments = new HashSet<SourceFragment>();

    // Go through the standalone headers first
    OutputFile MonolithicHeader = null;
    if (HeaderFile == null && (InputFile.Flags & SourceFileFlags.Standalone) != 0 && (InputFile.Flags & SourceFileFlags.External) == 0 && (InputFile.Flags & SourceFileFlags.Aggregate) == 0)
    {
        // Insert a dummy include to receive all the inserted headers
        OutputFileInclude ImplicitInclude = new OutputFileInclude(-1, null);
        ImplicitInclude.ExpandedReferences = new List<OutputFileReference>();
        Includes.Insert(0, ImplicitInclude);

        // Determine which monolithic header to use
        IEnumerable<OutputFile> PotentialMonolithicHeaders = PreviousFiles.Union(Includes.Select(x => x.TargetFile).Where(x => x != null).SelectMany(x => x.IncludedFiles));
        if (InputFile.Module != null && InputFile.Module.PublicDependencyModules.Union(InputFile.Module.PrivateDependencyModules).Any(x => x.Name == "Core"))
        {
            MonolithicHeader = PotentialMonolithicHeaders.FirstOrDefault(x => (x.InputFile.Flags & SourceFileFlags.IsCoreMinimal) != 0);
        }
        else
        {
            MonolithicHeader = PotentialMonolithicHeaders.FirstOrDefault(x => (x.InputFile.Flags & SourceFileFlags.IsCoreTypes) != 0);
        }

        // Update the dependencies to treat all the contents of a monolithic header as pinned
        if (MonolithicHeader != null)
        {
            SourceFragment[] UniqueFragments = MonolithicHeader.IncludedFragments.Except(VisitedFragments).ToArray();
            ImplicitInclude.ExpandedReferences.Add(new OutputFileReference(MonolithicHeader, UniqueFragments));
            VisitedFragments.UnionWith(UniqueFragments);
            VisitedFiles.Add(MonolithicHeader);
        }

        // Insert all the forward declaration headers, but only treat them as supplying the forward declarations themselves. They may happen to include
        // some utility classes (eg. TSharedPtr), and we don't want to include an unrelated header to satisfy that dependency.
        foreach (OutputFile FwdHeader in FwdSymbolToHeader.Values)
        {
            FindExpandedReferences(FwdHeader, ImplicitInclude.ExpandedReferences, VisitedFiles, VisitedFragments, true);
        }

        // Add all the other files
        if (bMakeStandalone)
        {
            foreach (OutputFile PreviousFile in PreviousFiles)
            {
                // VisitedFiles.Add doubles as the "not seen yet" test.
                if ((PreviousFile.InputFile.Flags & SourceFileFlags.Inline) == 0 && (PreviousFile.InputFile.Flags & SourceFileFlags.Pinned) == 0 && VisitedFiles.Add(PreviousFile))
                {
                    SourceFragment[] UniqueFragments = PreviousFile.IncludedFragments.Except(VisitedFragments).ToArray();
                    ImplicitInclude.ExpandedReferences.Add(new OutputFileReference(PreviousFile, UniqueFragments));
                    VisitedFragments.UnionWith(UniqueFragments);
                }
            }
        }
    }

    // Figure out a list of files which are uniquely reachable through each include. Force an include of the matching header as the first thing.
    foreach (OutputFileInclude Include in Includes)
    {
        if (Include.ExpandedReferences == null)
        {
            Include.ExpandedReferences = new List<OutputFileReference>();
            if (Include == Includes[0] && HeaderFile != null)
            {
                Include.ExpandedReferences.Add(new OutputFileReference(HeaderFile, HeaderFile.IncludedFragments));
                VisitedFragments.UnionWith(HeaderFile.IncludedFragments);
            }
            FindExpandedReferences(Include.TargetFile, Include.ExpandedReferences, VisitedFiles, VisitedFragments, true);
        }
    }

    // Find all the symbols which are referenced by this file
    HashSet<SourceFragment> FragmentsWithReferencedSymbols = new HashSet<SourceFragment>();
    foreach (SourceFragment Fragment in InputFile.Fragments)
    {
        foreach (KeyValuePair<Symbol, SymbolReferenceType> ReferencedSymbol in Fragment.ReferencedSymbols)
        {
            if (ReferencedSymbol.Value == SymbolReferenceType.RequiresDefinition)
            {
                FragmentsWithReferencedSymbols.Add(ReferencedSymbol.Key.Fragment);
            }
        }
    }

    // Aggregate headers are designed to explicitly include headers from the current module. Expand out a list of them, so they can be included when encountered.
    HashSet<OutputFile> ExplicitIncludes = new HashSet<OutputFile>();
    if ((InputFile.Flags & SourceFileFlags.Aggregate) != 0)
    {
        foreach (OutputFileInclude Include in Includes)
        {
            ExplicitIncludes.UnionWith(Include.ExpandedReferences.Where(x => x.File.InputFile.Location.IsUnderDirectory(InputFile.Location.Directory)).Select(x => x.File));
        }
        foreach (OutputFileInclude Include in Includes)
        {
            ExplicitIncludes.Remove(Include.TargetFile);
        }
    }

    // Create the list of remaining dependencies for this file, and add any forward declarations
    Dependencies = new HashSet<SourceFragment>();
    AddForwardDeclarations(InputFile, ForwardDeclarations, Dependencies, FwdSymbolToHeader);

    // Reduce the list of includes to those that are required.
    // Walks fragments last-to-first, consuming includes back-to-front so each
    // include is attributed to the earliest fragment that needs it.
    for (int FragmentIdx = InputFile.Fragments.Length - 1, IncludeIdx = Includes.Count - 1; FragmentIdx >= 0; FragmentIdx--)
    {
        // Update the dependency lists for this fragment
        SourceFragment InputFragment = InputFile.Fragments[FragmentIdx];
        if (InputFragment.Dependencies != null)
        {
            Dependencies.UnionWith(InputFragment.Dependencies);
        }
        Dependencies.Remove(InputFragment);

        // Scan backwards through the list of includes, expanding each include to those which are required
        int MarkupMin = (FragmentIdx == 0)? -1 : InputFragment.MarkupMin;
        for (; IncludeIdx >= 0 && Includes[IncludeIdx].MarkupIdx >= MarkupMin; IncludeIdx--)
        {
            OutputFileInclude Include = Includes[IncludeIdx];

            // Always include the same header for aggregates
            if ((InputFile.Flags & SourceFileFlags.Aggregate) != 0)
            {
                Include.FinalFiles.Insert(0, Include.TargetFile);
                Dependencies.ExceptWith(Include.TargetFile.IncludedFragments);
                Dependencies.UnionWith(Include.TargetFile.Dependencies);
            }

            // Include any indirectly included files
            for (int Idx = Include.ExpandedReferences.Count - 1; Idx >= 0; Idx--)
            {
                // Make sure we haven't already added it above
                OutputFileReference Reference = Include.ExpandedReferences[Idx];
                if (!Include.FinalFiles.Contains(Reference.File))
                {
                    // Keep the reference if it satisfies an outstanding dependency,
                    // is pinned, is the matching/monolithic header, is an explicit
                    // aggregate include, or supplies a required symbol definition.
                    if (Dependencies.Any(x => Reference.UniqueFragments.Contains(x))
                        || (Reference.File.InputFile.Flags & SourceFileFlags.Pinned) != 0
                        || Reference.File == HeaderFile
                        || Reference.File == MonolithicHeader
                        || ExplicitIncludes.Contains(Reference.File)
                        || ((InputFile.Flags & SourceFileFlags.Aggregate) != 0 && Reference.File == Include.TargetFile) // Always include the original header for aggregates. They are written explicitly to include certain files.
                        || Reference.UniqueFragments.Any(x => FragmentsWithReferencedSymbols.Contains(x)))
                    {
                        Include.FinalFiles.Insert(0, Reference.File);
                        Dependencies.ExceptWith(Reference.File.IncludedFragments);
                        Dependencies.UnionWith(Reference.File.Dependencies);
                    }
                }
            }
        }
    }

    // Remove any includes that are already included by the matching header
    if (HeaderFile != null)
    {
        HashSet<OutputFile> HeaderIncludedFiles = new HashSet<OutputFile>(HeaderFile.Includes.SelectMany(x => x.FinalFiles));
        foreach (OutputFileInclude Include in Includes)
        {
            Include.FinalFiles.RemoveAll(x => HeaderIncludedFiles.Contains(x));
        }
    }

    // Check that all the dependencies have been satisfied
    if (Dependencies.Count > 0)
    {
        // Find those which are completely invalid
        List<SourceFragment> InvalidDependencies = Dependencies.Where(x => !InputFileStack.Contains(x.File)).ToList();
        if (InvalidDependencies.Count > 0)
        {
            Log.WriteLine("warning: {0} does not include {1}; may have missing dependencies.", InputFile, String.Join(", ", InvalidDependencies.Select(x => x.Location.FullName)));
        }
        Dependencies.ExceptWith(InvalidDependencies);

        // Otherwise warn about those which were not pinned
        foreach (SourceFile DependencyFile in Dependencies.Select(x => x.File))
        {
            Log.WriteLine("warning: {0} is included by {1} ({2}), but depends on it and should be marked as pinned.", InputFile, DependencyFile, String.Join(" -> ", InputFileStack.SkipWhile(x => x != DependencyFile).Select(x => x.Location.GetFileName())));
        }

        // Mark it as non-standalone and pinned
        InputFile.Flags = (InputFile.Flags | SourceFileFlags.Pinned) & ~SourceFileFlags.Standalone;
    }

    // Do one more forward pass through all the headers, and remove anything that's included more than once. That can happen if we have a referenced symbol as well as
    // an explicit include, for example.
    HashSet<OutputFile> FinalIncludes = new HashSet<OutputFile>();
    foreach (OutputFileInclude Include in Includes)
    {
        for (int Idx = 0; Idx < Include.FinalFiles.Count; Idx++)
        {
            if (!FinalIncludes.Add(Include.FinalFiles[Idx]))
            {
                Include.FinalFiles.RemoveAt(Idx);
                Idx--;
            }
        }
    }

    // Build the list of satisfied dependencies
    IncludedFragments = new HashSet<SourceFragment>();
    IncludedFragments.UnionWith(Includes.SelectMany(x => x.FinalFiles).SelectMany(x => x.IncludedFragments));
    IncludedFragments.UnionWith(InputFile.Fragments);

    // Build the list of all the included files, so other output files that include it can expand it out.
    IncludedFiles = new HashList<OutputFile>();
    IncludedFiles.UnionWith(Includes.SelectMany(x => x.FinalFiles).SelectMany(x => x.IncludedFiles));
    IncludedFiles.Add(this);
}
/// <summary>
/// Generates the TypeScript DTO source for the given service metadata: emits an
/// options header (reflecting query-string overrides), import lines, and one
/// module containing every request, response, and plain type exactly once.
/// </summary>
/// <param name="metadata">Service metadata (types and operations) to render.</param>
/// <param name="request">Current request; query string toggles option comments.</param>
/// <param name="nativeTypes">Native types metadata (unused here — TODO confirm).</param>
/// <returns>The generated TypeScript source as a string.</returns>
public string GetCode(MetadataTypes metadata, IRequest request, INativeTypesMetadata nativeTypes)
{
    var typeNamespaces = new HashSet<string>();
    metadata.RemoveIgnoredTypes(Config);
    metadata.Types.Each(x => typeNamespaces.Add(x.Namespace));
    metadata.Operations.Each(x => typeNamespaces.Add(x.Request.Namespace));

    var defaultImports = !Config.DefaultImports.IsEmpty()
        ? Config.DefaultImports
        : DefaultImports;

    // Look first for shortest Namespace ending with `ServiceModel` convention, else shortest ns
    var globalNamespace = Config.GlobalNamespace
        ?? typeNamespaces.Where(x => x.EndsWith("ServiceModel"))
            .OrderBy(x => x).FirstOrDefault()
        ?? typeNamespaces.OrderBy(x => x).First();

    // Options present in the query string are emitted uncommented; absent ones
    // are prefixed with "//" so the header doubles as documentation.
    Func<string, string> defaultValue = k => request.QueryString[k].IsNullOrEmpty() ? "//" : "";

    var sbInner = StringBuilderCache.Allocate();
    var sb = new StringBuilderWrapper(sbInner);
    sb.AppendLine("/* Options:");
    sb.AppendLine("Date: {0}".Fmt(DateTime.Now.ToString("s").Replace("T", " ")));
    sb.AppendLine("Version: {0}".Fmt(Env.ServiceStackVersion));
    sb.AppendLine("Tip: {0}".Fmt(HelpMessages.NativeTypesDtoOptionsTip.Fmt("//")));
    sb.AppendLine("BaseUrl: {0}".Fmt(Config.BaseUrl));
    sb.AppendLine();
    sb.AppendLine("{0}GlobalNamespace: {1}".Fmt(defaultValue("GlobalNamespace"), Config.GlobalNamespace));
    sb.AppendLine("{0}ExportAsTypes: {1}".Fmt(defaultValue("ExportAsTypes"), Config.ExportAsTypes));
    sb.AppendLine("{0}MakePropertiesOptional: {1}".Fmt(defaultValue("MakePropertiesOptional"), Config.MakePropertiesOptional));
    sb.AppendLine("{0}AddServiceStackTypes: {1}".Fmt(defaultValue("AddServiceStackTypes"), Config.AddServiceStackTypes));
    sb.AppendLine("{0}AddResponseStatus: {1}".Fmt(defaultValue("AddResponseStatus"), Config.AddResponseStatus));
    sb.AppendLine("{0}AddImplicitVersion: {1}".Fmt(defaultValue("AddImplicitVersion"), Config.AddImplicitVersion));
    sb.AppendLine("{0}IncludeTypes: {1}".Fmt(defaultValue("IncludeTypes"), Config.IncludeTypes.Safe().ToArray().Join(",")));
    sb.AppendLine("{0}ExcludeTypes: {1}".Fmt(defaultValue("ExcludeTypes"), Config.ExcludeTypes.Safe().ToArray().Join(",")));
    sb.AppendLine("{0}DefaultImports: {1}".Fmt(defaultValue("DefaultImports"), defaultImports.Join(",")));
    sb.AppendLine("*/");
    sb.AppendLine();

    string lastNS = null;

    // Tracks emitted full names so each type is rendered at most once.
    var existingTypes = new HashSet<string>();

    var requestTypes = metadata.Operations.Select(x => x.Request).ToHashSet();
    var requestTypesMap = metadata.Operations.ToSafeDictionary(x => x.Request);
    var responseTypes = metadata.Operations
        .Where(x => x.Response != null)
        .Select(x => x.Response).ToHashSet();
    var types = metadata.Types.ToHashSet();

    // Order matters for emission: plain types, then responses, then requests.
    var allTypes = new List<MetadataType>();
    allTypes.AddRange(types);
    allTypes.AddRange(responseTypes);
    allTypes.AddRange(requestTypes);
    allTypes.RemoveAll(x => x.IgnoreType(Config));

    //TypeScript doesn't support reusing same type name with different generic airity
    var conflictPartialNames = allTypes.Map(x => x.Name).Distinct()
        .GroupBy(g => g.SplitOnFirst('`')[0])
        .Where(g => g.Count() > 1)
        .Select(g => g.Key)
        .ToList();

    this.conflictTypeNames = allTypes
        .Where(x => conflictPartialNames.Any(name => x.Name.StartsWith(name)))
        .Map(x => x.Name);

    defaultImports.Each(x => sb.AppendLine("import {0};".Fmt(x)));
    sb.AppendLine();

    // "declare module" for ambient typings unless exporting concrete types.
    var moduleDef = Config.ExportAsTypes ? "" : "declare ";
    sb.AppendLine("{0}module {1}".Fmt(moduleDef, globalNamespace.SafeToken()));
    sb.AppendLine("{");

    //ServiceStack core interfaces
    foreach (var type in allTypes)
    {
        var fullTypeName = type.GetFullName();
        if (requestTypes.Contains(type))
        {
            if (!existingTypes.Contains(fullTypeName))
            {
                MetadataType response = null;
                MetadataOperationType operation;
                if (requestTypesMap.TryGetValue(type, out operation))
                {
                    response = operation.Response;
                }

                lastNS = AppendType(ref sb, type, lastNS, new CreateTypeOptions
                {
                    // Decides which IReturn marker interface the request implements.
                    ImplementsFn = () =>
                    {
                        if (!Config.AddReturnMarker
                            && !type.ReturnVoidMarker
                            && type.ReturnMarkerTypeName == null)
                        {
                            return(null);
                        }
                        if (type.ReturnVoidMarker)
                        {
                            return("IReturnVoid");
                        }
                        if (type.ReturnMarkerTypeName != null)
                        {
                            return(Type("IReturn`1", new[] { Type(type.ReturnMarkerTypeName).InDeclarationType() }));
                        }
                        return(response != null
                            ? Type("IReturn`1", new[] { Type(response.Name, response.GenericArgs).InDeclarationType() })
                            : null);
                    },
                    IsRequest = true,
                });

                existingTypes.Add(fullTypeName);
            }
        }
        else if (responseTypes.Contains(type))
        {
            if (!existingTypes.Contains(fullTypeName)
                && !Config.IgnoreTypesInNamespaces.Contains(type.Namespace))
            {
                lastNS = AppendType(ref sb, type, lastNS, new CreateTypeOptions
                {
                    IsResponse = true,
                });

                existingTypes.Add(fullTypeName);
            }
        }
        else if (types.Contains(type) && !existingTypes.Contains(fullTypeName))
        {
            lastNS = AppendType(ref sb, type, lastNS, new CreateTypeOptions { IsType = true });
            existingTypes.Add(fullTypeName);
        }
    }

    sb.AppendLine();
    sb.AppendLine("}");

    return(StringBuilderCache.ReturnAndFree(sbInner));
}