/// <summary>
/// Saves the sound files for a voice line and, when enabled, writes the matching
/// subtitle text files next to them.
/// </summary>
public static void SaveVoiceStimulus(ICLIFlags flags, string path, SaveContext context, FindLogic.Combo.VoiceLineInstanceInfo voiceLineInstanceInfo)
{
    // Whether subtitle .txt files should be written alongside the sounds.
    var saveSubtitles = true;
    if (flags is ExtractFlags extractFlags)
    {
        saveSubtitles = extractFlags.SubtitlesWithSounds;
    }

    var realPath = path;
    // De-duplicated, non-zero sound GUIDs belonging to this voice line.
    var soundSet = new HashSet<ulong>(voiceLineInstanceInfo.SoundFiles.Where(x => x != 0));
    // When set, used as the base file name for a single sound + subtitle pair.
    string overrideName = null;
    if (saveSubtitles)
    {
        // Collect subtitle text from both the static and runtime subtitle entries.
        IEnumerable<string> subtitle = new HashSet<string>();
        if (context.m_info.m_subtitles.TryGetValue(voiceLineInstanceInfo.Subtitle, out var subtitleInfo))
        {
            subtitle = subtitle.Concat(subtitleInfo.m_text);
        }
        if (context.m_info.m_subtitles.TryGetValue(voiceLineInstanceInfo.SubtitleRuntime, out var subtitleRuntimeInfo))
        {
            subtitle = subtitle.Concat(subtitleRuntimeInfo.m_text);
        }
        var subtitleSet = new HashSet<string>(subtitle);
        if (subtitleSet.Any())
        {
            if (soundSet.Count > 1)
            {
                // Several sounds share this subtitle: group them in a folder named
                // after the subtitle text and write one combined subtitles file.
                realPath = Path.Combine(realPath, GetValidFilename(subtitleSet.First().Trim().TrimEnd('.')));
                WriteFile(string.Join("\n", subtitleSet), Path.Combine(realPath, $"{teResourceGUID.LongKey(voiceLineInstanceInfo.Subtitle):X8}-{teResourceGUID.LongKey(voiceLineInstanceInfo.SubtitleRuntime):X8}-subtitles.txt"));
            }
            else if (soundSet.Count == 1)
            {
                try
                {
                    // Name the single sound after its GUID plus the subtitle text,
                    // truncated to keep the file name within a safe length.
                    overrideName = GetValidFilename($"{teResourceGUID.AsString(soundSet.First())}-{subtitleSet.First().TrimEnd('.')}");
                    if (overrideName.Length > 128)
                    {
                        overrideName = overrideName.Substring(0, 100);
                    }
                    WriteFile(string.Join("\n", subtitleSet), Path.Combine(realPath, $"{overrideName}.txt"));
                }
                catch
                {
                    // Fall back to just the GUID if the subtitle produced an unusable name.
                    overrideName = teResourceGUID.AsString(soundSet.First());
                    WriteFile(string.Join("\n", subtitleSet), Path.Combine(realPath, $"{overrideName}.txt"));
                }
            }
        }
    }

    foreach (ulong soundFile in soundSet)
    {
        SaveSoundFile(flags, realPath, context, soundFile, true, overrideName);
    }
}
/// <summary>
/// Collects components of type <typeparamref name="T"/> from this GameObject and,
/// depending on the flags, its children, its parent chain, and ancestor subtrees.
/// </summary>
/// <param name="g">The GameObject to search from.</param>
/// <param name="in_parent">Also search the parent hierarchy.</param>
/// <param name="in_children">Also search the children.</param>
/// <param name="sibling_depth">How many ancestor levels to additionally scan (each ancestor's whole subtree is searched).</param>
/// <param name="ignore_self">Exclude components on <paramref name="g"/> itself.</param>
/// <returns>The distinct components found.</returns>
public static T[] FindComponents<T>(this UnityEngine.GameObject g, bool in_parent = true, bool in_children = true, int sibling_depth = 0, bool ignore_self = false) where T : Component
{
    HashSet<T> components = new HashSet<T>();
    // BUG FIX: the original called components.Concat(...) everywhere and discarded
    // the result (LINQ Concat returns a new sequence and does not mutate the set);
    // UnionWith actually accumulates the found components.
    if (ignore_self)
    {
        if (in_children)
        {
            foreach (Transform child in g.transform)
            {
                components.UnionWith(child.GetComponentsInChildren<T>());
            }
        }
        // BUG FIX: guard against a missing parent before dereferencing it.
        if (in_parent && g.transform.parent)
        {
            components.UnionWith(g.transform.parent.GetComponentsInParent<T>());
        }
        return components.ToArray();
    }
    if (!in_children && !in_parent)
    {
        return g.GetComponents<T>();
    }
    if (in_children)
    {
        components.UnionWith(g.GetComponentsInChildren<T>());
    }
    if (in_parent && g.transform.parent)
    {
        components.UnionWith(g.transform.parent.GetComponentsInParent<T>());
    }
    // Walk up the ancestor chain, adding components found under each ancestor.
    GameObject current = g;
    while (sibling_depth > 0)
    {
        // BUG FIX: stop at the hierarchy root instead of dereferencing a null parent.
        Transform parent = current.transform.parent;
        if (!parent)
        {
            break;
        }
        current = parent.gameObject;
        components.UnionWith(current.GetComponentsInChildren<T>());
        sibling_depth--;
    }
    return components.ToArray();
}
/// <summary>
/// Finalizes the result, choosing the most specific geometry type for the
/// extracted points, lines and polygons.
/// </summary>
protected override IGeometry FinalizeResult()
{
    if (Source.VertexCount == 0)
    {
        return null;
    }

    bool anyPoints = _points.Count != 0;
    bool anyLines = _lines.Count != 0;
    bool anyPolygons = _polygons.Count != 0;

    // Only points were extracted.
    if (!anyLines && !anyPolygons)
    {
        if (_points.Count == 1)
        {
            return _points.First();
        }
        return _factory.CreateMultiPoint(_points);
    }

    // Lines (and possibly points) were extracted, but no faces.
    if (!anyPolygons)
    {
        if (!anyPoints)
        {
            if (_lines.Count == 1)
            {
                return _lines.First();
            }
            return _factory.CreateMultiLineString(_lines);
        }
        return _factory.CreateGeometryCollection(_points.Concat<IGeometry>(_lines));
    }

    // Faces were extracted; return a pure polygon result when nothing else is present.
    if (!anyPoints && !anyLines)
    {
        if (_polygons.Count == 1)
        {
            return _polygons.First();
        }
        return _factory.CreateMultiPolygon(_polygons);
    }

    // Mixed content: fall back to a heterogeneous collection.
    return _factory.CreateGeometryCollection(_points.Concat<IGeometry>(_lines).Concat<IGeometry>(_polygons));
}
/// <summary>
/// Returns this node's data merged with the data of all reachable child nodes,
/// without duplicates.
/// </summary>
public IEnumerable<T> GetData()
{
    // TODO: Improve performance here
    var fromChildren = _edges.Values
        .Select(edge => edge.Target)
        .SelectMany(target => target.GetData());
    return _data.Concat(fromChildren).Distinct();
}
/// <summary>
/// Creates a Map instance from a two-dimensional array of field types.
/// </summary>
/// <param name="fieldArray">The 2D field array the map is built from; mutated in place.</param>
/// <returns>The newly created map.</returns>
Map InstantiateMap(FieldTypes[,] fieldArray)
{
    // Converts a flat index into an (x, y) point; x varies fastest.
    var toPosition = new Func<int, Point>(i => new Point(
                                              i % fieldArray.GetLength(0),
                                              i / fieldArray.GetLength(0)));

    // Extract the goal positions.
    var goalPositions = new HashSet<Point>(
        Enumerable.Range(0, fieldArray.Length).Select(toPosition)
        .Where(pos => fieldArray[pos.X, pos.Y] == FieldTypes.Goal));

    // Extract the player position.
    var playerPosition = Enumerable.Range(0, fieldArray.Length).Select(toPosition)
                         .Where(pos => fieldArray[pos.X, pos.Y] == FieldTypes.Player)
                         .First();

    // The map's field array only tracks things that can obstruct movement,
    // so replace the player and goal cells with plain space.
    foreach (var pos in goalPositions.Concat(new[] { playerPosition }))
    {
        fieldArray[pos.X, pos.Y] = FieldTypes.Space;
    }

    return(new Map(fieldArray, playerPosition, goalPositions));
}
/// <summary>
/// Removes all of the invalid tickets from the initial input.
/// </summary>
/// <returns>Error rate (sum of all the invalid values).</returns>
private int RemoveInvalidTickets()
{
    int errorRate = 0;
    var fieldKeys = _fields.Values.ToArray();
    List<int[]> invalidTickets = new List<int[]>();

    // Union of every value accepted by at least one field.
    // IMPROVED: UnionWith mutates the set in place instead of rebuilding it with
    // Concat(...).ToHashSet() for every field, which was quadratic in the worst case.
    HashSet<int> allValidValues = new HashSet<int>();
    foreach (int[] field in fieldKeys)
    {
        allValidValues.UnionWith(field);
    }

    foreach (int[] ticket in _nearbyTickets)
    {
        // Values not valid for any field contribute to the error rate.
        int tempErrorRate = ticket.Except(allValidValues).Sum();
        // A ticket containing 0 is treated as invalid even though 0 adds nothing to the sum.
        if (tempErrorRate > 0 || ticket.Contains(0))
        {
            errorRate += tempErrorRate;
            invalidTickets.Add(ticket);
        }
    }

    for (int i = 0; i < invalidTickets.Count; i++)
    {
        _nearbyTickets.Remove(invalidTickets[i]);
    }

    return errorRate;
}
/// <summary>
/// Uses a djikstra-like algorithm to flood the graph from the start
/// node until the target node is found.
/// All visited nodes have their distance from the start node updated.
/// </summary>
/// <param name="start">The starting node.</param>
/// <param name="target">The target node.</param>
/// <param name="front">The last newly found nodes.</param>
/// <param name="visited">The already visited nodes.</param>
/// <param name="distFromStart">The traversed distance from the
/// starting node in edges.</param>
/// <returns>The distance from the start node to the target node.</returns>
/// <remarks> - Currently the target node is never found if contained
/// in front or visited.
/// - If front = { start }, then distFromStart should be 0.</remarks>
public int dijkstraStep(GraphNode start, GraphNode target, HashSet<GraphNode> front, HashSet<GraphNode> visited, int distFromStart)
{
    HashSet<GraphNode> newFront = new HashSet<GraphNode>();
    HashSet<GraphNode> newVisited = new HashSet<GraphNode>(visited);
    // BUG FIX: the original called newVisited.Concat(front) and discarded the result
    // (LINQ Concat does not mutate). UnionWith merges the front into the visited set,
    // which also makes the per-node Add the original did inside the loop unnecessary.
    newVisited.UnionWith(front);
    foreach (GraphNode node in front)
    {
        foreach (GraphNode adjacentNode in node.Adjacent)
        {
            if (adjacentNode == target)
            {
                return distFromStart + 1;
            }
            // Skip nodes already visited or already part of the current front.
            if (visited.Contains(adjacentNode))
            {
                continue;
            }
            if (front.Contains(adjacentNode))
            {
                continue;
            }
            newFront.Add(adjacentNode);
        }
    }
    // This wouldn't need recursion, but it's more convenient this way.
    if (newFront.Count > 0)
    {
        return dijkstraStep(start, target, newFront, newVisited, distFromStart + 1);
    }
    throw new GraphNotConnectedException();
}
/// <summary>
/// Flood-fills the new and reused rooms into contiguous groups, assigning each
/// room a group index. Returns the number of groups created.
/// </summary>
private int CombineNewAndReusedRoomsIntoContiguousGroups()
{
    int groupCount = 0;
    // Reset group membership on every reused room before regrouping.
    foreach (VehicleRoom reused in reusedOldRooms)
    {
        reused.newOrReusedRoomGroupIndex = -1;
    }
    foreach (VehicleRoom seed in reusedOldRooms.Concat(newRooms))
    {
        if (seed.newOrReusedRoomGroupIndex >= 0)
        {
            continue; // already claimed by an earlier flood fill
        }
        tmpRoomStack.Clear();
        tmpRoomStack.Push(seed);
        seed.newOrReusedRoomGroupIndex = groupCount;
        // Depth-first fill over neighboring rooms that belong in the same group.
        while (tmpRoomStack.Count != 0)
        {
            VehicleRoom popped = tmpRoomStack.Pop();
            foreach (VehicleRoom neighbor in popped.Neighbors)
            {
                if (neighbor.newOrReusedRoomGroupIndex < 0 && ShouldBeInTheSameRoomGroup(popped, neighbor))
                {
                    neighbor.newOrReusedRoomGroupIndex = groupCount;
                    tmpRoomStack.Push(neighbor);
                }
            }
        }
        tmpRoomStack.Clear();
        groupCount++;
    }
    return groupCount;
}
/// <summary>
/// Removes excluded files from the package file collection, warning about files
/// dropped by the default exclusion rules.
/// </summary>
internal void ExcludeFiles(ICollection<IPackageFile> packageFiles)
{
    // Always exclude the nuspec file
    // Review: This exclusion should be done by the package builder because it knows which file would collide with the auto-generated
    // manifest file.
    var wildCards = _excludes.Concat(new[] { @"**\*" + NuGetConstants.ManifestExtension });
    if (!_packArgs.NoDefaultExcludes)
    {
        // The user has not explicitly disabled default filtering.
        var excludedFiles = PathResolver.GetFilteredPackageFiles(packageFiles, ResolvePath, _defaultExcludes);
        if (excludedFiles != null)
        {
            foreach (var file in excludedFiles)
            {
                // Only physical files have a source path worth reporting.
                if (file is PhysicalPackageFile physicalPackageFile)
                {
                    _packArgs.Logger.Log(PackagingLogMessage.CreateWarning(
                                             string.Format(CultureInfo.CurrentCulture, Strings.Warning_FileExcludedByDefault, physicalPackageFile.SourcePath),
                                             NuGetLogCode.NU5119));
                }
            }
        }
    }
    wildCards = wildCards.Concat(_packArgs.Exclude);
    PathResolver.FilterPackageFiles(packageFiles, ResolvePath, wildCards);
}
/// <summary>
/// Initializes a new instance of the <see cref="Room"/> class.
/// </summary>
/// <param name="in_walkableArea">
/// The <see cref="Space"/>s on which a <see cref="Characters.Being"/>
/// may walk within this <see cref="Room"/>.
/// </param>
/// <param name="in_perimeter">
/// The <see cref="Space"/>s whose <see cref="Block"/>s and <see cref="Furnishing"/>s
/// define the limits of this <see cref="Room"/>.
/// </param>
public Room(HashSet<Space> in_walkableArea, HashSet<Space> in_perimeter)
{
    Precondition.IsNotNull(in_walkableArea, nameof(in_walkableArea));
    Precondition.IsNotNull(in_perimeter, nameof(in_perimeter));

    // NOTE(review): IndexOutOfRangeException is unusual for argument validation —
    // ArgumentOutOfRangeException would be conventional — but callers may depend
    // on the current type; confirm before changing.
    if (in_walkableArea.Count < All.Recipes.Rooms.MinWalkableSpaces
        || in_walkableArea.Count > All.Recipes.Rooms.MaxWalkableSpaces)
    {
        throw new IndexOutOfRangeException(nameof(in_walkableArea));
    }

    // Smallest perimeter this code accepts for N walkable spaces is 2N + 2.
    var minimumPossiblePerimeterLength = 2 * in_walkableArea.Count + 2;
    if (in_perimeter.Count < minimumPossiblePerimeterLength)
    {
        throw new IndexOutOfRangeException($"{nameof(in_perimeter)} is too small to surround {nameof(in_walkableArea)}.");
    }

    // The room must contain at least one entry furnishing, either inside the
    // walkable area or on the perimeter.
    if (!in_walkableArea.Concat(in_perimeter).Any(space => All.Parquets.Get<Furnishing>(space.Content.Furnishing)?.IsEntry ?? false))
    {
        throw new ArgumentException($"No entry/exit found in {nameof(in_walkableArea)} or {nameof(in_perimeter)}.");
    }

    WalkableArea = in_walkableArea;
    Perimeter = in_perimeter;
}
/// <summary>
/// Reads the .dep file <paramref name="fileName"/>, adds every referenced DLL to
/// <paramref name="dllSet"/>, then recursively resolves each referenced DLL's own
/// .dep file and merges its dependencies in.
/// </summary>
/// <param name="dllSet">Accumulator of already-known DLL paths; mutated and returned.</param>
/// <param name="fileName">Path of the .dep XML file to read.</param>
/// <returns>The same set instance, now including all transitive dependencies.</returns>
public static HashSet<string> getDep(HashSet<string> dllSet, string fileName)
{
    /*
     * Add Part where
     * !File.exists(fileName) download();
     */
    HashSet<string> set_for_this_Dep = new HashSet<string>();
    // IMPROVED: dispose the reader so the file handle is released even on error.
    using (XmlTextReader reader = new XmlTextReader(fileName))
    {
        while (reader.Read())
        {
            if (reader.NodeType == System.Xml.XmlNodeType.Element && reader.Name == "Reference")
            {
                // The element's text content is the referenced DLL path.
                reader.Read();
                string dllPath = reader.Value.Trim();
                if (!dllSet.Contains(dllPath))
                {
                    dllSet.Add(dllPath);
                    // "foo.dll" -> "foo.dep": same base name, .dep extension.
                    string depPath = dllPath.Substring(0, dllPath.Length - 4) + ".dep";
                    set_for_this_Dep.Add(depPath);
                }
            }
        }
    }
    foreach (string dll in set_for_this_Dep)
    {
        // BUG FIX: the original called dllSet.Concat(...) and discarded the result
        // (LINQ Concat does not mutate), so transitive dependencies were silently
        // dropped. UnionWith actually merges the recursive results into dllSet.
        dllSet.UnionWith(getDep(new HashSet<string>(), dll));
    }
    return dllSet;
}
// Set states based on unit types
private State[,] GetTileStates(IEnumerable<IntPoint3d> unitElementsOfTypeA, IEnumerable<IntPoint3d> unitElementsOfTypeB, IEnumerable<IntPoint3d> areaCentres)
{
    // A + B: every position occupied by a unit of either type.
    var unitElements = unitElementsOfTypeA.Concat(unitElementsOfTypeB);
    var elements = new HashSet<IntPoint3d>(unitElements);

    //null space: area centres not occupied by any unit.
    var nulls = new HashSet<IntPoint3d>(areaCentres);
    nulls.ExceptWith(elements);

    // all points, ordered by X then by Y for row-major reshaping.
    var allPoints = elements.Concat(nulls);
    var sortedPoints = allPoints.OrderBy(p => p.X).ThenBy(p => p.Y);

    // Assumes the total point count is a perfect square — TODO confirm; a
    // non-square count is silently truncated by the cast here.
    int tileSize = (int)Math.Sqrt(allPoints.Count());

    // MAP ARRAY INTO TILESIZE x TILESIZE 2D ARRAY
    var tileUnits = Reshape(sortedPoints, tileSize, tileSize);

    // BASED ON 4X4 ARRAY CREATE SAME ARRAY BUT WITH STATES (EMPTY/HELF/FULL) INSTEAD OF POINTS
    State[,] tileStates = SetStatesBaseOnTileShape(unitElementsOfTypeA, unitElementsOfTypeB, tileUnits, tileSize);

    return(tileStates);
}
/// <summary>
/// Initializes the converter with the built-in scalar conversion policies plus any
/// caller-supplied scalar types and destructuring policies.
/// </summary>
/// <param name="additionalScalarTypes">Extra types to treat as scalar values; must not be null.</param>
/// <param name="additionalDestructuringPolicies">Extra destructuring policies, placed ahead of the built-in delegate policy; must not be null.</param>
/// <exception cref="ArgumentNullException">When either argument is null.</exception>
public PropertyValueConverter(IEnumerable<Type> additionalScalarTypes, IEnumerable<IDestructuringPolicy> additionalDestructuringPolicies)
{
    // IMPROVED: use nameof instead of string literals so the reported parameter
    // names survive renames, consistent with the other overloads of this type.
    if (additionalScalarTypes == null)
    {
        throw new ArgumentNullException(nameof(additionalScalarTypes));
    }
    if (additionalDestructuringPolicies == null)
    {
        throw new ArgumentNullException(nameof(additionalDestructuringPolicies));
    }

    _scalarConversionPolicies = new IScalarConversionPolicy[]
    {
        new SimpleScalarConversionPolicy(BuiltInScalarTypes.Concat(additionalScalarTypes)),
        new NullableScalarConversionPolicy(),
        new EnumScalarConversionPolicy(),
        new ByteArrayScalarConversionPolicy(),
        new ReflectionTypesScalarConversionPolicy()
    };

    _destructuringPolicies = additionalDestructuringPolicies
        .Concat(new [] { new DelegateDestructuringPolicy() })
        .ToArray();
}
/// <summary>
/// Collects replay file paths of the given type under a folder, optionally
/// recursing into subdirectories and filtering out replays already in the database.
/// </summary>
/// <param name="folderPath">Root folder to scan.</param>
/// <param name="type">Replay type; only Warcraft3 (*.w3g) is currently supported.</param>
/// <param name="recursive">When true, scan subdirectories and remove already-known replays.</param>
/// <returns>The set of replay paths found.</returns>
/// <exception cref="ArgumentException">For unsupported replay types.</exception>
public static HashSet<String> GetReplaysInFolder(String folderPath, ReplayType type, bool recursive = true)
{
    HashSet<String> replaysInFolder;
    if (type == ReplayType.Warcraft3)
    {
        //make a set of replays in the folder, and replays that exist
        replaysInFolder = new HashSet<String>(Directory.GetFiles(folderPath, "*.w3g"));
        if (recursive)
        {
            Queue<String> directories = new Queue<String>(Directory.GetDirectories(folderPath));
            while (directories.Count > 0)
            {
                String dir = directories.Dequeue();
                // IMPROVED: mutate the existing collections instead of rebuilding a
                // new HashSet and Queue per directory, which was O(n^2) overall.
                replaysInFolder.UnionWith(Directory.GetFiles(dir, "*.w3g"));
                foreach (String subDir in Directory.GetDirectories(dir))
                {
                    directories.Enqueue(subDir);
                }
            }
            // Drop replays the database already knows about.
            HashSet<String> existingReplays = DatabaseHandler.GetWarcraft3ReplayPaths();
            replaysInFolder.ExceptWith(existingReplays);
        }
    }
    //add new replay types here, otherwise an exception is thrown
    else
    {
        throw new ArgumentException();
    }
    return replaysInFolder;
}
/// <summary>
/// Rolls the loot for the shared (public) bags and shows the resulting bags.
/// </summary>
/// <param name="rand">Random source for loot rolls.</param>
/// <param name="dat">The participating players (with their counts); only the length is used here.</param>
void ProcessPublicBags(Random rand, Tuple<Player, int>[] dat)
{
    // Scale loot count with the number of participants, clamped to the configured bounds.
    int lootCount = PublicBag.BaseLootCount + PublicBag.PersonMultiplier * dat.Length;
    if (lootCount < PublicBag.MinLootCount)
    {
        lootCount = PublicBag.MinLootCount;
    }
    if (lootCount > PublicBag.MaxLootCount)
    {
        lootCount = PublicBag.MaxLootCount;
    }

    // Potions are kept separate so they are appended after the other loot.
    HashSet<Item> loots = new HashSet<Item>();
    List<Item> pots = new List<Item>();
    // Roll lootCount times, but keep rolling while BOTH buckets are still below
    // MinLootCount (GetRandomLoot can return null, so rolls may come up empty).
    for (int i = 0; i < lootCount || (loots.Count < PublicBag.MinLootCount && pots.Count < PublicBag.MinLootCount); i++)
    {
        Item loot = PublicBag.GetRandomLoot(rand);
        if (loot != null)
        {
            if (loot.Potion)
            {
                pots.Add(loot);
            }
            else
            {
                loots.Add(loot);
            }
        }
    }
    ShowBags(rand, loots.Concat(pots), null);
}
/// <summary>
/// Verifies (once per process) that the expected Roslyn code fix and refactoring
/// provider type names still exist in the loaded Roslyn assemblies.
/// </summary>
private static void ValidateRoslynList(IAssemblyLoader loader)
{
    if (s_validated)
    {
        return;
    }

    // If a lookup below fails, OmniSharp has updated to a new version of Roslyn
    // and one of the provider type names changed.
    var assemblies = new[]
    {
        loader.Load(Configuration.RoslynCSharpFeatures),
        loader.Load(Configuration.RoslynFeatures),
        loader.Load(Configuration.RoslynWorkspaces)
    };

    var extraProviders = new[] { AddImportProviderName, RemoveUnnecessaryUsingsProviderName };
    foreach (var typeName in _roslynListToRemove.Concat(extraProviders))
    {
        if (!ExistsInAssemblyList(typeName, assemblies))
        {
            throw new InvalidOperationException($"Could not find '{typeName}'. Has this type name changed?");
        }
    }

    s_validated = true;
}
/// <summary>
/// Returns the value names referenced by this node: the union of the names from
/// the left and right child nodes.
/// </summary>
public HashSet<string> getValNames()
{
    HashSet<string> names = leftNode.getValNames();
    // BUG FIX: the original called names.Concat(...) and discarded the result
    // (LINQ Concat returns a new sequence and does not mutate the set), so the
    // right node's names were silently dropped. UnionWith merges them in place.
    names.UnionWith(rightNode.getValNames());
    return names;
}
/// <summary>
/// Combines this dataset with another HarloweDataset: Add unions the values,
/// Subtract removes them. Any other operator (or operand type) is unsupported.
/// </summary>
public override bool Combine(Operator op, object b, out StoryVar result)
{
    result = default(StoryVar);
    if (!(b is HarloweDataset bSet))
    {
        return false;
    }
    switch (op)
    {
        case Operator.Add:
            result = new StoryVar(new HarloweDataset(Values.Concat(bSet.Values)));
            return true;
        case Operator.Subtract:
            result = new StoryVar(new HarloweDataset(Values.Except(bSet.Values)));
            return true;
        default:
            return false;
    }
}
/// <summary>
/// Merge 2 WarningProperties objects.
/// This method will combine the warning properties from both the collections.
/// </summary>
/// <param name="first">First Object to be merged.</param>
/// <param name="second">Second Object to be merged.</param>
/// <returns>Returns a WarningProperties with the combined warning properties.
/// Returns the reference to one of the inputs if the other input is Null.
/// Returns a Null if both the input properties are Null. </returns>
public static HashSet<NuGetLogCode> MergeCodes(
    HashSet<NuGetLogCode> first,
    HashSet<NuGetLogCode> second)
{
    // Handle the null cases first; merged is the answer when either input is null.
    if (TryMergeNullObjects(first, second, out var merged))
    {
        return merged;
    }
    // Reuse an existing instance whenever one input contributes nothing new.
    if (first.Count == 0)
    {
        return second;
    }
    if (second.Count == 0)
    {
        return first;
    }
    if (first.SetEqualsWithNullCheck(second))
    {
        return first;
    }
    // Merge NoWarn Sets.
    return new HashSet<NuGetLogCode>(first.Concat(second));
}
/// <summary>
/// Flood-fills reused and new rooms into contiguous groups, stamping each room
/// with its group index. Returns the number of groups formed.
/// </summary>
private int CombineNewAndReusedRoomsIntoContiguousGroups()
{
    int groupIndex = 0;
    // Clear previous group assignments on the reused rooms.
    foreach (Room oldRoom in reusedOldRooms)
    {
        oldRoom.newOrReusedRoomGroupIndex = -1;
    }
    foreach (Room seed in reusedOldRooms.Concat(newRooms))
    {
        if (seed.newOrReusedRoomGroupIndex < 0)
        {
            // Unclaimed room: start a new group and flood-fill from here.
            tmpRoomStack.Clear();
            tmpRoomStack.Push(seed);
            seed.newOrReusedRoomGroupIndex = groupIndex;
            while (tmpRoomStack.Count != 0)
            {
                Room current = tmpRoomStack.Pop();
                foreach (Room neighbor in current.Neighbors)
                {
                    if (neighbor.newOrReusedRoomGroupIndex < 0 && ShouldBeInTheSameRoomGroup(current, neighbor))
                    {
                        neighbor.newOrReusedRoomGroupIndex = groupIndex;
                        tmpRoomStack.Push(neighbor);
                    }
                }
            }
            tmpRoomStack.Clear();
            groupIndex++;
        }
    }
    return groupIndex;
}
/// <summary>
/// Shuts down Propeller: stops the HTTP listener and tears down all app domains.
/// </summary>
/// <returns>False if the base shutdown failed; otherwise true.</returns>
public override bool Shutdown(bool waitForExit)
{
    // NOTE(review): the waitForExit argument is ignored; base.Shutdown is always
    // called with true — confirm whether that is intentional.
    if (!base.Shutdown(true))
    {
        return(false);
    }
    if (_log != null)
    {
        _log.Info("Propeller shutting down.");
    }
    if (_server != null)
    {
        _server.StopListening();
        // Give in-flight requests five seconds to drain, then force the listener down.
        if (!_server.ShutdownComplete.WaitOne(TimeSpan.FromSeconds(5)))
        {
            _server.StopListening(brutal: true, blocking: true);
        }
    }
    // Shut down and dispose every app domain, active and inactive alike.
    foreach (var domain in _activeAppDomains.Concat(_inactiveAppDomains))
    {
        domain.RunnerProxy.Shutdown();
        domain.Dispose();
    }
    return(true);
}
/// <summary>
/// Initializes the converter with the built-in scalar conversion and destructuring
/// policies plus any caller-supplied additions.
/// </summary>
/// <param name="maximumDestructuringDepth">Maximum destructuring depth; must be non-negative.</param>
/// <param name="additionalScalarTypes">Extra types to treat as scalar values; must not be null.</param>
/// <param name="additionalDestructuringPolicies">Extra destructuring policies, placed ahead of the built-in ones in the policy list; must not be null.</param>
/// <exception cref="ArgumentNullException">When a collection argument is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">When the depth is negative.</exception>
public PropertyValueConverter(int maximumDestructuringDepth, IEnumerable<Type> additionalScalarTypes, IEnumerable<IDestructuringPolicy> additionalDestructuringPolicies)
{
    if (additionalScalarTypes == null)
    {
        throw new ArgumentNullException(nameof(additionalScalarTypes));
    }
    if (additionalDestructuringPolicies == null)
    {
        throw new ArgumentNullException(nameof(additionalDestructuringPolicies));
    }
    if (maximumDestructuringDepth < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(maximumDestructuringDepth));
    }

    _maximumDestructuringDepth = maximumDestructuringDepth;

    // Built-in scalar policies, with the caller's scalar types folded into the simple policy.
    _scalarConversionPolicies = new IScalarConversionPolicy[]
    {
        new SimpleScalarConversionPolicy(BuiltInScalarTypes.Concat(additionalScalarTypes)),
        new NullableScalarConversionPolicy(),
        new EnumScalarConversionPolicy(),
        new ByteArrayScalarConversionPolicy(),
    };

    // Caller-supplied destructuring policies come first, then the built-in fallbacks.
    _destructuringPolicies = additionalDestructuringPolicies
        .Concat(new IDestructuringPolicy [] { new DelegateDestructuringPolicy(), new ReflectionTypesScalarDestructuringPolicy() })
        .ToArray();
}
/// <summary>
/// Loads the requested keys (together with any previously collected keys) that
/// have not been loaded yet, and adds the returned items to this collection.
/// </summary>
/// <param name="keys">The keys to load; a null argument is a no-op.</param>
protected virtual async Task LoadAsync(IEnumerable<int> keys)
{
    if (keys == null)
    {
        return;
    }

    // Pending keys = previously collected + newly requested, de-duplicated,
    // minus everything already loaded.
    var loadKeys = (_collect.Count == 0 ? keys : _collect.Concat(keys))
                   .Distinct()
                   .Except(_loaded)
                   .ToArray();

    // Invalidate, do not load again.
    _collect.Clear();

    if (loadKeys.Any())
    {
        //++_roundTripCount;
        //Debug.WriteLine("Round trip {0} of {1}: {2}", _roundTripCount, typeof(T).Name, string.Join(",", loadKeys.OrderBy(x => x)));
        var items = await _load(loadKeys);
        // Mark the keys as loaded even if the loader returned nothing, so they are not retried.
        _loaded.AddRange(loadKeys);
        if (items != null)
        {
            foreach (var range in items)
            {
                base.AddRange(range.Key, range.Value);
            }
        }
    }
}
// Neutralizes a syntactic construct that does not return a value: if the current
// symbol is not a valid starter, reports the error and skips forward to one; after
// parsing, skips past anything that is not a valid follower.
private void NeutralizerDecorator
(
    Action<HashSet<SymbolEnum>> method,
    HashSet<SymbolEnum> starters,
    HashSet<SymbolEnum> followers,
    int errorCode = FORBIDDEN_SYMBOL_ERROR_CODE,
    HashSet<SymbolEnum> parentFollowers = null
)
{
    // The parent construct's followers also terminate this construct.
    if (parentFollowers != null)
    {
        followers = new HashSet<SymbolEnum>(followers.Concat(parentFollowers));
    }
    if (!starters.Contains(CurrentSymbol))
    {
        Error(errorCode);
        SkipToBefore(starters, followers);
    }
    // Only invoke the parser if recovery actually landed on a valid starter.
    if (starters.Contains(CurrentSymbol))
    {
        method(followers);
        if (!followers.Contains(CurrentSymbol))
        {
            Error(FORBIDDEN_SYMBOL_ERROR_CODE);
            SkipToAfter(followers);
        }
    }
}
// Neutralizes a syntactic construct that returns a value: same recovery scheme as
// NeutralizerDecorator, but propagates the parser's result (or default when the
// construct could not be started at all).
private TResult NeutralizerDecoratorWithReturn <TResult>
(
    Func<HashSet<SymbolEnum>, TResult> method,
    HashSet<SymbolEnum> starters,
    HashSet<SymbolEnum> followers,
    int errorCode = FORBIDDEN_SYMBOL_ERROR_CODE,
    HashSet<SymbolEnum> parentFollowers = null
)
{
    // The parent construct's followers also terminate this construct.
    if (parentFollowers != null)
    {
        followers = new HashSet<SymbolEnum>(followers.Concat(parentFollowers));
    }
    if (!starters.Contains(CurrentSymbol))
    {
        Error(errorCode);
        SkipToBefore(starters, followers);
    }
    // Only invoke the parser if recovery actually landed on a valid starter.
    if (starters.Contains(CurrentSymbol))
    {
        var result = method(followers);
        if (!followers.Contains(CurrentSymbol))
        {
            Error(FORBIDDEN_SYMBOL_ERROR_CODE);
            SkipToAfter(followers);
        }
        return(result);
    }
    // The construct never started; there is no value to return.
    return(default(TResult));
}
/// <summary>
/// Initializes the converter with limits on destructuring depth, string length and
/// collection size, plus caller-supplied scalar types and destructuring policies.
/// </summary>
/// <param name="maximumDestructuringDepth">Maximum destructuring depth; must be non-negative.</param>
/// <param name="maximumStringLength">Maximum captured string length; must be at least 2.</param>
/// <param name="maximumCollectionCount">Maximum captured collection size; must be at least 1.</param>
/// <param name="additionalScalarTypes">Extra types to treat as scalar values; must not be null.</param>
/// <param name="additionalDestructuringPolicies">Extra destructuring policies, placed ahead of the built-in ones; must not be null.</param>
/// <param name="propagateExceptions">When true, exceptions thrown during capturing are rethrown.</param>
public PropertyValueConverter(
    int maximumDestructuringDepth,
    int maximumStringLength,
    int maximumCollectionCount,
    IEnumerable<Type> additionalScalarTypes,
    IEnumerable<IDestructuringPolicy> additionalDestructuringPolicies,
    bool propagateExceptions)
{
    // Validate arguments before touching any state.
    if (additionalScalarTypes == null)
        throw new ArgumentNullException(nameof(additionalScalarTypes));
    if (additionalDestructuringPolicies == null)
        throw new ArgumentNullException(nameof(additionalDestructuringPolicies));
    if (maximumDestructuringDepth < 0)
        throw new ArgumentOutOfRangeException(nameof(maximumDestructuringDepth));
    if (maximumStringLength < 2)
        throw new ArgumentOutOfRangeException(nameof(maximumStringLength));
    if (maximumCollectionCount < 1)
        throw new ArgumentOutOfRangeException(nameof(maximumCollectionCount));

    _propagateExceptions = propagateExceptions;
    _maximumStringLength = maximumStringLength;
    _maximumCollectionCount = maximumCollectionCount;

    // Built-in scalar policies, with the caller's scalar types folded into the simple policy.
    _scalarConversionPolicies = new IScalarConversionPolicy[]
    {
        new SimpleScalarConversionPolicy(BuiltInScalarTypes.Concat(additionalScalarTypes)),
        new EnumScalarConversionPolicy(),
        new ByteArrayScalarConversionPolicy()
    };

    // Caller-supplied destructuring policies come first, then the built-in fallbacks.
    _destructuringPolicies = additionalDestructuringPolicies
        .Concat(new IDestructuringPolicy [] { new DelegateDestructuringPolicy(), new ReflectionTypesScalarDestructuringPolicy() })
        .ToArray();

    _depthLimiter = new DepthLimiter(maximumDestructuringDepth, this);
}
/// <summary>
/// Deserializes this job manager's state, canceling any currently queued jobs first.
/// </summary>
public override void ReadData(Reader reader)
{
    //Clear out any current jobs.
    while (normalJobs.Count > 0)
    {
        Cancel(normalJobs.First());
    }
    while (emergencyJobs.Count > 0)
    {
        Cancel(emergencyJobs.First());
    }

    //Stop JobQueries from doing stuff until after deserialization is done.
    JobQueries.Dispose();

    base.ReadData(reader);

    // Read both job queues back from the serialized data.
    reader.Collection("normalJobs",
                      (MyData.Reader rd, ref Job outval, string name) =>
                      {
                          outval = Job.Read(rd, name, TheMap);
                      },
                      (i) => normalJobs);
    reader.Collection("emergencyJobs",
                      (MyData.Reader rd, ref Job outVal, string name) =>
                      {
                          outVal = Job.Read(rd, name, TheMap);
                      },
                      (i) => emergencyJobs);

    // Re-initialize every deserialized job, normal and emergency alike.
    foreach (Job j in normalJobs.Concat(emergencyJobs))
    {
        InitJob(j);
    }
}
/// <summary>
/// Gets all titles that are suitable for player.
/// </summary>
/// <param name="player">The player for title checks.</param>
/// <returns>All title suitable for given player or an empty list if none.</returns>
public static ICollection GetPlayerTitles(GamePlayer player)
{
    // The "clear" title is always offered, followed by every suitable registered title.
    var titles = new HashSet<IPlayerTitle> { ClearTitle };
    var suitable = m_titles.Where(t => t.IsSuitable(player));
    return titles.Concat(suitable).ToArray();
}
/// <summary>
/// Returns the symmetric difference of the two directories: files (compared by
/// name) that appear in exactly one of them.
/// </summary>
public HashSet<FileInfo> SymmetricalDifference()
{
    var comparer = new FileNameComparer();
    // Files unique to each side, then merged into a single set.
    var onlyInFirst = this.DirectoryInfo1.Except(this.DirectoryInfo2, comparer);
    var onlyInSecond = this.DirectoryInfo2.Except(this.DirectoryInfo1, comparer);
    return onlyInFirst.Concat(onlyInSecond).ToHashSet();
}
/// <summary>
/// Returns a new path extended with the given step.
/// </summary>
/// <exception cref="InvalidOperationException">When the step's coordinates cannot be added.</exception>
public Path AddStep(Step step)
{
    if (!CanAddCoordinates(step.Coordinates))
    {
        throw new InvalidOperationException();
    }
    // Build the extended step array and hand it to the factory.
    var extendedSteps = _steps.Concat(new[] { step }).ToArray();
    return Create(extendedSteps);
}
/// <summary>
/// Initializes the navigation collections for a new user.
/// </summary>
public User()
{
    MyRequests = new HashSet<FriendRequest>();
    OthersRequests = new HashSet<FriendRequest>();
    FriendsOne = new HashSet<FriendShip>();
    FriendsTwo = new HashSet<FriendShip>();
    // NOTE: Concat/Distinct are deferred, so Friends is a live view over both sets —
    // it reflects later additions and is re-evaluated on every enumeration.
    Friends = FriendsOne.Concat(FriendsTwo).Distinct();
}
/// <summary>
/// Returns the customers whose upper-cased first name appears in any of the
/// verified, certified or A-list name collections.
/// </summary>
public List<Customer> GetUsingConcatAndMultipleToUpper()
{
    // All recognized customer names, combined from the three source lists.
    var recognizedNames = _verifiedCustomerNames
        .Concat(_certifiedCustomerNames)
        .Concat(_aListCustomerNames);
    return _customers
        .Where(c => recognizedNames.Contains(c.FirstName.ToUpper()))
        .ToList();
}
/// <summary>
/// Removes the manifest and package files from the given package file collection.
/// </summary>
private static void ExcludeFiles(ICollection<IPackageFile> packageFiles)
{
    // Always exclude the nuspec file
    // Review: This exclusion should be done by the package builder because it knows which file would collide with the auto-generated
    // manifest file.
    var excludes = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    // NOTE: excludes is currently always empty; kept so additional patterns can be added.
    var defaultWildCards = new[]
    {
        @"**\*" + Constants.ManifestExtension,
        @"**\*" + Constants.PackageExtension
    };
    var wildCards = excludes.Concat(defaultWildCards);
    PathResolver.FilterPackageFiles(packageFiles, ResolvePath, wildCards);
}
/// <summary>
/// Combines and orders two parties by highest speed first.
/// </summary>
/// <param name="p1">Party 1</param>
/// <param name="p2">Party 2</param>
/// <returns>A HashSet of combined IEngageables from both parties sorted by descending speed.</returns>
private static HashSet<IEngageable> arrangeAttackOrder(HashSet<IEngageable> p1, HashSet<IEngageable> p2)
{
    // NOTE(review): HashSet<T> does not guarantee enumeration order, so the
    // "sorted by descending speed" contract relies on unspecified insertion-order
    // behavior — consider returning a List<T> or other ordered collection instead.
    HashSet<IEngageable> bothParties = new HashSet<IEngageable>();
    // Merge both parties and sort by speed, fastest first.
    IEnumerable<IEngageable> concat = p1.Concat(p2).OrderByDescending(member => member.getSpeed());
    foreach (IEngageable member in concat)
    {
        bothParties.Add(member);
    }
    return bothParties;
}
/// <summary>
/// Initializes a connection over the message bus for the given signals and groups.
/// </summary>
public Connection(IMessageBus messageBus,
                  IJsonSerializer jsonSerializer,
                  string baseSignal,
                  string connectionId,
                  IEnumerable<string> signals,
                  IEnumerable<string> groups,
                  ITraceManager traceManager)
{
    _messageBus = messageBus;
    _serializer = jsonSerializer;
    _baseSignal = baseSignal;
    _connectionId = connectionId;
    // Snapshot the caller's sequences so later changes to them don't affect us.
    _signals = new HashSet<string>(signals);
    _groups = new HashSet<string>(groups);
    _trace = traceManager;
    // Deferred view over both sets: reflects later mutations of _signals/_groups.
    _signalsAndGroups = _signals.Concat(_groups);
}
/*
    public static IAssemblySymbol[] GetReferencedAssemblies(IAssemblySymbol assembly)
    {
        var result = assembly.GetType().GetMethod("GetLinkedReferencedAssemblies", BindingFlags.NonPublic | BindingFlags.Instance).Invoke(assembly, new object[0]);
        if (result == null)
            return new IAssemblySymbol[0];
        return result.ToArray();
    }
*/
/// <summary>
/// Reorders assemblies so that referenced assemblies come before their referrers,
/// by repeatedly moving out-of-place references to the front until stable.
/// </summary>
/// <param name="assemblies">Pairs of (assembly, assemblies it references).</param>
/// <returns>The assemblies in the resulting order.</returns>
public static IAssemblySymbol[] Sort(Tuple<IAssemblySymbol, IAssemblySymbol[]>[] assemblies)
{
    var currentList = assemblies.ToList();
    var prepend = new HashSet<Tuple<IAssemblySymbol, IAssemblySymbol[]>>();
    do
    {
        prepend.Clear();
        // Position of each assembly in the current ordering.
        var indices = currentList.Select((x, i) => new { Item = x, Index = i }).ToDictionary(x => x.Item.Item1, x => x.Index);
        for (var i = 0; i < currentList.Count; i++)
        {
            var item = currentList[i];
            foreach (var referencedAssembly in item.Item2)
            {
                int assemblyIndex;
                if (indices.TryGetValue(referencedAssembly, out assemblyIndex))
                {
                    // A referenced assembly currently sits after its referrer: it must move up.
                    if (assemblyIndex > i)
                    {
                        var referencedAssemblyItem = currentList[assemblyIndex];
                        prepend.Add(referencedAssemblyItem);
                    }
                }
            }
        }
        if (prepend.Any())
        {
            // Move the flagged assemblies to the front, keep the rest in order, repeat.
            var newItems = prepend.Concat(currentList.Where(x => !prepend.Contains(x))).ToArray();
            currentList.Clear();
            currentList.AddRange(newItems);
        }
    } while (prepend.Any());
    return currentList.Select(x => x.Item1).ToArray();
}
/// <summary>
/// Progress callback for the solution worker: updates the progress UI and
/// highlights the best skill-tree solution found so far.
/// </summary>
void solutionWorker_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
    // Ignore late progress reports once cancellation or final reporting started.
    if (isCanceling || _stopReporting)
    {
        return;
    }
    progressBar.Value = e.ProgressPercentage;
    lblProgressText.Content = e.ProgressPercentage.ToString() + "/" + maxSteps;
    // UserState carries the node set of the best solution found so far.
    bestSoFar = (HashSet<ushort>)(e.UserState);
    lblBestResult.Content = string.Format(L10n.Plural("Best result so far: {0} additional point spent", "Best result so far: {0} additional points spent", (uint)bestSoFar.Count), bestSoFar.Count);
    // Highlight the candidate nodes together with the nodes already skilled.
    tree.HighlightedNodes = new HashSet<ushort>(bestSoFar.Concat(tree.SkilledNodes));
    tree.DrawNodeBaseSurroundHighlight();
}
/// <summary>
/// Returns the views used by the rewriting; if the rewriting relies on the
/// synthetic "true" view surrogate, replaces the surrogate with a union of real
/// views that evaluates to True.
/// </summary>
private HashSet<FragmentQuery> GetUsedViewsAndRemoveTrueSurrogate(ref Tile<FragmentQuery> rewriting)
{
    var usedViews = new HashSet<FragmentQuery>(rewriting.GetNamedQueries());
    if (!usedViews.Contains(_trueViewSurrogate.Query))
    {
        return usedViews; // no surrogate
    }
    // remove the surrogate
    usedViews.Remove(_trueViewSurrogate.Query);

    // first, try to union usedViews to see whether we can get True
    Tile<FragmentQuery> unionTile = null;
    // Prefer views already in use; fall back to the remaining fragment queries.
    var usedFollowedByUnusedViews = usedViews.Concat(_fragmentQueries);
    foreach (var view in usedFollowedByUnusedViews)
    {
        // Grow the union one view at a time, tracking each view as used.
        unionTile = (unionTile == null) ? CreateTile(view) : _qp.Union(unionTile, CreateTile(view));
        usedViews.Add(view);
        if (IsTrue(unionTile.Query))
        {
            // we found a true rewriting
            rewriting = rewriting.Replace(_trueViewSurrogate, unionTile);
            return usedViews;
        }
    }
    // now we either found the rewriting or we can just take all views because we are in relaxed mode for update views
    Debug.Fail("Shouldn't happen");
    return usedViews;
}
/// <summary>
/// Builds a dynamic index definition from the configured map items, group-by
/// items, sort descriptors and highlighted fields.
/// </summary>
public IndexDefinition CreateIndexDefinition()
{
    var fromClauses = new HashSet<string>();
    var realMappings = new HashSet<string>();

    // Root "from" clause: restrict to the entity collection when one is set.
    if (!string.IsNullOrEmpty(ForEntityName))
    {
        fromClauses.Add("from doc in docs." + ForEntityName);
    }
    else
    {
        fromClauses.Add("from doc in docs");
    }

    foreach (var map in Items)
    {
        var currentDoc = "doc";
        var currentExpression = new StringBuilder();
        int currentIndex = 0;
        // Walk the source path character by character; a ',' marks a nested
        // collection that needs its own "from" clause so inner items are indexed.
        while (currentIndex < map.From.Length)
        {
            char currentChar = map.From[currentIndex++];
            switch (currentChar)
            {
            case ',':
                // doc.NewDoc.Items
                String newDocumentSource = string.Format("{0}.{1}", currentDoc, currentExpression);

                // docNewDocItemsItem
                String newDoc = string.Format("{0}Item", newDocumentSource.Replace(".", ""));

                // from docNewDocItemsItem in doc.NewDoc.Items
                String docInclude = string.Format("from {0} in ((IEnumerable<dynamic>){1}).DefaultIfEmpty()", newDoc, newDocumentSource);
                fromClauses.Add(docInclude);

                // Start building the property again
                currentExpression.Clear();

                // And from this new doc
                currentDoc = newDoc;
                break;

            default:
                currentExpression.Append(currentChar);
                break;
            }
        }

        // Indexer access ("[...]") needs no leading dot; plain members do.
        if (currentExpression.Length > 0 && currentExpression[0] != '[')
        {
            currentExpression.Insert(0, '.');
        }

        // We get rid of any _Range(s) etc
        var indexedMember = currentExpression.ToString().Replace("_Range", "");
        if (indexedMember.Length == 0)
        {
            realMappings.Add(string.Format("{0} = {1}",
                                           map.To.Replace("_Range", ""),
                                           currentDoc
                                           ));
        }
        else
        {
            realMappings.Add(string.Format("{0} = {1}{2}",
                                           map.To.Replace("_Range", ""),
                                           currentDoc,
                                           indexedMember
                                           ));
        }
    }

    // Assemble the final definition; the aggregation part is appended to the map
    // when present, and goes to Reduce or TransformResults depending on the mode.
    var index = new IndexDefinition
    {
        Map = string.Format("{0}\r\nselect new {{ {1} }}", string.Join("\r\n", fromClauses.ToArray()),
                            string.Join(", ", realMappings.Concat(new[] { AggregationMapPart() }).Where(x => x != null))),
        Reduce = DynamicAggregation ? null : AggregationReducePart(),
        TransformResults = DynamicAggregation ? AggregationReducePart() : null,
    };

    if (DynamicAggregation)
    {
        // Dynamic aggregation needs the group-by fields stored.
        foreach (var item in GroupByItems)
        {
            index.Stores[ToFieldName(item.To)] = FieldStorage.Yes;
        }
    }

    foreach (var descriptor in SortDescriptors)
    {
        index.SortOptions[ToFieldName(descriptor.Field)] = descriptor.FieldType;
    }

    foreach (var field in HighlightedFields.EmptyIfNull())
    {
        // Highlighting requires the field to be both stored and analyzed.
        index.Stores[field] = FieldStorage.Yes;
        index.Indexes[field] = FieldIndexing.Analyzed;
    }

    return index;
}
/// <summary>
/// Entry point: parses command-line options, sets up logging and the
/// parent-process watchdog, then loads and runs each requested extension in
/// its own appdomain via <c>SafeExtensionLoader</c>.
/// </summary>
static void Main(string[] args)
{
    ConfigurationItemFactory.Default.Targets.RegisterDefinition("ServiceManager", typeof(ServiceManagerTarget));

    string subdir = null, runDebugMethodOnExtension = null;
    var baseDir = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Extensions");
    Environment.CurrentDirectory = ConfigurationManager.AppSettings["DataDirectory"] ?? AppDomain.CurrentDomain.BaseDirectory;
    var extensionIDs = new HashSet<string>();
    Process process = null;
    Guid guid = Guid.Empty;
    Logger logger = null;

    var options = new OptionSet
    {
        { "guid=", "Specifies a GUID that the extension can use to identify itself to the parent process", v =>
            {
                Guid id;
                if(!Guid.TryParse(v, out id))
                    throw new OptionException("The specified id was not a valid GUID", "guid");
                guid = id;
            }
        },
        { "basedir=", "Specifies the base plugins directory (can be relative or absolute)", v => baseDir = Path.IsPathRooted(v) ? v : Path.Combine(AppDomain.CurrentDomain.BaseDirectory, v) },
        { "subdir=", "Specifies the extension subdirectory name", v => subdir = v },
        { "debug=", "Specifies an extension ID to run the debug method on", v => runDebugMethodOnExtension = v },
        { "pid=", "Parent process ID - if specified, this process will close when the parent process closes", v =>
            {
                int pid;
                if(!int.TryParse(v, out pid))
                    throw new OptionException("The parent process ID must be a 32 bit integer", "pid");
                try
                {
                    process = Process.GetProcessById(pid);
                }
                catch(Exception ex)
                {
                    throw new OptionException(ex.Message, "pid");
                }
                if(process == null)
                    throw new OptionException("There is no process with ID [" + pid + "]", "pid");
            }
        },
        // Unnamed arguments are extension IDs.
        { "<>", v => extensionIDs.Add(v) }
    };

    CancellationTokenSource src = new CancellationTokenSource();
    try
    {
        options.Parse(args);

        // No subdirectory on the command line: fall back to an interactive prompt.
        if(subdir == null)
        {
            Console.Write("Enter plugin directory name (not the full path): ");
            subdir = Console.ReadLine();
            if(string.IsNullOrWhiteSpace(subdir))
            {
                Console.WriteLine("No plugin directory specified.");
                Exit(null, src, ExtensionRunnerExitCode.InvalidArguments);
            }
        }

        GlobalDiagnosticsContext.Set("ExeBaseDir", new FileInfo(Assembly.GetExecutingAssembly().Location).Directory.FullName);
        GlobalDiagnosticsContext.Set("SubDirName", subdir);
        GlobalDiagnosticsContext.Set("ParentProcess", process == null ? "" : process.Id.ToString());
        logger = LogManager.GetCurrentClassLogger();
        logger.Info(new [] {
            "ExtensionRunner Started:",
            " => Command Line: " + Environment.CommandLine,
            " => Subdirectory: " + subdir,
            " => Base Directory: " + baseDir,
            " => Specified Extensions: " + extensionIDs.Concat(", "),
            " => GUID: " + guid,
            " => Parent Process ID: " + (process == null ? "(none)" : process.Id.ToString())
        }.Concat(Environment.NewLine));

        AppDomain.CurrentDomain.UnhandledException += (s,e) => logger.FatalException("UNTRAPPED SERVICE EXCEPTION", (Exception)e.ExceptionObject);
        TaskScheduler.UnobservedTaskException += (s,e) => logger.FatalException("UNTRAPPED TASK EXCEPTION:", e.Exception);

        // Watchdog: poll the parent process and shut down when it exits.
        if(process != null)
        {
            Task.Factory.StartNew(() =>
            {
                while(!src.IsCancellationRequested)
                {
                    process.Refresh();
                    if(process.HasExited)
                    {
                        logger.Warn("Detected parent process shutdown.");
                        Exit(logger, src, ExtensionRunnerExitCode.ParentExited);
                        return;
                    }
                    Thread.Sleep(250);
                }
            });
        }

        // Read list of available extensions
        Dictionary<string, ExtensionInfo> extInfos;
        using(var loader = new SafeExtensionLoader(baseDir, subdir, process == null ? "" : process.Id.ToString(), src))
            extInfos = loader.AvailableExtensions.ToDictionary(x => x.ExtensionID, x => x.Clone());

        if(extensionIDs.Count == 0)
            extensionIDs = new HashSet<string>(extInfos.Select(x => x.Key)); // use all available extensions
        else
            extensionIDs = new HashSet<string>(extensionIDs.Where(x => extInfos.ContainsKey(x))); // eliminate any invalid extension IDs

        logger.Info("Active extensions: " + (extensionIDs.Any() ? extensionIDs.Concat(", ") : "(none)"));
        // FIX: the inactive list was previously only printed when NO extensions
        // were active (inverted condition), and it printed raw KeyValuePairs.
        // Log the IDs of whatever is actually inactive.
        var inactiveIDs = extInfos.Keys.Where(k => !extensionIDs.Contains(k)).ToList();
        logger.Info("Inactive extensions: " + (inactiveIDs.Any() ? inactiveIDs.Concat(", ") : "(none)"));

        var extLoaders = new List<SafeExtensionLoader>();
        var extTasks = new List<Task>();
        try
        {
            foreach(var id in extensionIDs)
            {
                logger.Debug("Starting appdomain for extension: {0}", id);
                var loader = new SafeExtensionLoader(baseDir, subdir, process == null ? "" : process.Id.ToString(), src);
                // FIX: the loader was never added to extLoaders, so the finally
                // block below disposed nothing and every loader leaked.
                extLoaders.Add(loader);
                var extID = id;
                extTasks.Add(Task.Factory.StartNew(() => loader.RunExtension(guid, runDebugMethodOnExtension == extID, extID)));
            }
            Task.WaitAll(extTasks.ToArray(), src.Token);
        }
        finally
        {
            foreach(var extLoader in extLoaders)
                extLoader.Dispose();
        }
    }
    catch(OptionException ex)
    {
        if(logger != null)
            logger.Error("Invalid command options: " + ex.Message, options.WriteOptionDescriptions());
        Exit(logger, src, ExtensionRunnerExitCode.Exception);
    }
    catch(Exception ex)
    {
        if(logger != null)
            logger.FatalException("An exception was thrown", ex);
        Exit(logger, src, ExtensionRunnerExitCode.Exception);
    }
    finally
    {
        Exit(logger, src, ExtensionRunnerExitCode.Success);
    }
}
/// <summary>
/// Walks the assembly, collects every used type reference, decomposes type
/// specifications (arrays, generics, modifiers, function pointers) into their
/// constituents, resolves what it can, and partitions the results into
/// <c>_resolvedTypes</c>, <c>_unresolvedTypes</c> and <c>_allTypes</c>.
/// </summary>
/// <param name="assemblyDef">The assembly being processed.</param>
protected override void ProcessAssembly(AssemblyDefinition assemblyDef)
{
    // Guess the target framework profile once so reference assemblies can be found.
    if (_frameworkProfile == null)
    {
        _frameworkProfile = assemblyDef.GuessAssemblyProfile();
    }

    if (_frameworkProfile != null)
    {
        foreach (var moduleDef in assemblyDef.Modules)
        {
            var resolver = moduleDef.AssemblyResolver as DefaultAssemblyResolver;
            if (resolver != null)
            {
                resolver.AddSearchDirectory(_frameworkProfile.ReferencesDirectory);
            }
        }
    }

    // base.ProcessAssembly populates _usedTypeReferences as it walks the assembly;
    // afterwards the set is drained into a work queue and released.
    _usedTypeReferences = new HashSet<TypeReference>(CecilEqualityComparer.Default);
    base.ProcessAssembly(assemblyDef);
    var unprocessedTypes = new Queue<TypeReference>(_usedTypeReferences);
    _usedTypeReferences = null;

    var processedTypes = new HashSet<TypeDefinition>(CecilEqualityComparer.Default);
    var unresolvedTypes = new HashSet<TypeReference>(CecilEqualityComparer.Default);
    while (unprocessedTypes.Any())
    {
        var typeRef = unprocessedTypes.Dequeue();
        if (typeRef == null)
        {
            continue;
        }
        // Generic parameters (T, TKey, ...) are not real types to resolve.
        if (typeRef.IsGenericParameter)
        {
            continue;
        }

        var typeSpec = typeRef as TypeSpecification;
        if (typeSpec != null)
        {
            // A type specification wraps an element type (e.g. T[] wraps T);
            // queue the element plus any generic arguments / modifiers.
            var elementType = typeSpec.ElementType;
            Debug.Assert(elementType != null);
            unprocessedTypes.Enqueue(elementType);

            var genericInstanceTypeRef = typeRef as GenericInstanceType;
            if (genericInstanceTypeRef != null)
            {
                foreach (var genericArgument in genericInstanceTypeRef.GenericArguments)
                {
                    unprocessedTypes.Enqueue(genericArgument);
                }
            }

            var requiredModifierTypeRef = typeRef as RequiredModifierType;
            if (requiredModifierTypeRef != null)
            {
                unprocessedTypes.Enqueue(requiredModifierTypeRef.ModifierType);
            }

            var optionalModifierTypeRef = typeRef as OptionalModifierType;
            if (optionalModifierTypeRef != null)
            {
                unprocessedTypes.Enqueue(optionalModifierTypeRef.ModifierType);
            }

            var functionPointerTypeRef = typeRef as FunctionPointerType;
            if (functionPointerTypeRef != null)
            {
                unprocessedTypes.Enqueue(functionPointerTypeRef.ReturnType);
                foreach (var parameter in functionPointerTypeRef.Parameters)
                {
                    // FIX: this previously called unprocessedTypes.Equals(...),
                    // which silently discarded the parameter type instead of
                    // queueing it for processing.
                    unprocessedTypes.Enqueue(parameter.ParameterType);
                    foreach (var customAttr in parameter.CustomAttributes)
                    {
                        unprocessedTypes.Enqueue(customAttr.AttributeType);
                    }
                }
                foreach (var customAttr in functionPointerTypeRef.MethodReturnType.CustomAttributes)
                {
                    unprocessedTypes.Enqueue(customAttr.AttributeType);
                }
            }
            continue;
        }

        var typeDef = typeRef as TypeDefinition;
        if (typeDef == null)
        {
            // Plain reference: try to resolve it to a definition; re-queue the
            // definition so it lands in processedTypes on the next pass.
            typeDef = typeRef.TryResolve();
            if (typeDef != null)
            {
                unprocessedTypes.Enqueue(typeDef);
            }
            else
            {
                unresolvedTypes.Add(typeRef);
                Debug.WriteLine(string.Format("Cannot resolve type {0}", typeRef.FullName));
            }
            continue;
        }

        processedTypes.Add(typeDef);
    }

    _resolvedTypes = processedTypes;
    _unresolvedTypes = unresolvedTypes;
    _allTypes = new HashSet<TypeReference>(_unresolvedTypes.Concat(_resolvedTypes), CecilEqualityComparer.Default);
}
/// <summary>
/// Recursively parses a .dep XML file, collecting the dll path of every
/// &lt;Reference&gt; element and following each referenced dll's own .dep file.
/// </summary>
/// <param name="dllSet">Accumulator of dll paths seen so far; mutated in place
/// and also used as the visited set to stop cyclic recursion.</param>
/// <param name="fileName">Path of the .dep XML file to parse.</param>
/// <returns>The same <paramref name="dllSet"/> instance, now containing all
/// direct and transitive dll references.</returns>
public static HashSet<string> getDep(HashSet<string> dllSet, string fileName)
{
    /*
     * Add Part where
     * !File.exists(fileName) download();
     */
    var depsToFollow = new HashSet<string>();
    // FIX: dispose the reader (the old code leaked the open file handle).
    using (XmlTextReader reader = new XmlTextReader(fileName))
    {
        while (reader.Read())
        {
            if (reader.NodeType == System.Xml.XmlNodeType.Element && reader.Name == "Reference")
            {
                reader.Read(); // advance to the text node holding the dll path
                string dllPath = reader.Value.Trim();
                if (!dllSet.Contains(dllPath))
                {
                    dllSet.Add(dllPath);
                    // "foo.dll" -> "foo.dep"
                    string depPath = dllPath.Substring(0, dllPath.Length - 4) + ".dep";
                    depsToFollow.Add(depPath);
                }
            }
        }
    }
    foreach (string depPath in depsToFollow)
    {
        // FIX: the old code did dllSet.Concat(getDep(new HashSet<string>(), dll))
        // and discarded the result, so transitive dependencies were never merged.
        // Recursing with the shared dllSet both accumulates the results and
        // shares the visited set, preventing infinite recursion on cycles.
        getDep(dllSet, depPath);
    }
    return dllSet;
}
/// <summary>
/// Reorders the registered message handlers so that the handlers matching the
/// given types run before all others, keeping the relative order of the rest.
/// </summary>
/// <param name="handlerTypes">Types whose first matching registered handler
/// should be moved to the front of the execution order.</param>
public void ExecuteTheseHandlersFirst(params Type[] handlerTypes)
{
    AssertInit();

    // Resolve each requested type to the first registered handler assignable to it.
    var prioritized = new HashSet<Type>();
    foreach (var requestedType in handlerTypes)
    {
        var match = orderedMessageHandlerList.FirstOrDefault(x => requestedType.IsAssignableFrom(x));
        if (match != null)
        {
            prioritized.Add(match); // HashSet.Add ignores duplicates
        }
    }

    // Rebuild the order: prioritized handlers first, everything else after.
    var remainder = orderedMessageHandlerList.Except(prioritized).ToList();
    orderedMessageHandlerList = prioritized.Concat(remainder).ToList();
}
/// <summary>
/// Gets all titles that are suitable for player.
/// </summary>
/// <param name="player">The player for title checks.</param>
/// <returns>All title suitable for given player or an empty list if none.</returns>
public static ICollection GetPlayerTitles(GamePlayer player)
{
    // The "clear" title is always offered first; suitable registered titles follow.
    var result = new List<IPlayerTitle> { ClearTitle };
    result.AddRange(m_titles.Where(t => t.IsSuitable(player)));
    return result.ToArray();
}
// Completion callback for the solver BackgroundWorker: either unwinds a
// cancellation or commits the final result to the UI and the skill tree.
// NOTE(review): assumes this runs on the UI thread (standard for
// RunWorkerCompleted) — confirm against the worker's setup.
void solutionWorker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    // A cancellation was requested: re-enable pause/resume and reset the flag,
    // leaving the rest of the popup untouched.
    if (isCanceling)
    {
        btnPopupPauseResume.IsEnabled = true;
        isCanceling = false;
        return;
    }
    // Normal completion: update popup labels/buttons to their "done" state.
    lblProgressText.Content = L10n.Message("Finished!");
    btnPopupCancelClose.Content = L10n.Message("Close");
    btnPopupPauseResume.IsEnabled = false;
    // e.Result carries the best node set found by the worker.
    bestSoFar = (HashSet<ushort>)e.Result;
    isPaused = true;
    // Draw the final solution in case not all ProgressChangeds get executed.
    progressBar.Value = maxSteps;
    // Pluralized summary of how many additional points the best solution spends.
    lblBestResult.Content = string.Format(L10n.Plural("Best result so far: {0} additional point spent", "Best result so far: {0} additional points spent", (uint)bestSoFar.Count), bestSoFar.Count);
    // Highlight the found nodes together with the already-skilled ones.
    tree.HighlightedNodes = new HashSet<ushort>(bestSoFar.Concat(tree.SkilledNodes));
    tree.DrawNodeBaseSurroundHighlight();
}
/// <summary>
/// Rolls loot for the public bag and hands the results to ShowBags.
/// The number of rolls scales with the party size, clamped to the configured
/// min/max; potions and non-potion items are collected separately
/// (non-potions are deduplicated via a set) and shown concatenated.
/// </summary>
/// <param name="rand">Random source for the loot rolls.</param>
/// <param name="dat">Participating players with their damage/contribution values.</param>
private void ProcessPublicBags(Random rand, Tuple<Player, int>[] dat)
{
    // Scale with party size, then clamp into [MinLootCount, MaxLootCount].
    var lootCount = Math.Min(PublicBag.MaxLootCount,
        Math.Max(PublicBag.MinLootCount, PublicBag.BaseLootCount + PublicBag.PersonMultiplier * dat.Length));

    var items = new HashSet<Item>();
    var potions = new List<Item>();
    var rolls = 0;
    // Roll lootCount times; if too many rolls produced nothing, keep rolling
    // while BOTH piles are still under the minimum.
    while (rolls < lootCount || (items.Count < PublicBag.MinLootCount && potions.Count < PublicBag.MinLootCount))
    {
        rolls++;
        var drop = PublicBag.GetRandomLoot(rand);
        if (drop == null)
            continue;
        if (drop.Potion)
            potions.Add(drop);
        else
            items.Add(drop);
    }

    ShowBags(rand, items.Concat(potions), null);
}
/// <summary>
/// Converts an MST spanning a set of GraphNodes back into its equivalent
/// as a HashSet of SkillNode IDs.
/// </summary>
/// <param name="mst">The spanned MinimalSpanningTree.</param>
/// <param name="visualize">A debug parameter that highlights all used
/// GraphNodes' SkillNode equivalents in the tree.</param>
/// <returns>A HashSet containing the node IDs of all SkillNodes spanned
/// by the MST.</returns>
/// <exception cref="Exception">Thrown when <paramref name="mst"/> is not spanned.</exception>
HashSet<ushort> SpannedMstToSkillnodes(MinimalSpanningTree mst, bool visualize)
{
    if (!mst.IsSpanned)
        throw new Exception("The passed MST is not spanned!");

    HashSet<ushort> newSkilledNodes = new HashSet<ushort>();
    foreach (GraphEdge edge in mst.SpanningEdges)
    {
        ushort target = edge.outside.Id;
        // A Supernode edge may start from any already-skilled node; a normal
        // edge starts from exactly its inside node.
        HashSet<ushort> start;
        if (edge.inside is Supernode)
            start = tree.SkilledNodes;
        else
            start = new HashSet<ushort>() { edge.inside.Id };

        var path = tree.GetShortestPathTo(target, start);
        // FIX: merge in place instead of rebuilding the whole set per edge —
        // the old `new HashSet<ushort>(newSkilledNodes.Concat(path))` recopied
        // every accumulated node on each iteration. Resulting set is identical.
        newSkilledNodes.UnionWith(path);
    }

    if (visualize)
    {
        tree._nodeHighlighter.UnhighlightAllNodes(NodeHighlighter.HighlightState.FromAttrib);
        foreach (GraphNode steinerNode in mst.mstNodes)
            tree._nodeHighlighter.HighlightNode(SkillTree.Skillnodes[steinerNode.Id], NodeHighlighter.HighlightState.FromAttrib);
    }
    //tree.DrawHighlights(tree._nodeHighlighter);
    return newSkilledNodes;
}