static void Main(string[] args)
{
    /* The words of the dictionary this evil hangman example works with are supplied in the linear array below.
     * For a more sophisticated implementation loading from an external file is obviously possible, but the idea here
     * was to provide a simple solution to the problem, so beginner programmers could understand how they could solve
     * it themselves. */
    string[] dict = {"bakalava", "balamata", "balerina", "balirina", "baniceta", "kalotina", "kolibata", "korubata"};
    HashSet<string> words = new HashSet<string>(dict);
    // Order in which the computer guesses letters.
    // BUG FIX: the original listed 'r' twice and omitted 'q' (the second 'r' sat
    // exactly where 'q' belongs in the alphabetical tail); a HashSet silently
    // dropped the duplicate, leaving 'q' unguessable.
    char[] seq = {'l', 'r', 'i', 'o', 'e', 'n', 'm', 'k', 'v', 't', 'b', 'c', 'd', 'f', 'g', 'h', 'j', 'p', 'q', 's', 'u', 'w', 'x', 'y', 'z'};
    HashSet<char> toGuess = new HashSet<char>(seq);
    Console.WriteLine("Pick a word: (1-" + words.Count + ")");
    int ind = int.Parse(Console.ReadLine());
    // BUG FIX: the original printed 'answer' BEFORE assigning it, so the
    // announcement showed the previous (or default) value instead of the pick.
    answer = dict[ind - 1];
    Console.WriteLine("The word you chose is " + answer + ". Let's see whether the computer can guess it...");
    // The first and last letters are revealed for free, hangman-style.
    guessed.Add(answer[0]);
    guessed.Add(answer[answer.Length - 1]);
    while (words.Count != 1)
    {
        // Remover drops every dictionary word inconsistent with the guesses so far.
        words.RemoveWhere(Remover);
        PrintGuessed(guessed);
        Console.WriteLine(string.Join(", ", words));
        guessed.Add(toGuess.First());
        toGuess.Remove(toGuess.First());
    }
    Console.WriteLine("The word is: " + words.First());
    Console.ReadLine();
}
public void Execute()
{
    // Marks every node participating in a cycle and publishes a "Cycles" mask.
    var mask = new NodeMask
    {
        Label = "Cycles",
        IsShowMask = true
    };
    try
    {
        if (!myPresentation.Graph.Nodes.Any())
        {
            return;
        }

        // Nodes with no incoming or no outgoing edges cannot be on any cycle.
        var unvisited = new HashSet<Node>(myPresentation.Graph.Nodes);
        unvisited.RemoveWhere(n => n.In.Count == 0 || n.Out.Count == 0);

        while (unvisited.Count > 0)
        {
            var start = unvisited.First();
            unvisited.Remove(start);

            var cycleNodes = FindCycles(unvisited, start, new HashSet<Node> { start });
            foreach (var node in cycleNodes)
            {
                mask.Set(node);
            }
        }
    }
    finally
    {
        // Publish the mask even when cycle detection throws.
        var module = myPresentation.GetModule<INodeMaskModule>();
        module.Push(mask);
    }
}
/// <summary>
/// Finds the length of the longest run of consecutive integers contained in
/// <paramref name="arr"/> (order and duplicates are irrelevant).
/// </summary>
/// <param name="arr">Source array.</param>
/// <returns>Length of the longest consecutive sequence; 0 for an empty array.</returns>
public static int LongestConsecutiveSequenceLength(int[] arr)
{
    HashSet<int> allElements = new HashSet<int>(arr);
    // BUG FIX: the original initialized the maximum to 1 and therefore reported
    // a sequence of length 1 for an empty input.
    if (allElements.Count == 0)
    {
        return 0;
    }
    int maxConsecutiveLength = 1;
    while (allElements.Count > 0)
    {
        int firstValue = allElements.First();
        int currentLength = 1;
        // Grow the run leftwards, consuming elements so each is visited once (O(n) total).
        int leftElement = firstValue;
        while (allElements.Contains(leftElement - 1))
        {
            allElements.Remove(leftElement - 1);
            leftElement--;
            currentLength++;
        }
        // Grow the run rightwards.
        int rightElement = firstValue;
        while (allElements.Contains(rightElement + 1))
        {
            allElements.Remove(rightElement + 1);
            rightElement++;
            currentLength++;
        }
        if (currentLength > maxConsecutiveLength)
        {
            maxConsecutiveLength = currentLength;
        }
        allElements.Remove(firstValue);
    }
    return maxConsecutiveLength;
}
public override void BuildNode (ITreeBuilder treeBuilder, object dataObject, NodeInfo nodeInfo)
{
    // Modules have no names/IDs; generally they only exist because they carry
    // additional, optional dependencies. Label the node with a dependency that
    // no sibling module references.
    var module = (ModuleDescription) dataObject;

    var deps = new HashSet<string> ();
    foreach (Dependency dep in module.Dependencies) {
        deps.Add (dep.Name);
    }

    // Remove every dependency that some other module of the add-in also declares.
    foreach (ModuleDescription other in module.ParentAddinDescription.AllModules) {
        if (other == module) {
            continue;
        }
        foreach (Dependency dep in other.Dependencies) {
            deps.Remove (dep.Name);
        }
    }

    nodeInfo.Label = deps.Count > 0
        ? deps.First ().Split (new[] { ' ' })[0]
        : "Module";
}
public int[] FindBest()
{
    // Nearest-neighbour TSP heuristic: start at an arbitrary city, repeatedly hop
    // to the closest unvisited city, then close the tour back to the start.
    var remaining = new HashSet<Point>(_cities);
    var start = remaining.First();
    remaining.Remove(start);

    var route = new List<Point> { start };
    var numericRoute = new List<int> { _cities.IndexOf(start) };
    var distance = 0.0d;

    while (remaining.Any())
    {
        Point next = null;
        var shortest = double.MaxValue;
        foreach (var candidate in remaining)
        {
            var d = Distance(route.Last(), candidate);
            if (d < shortest)
            {
                shortest = d;
                next = candidate;
            }
        }

        route.Add(next);
        numericRoute.Add(_cities.IndexOf(next));
        remaining.Remove(next);
        distance += shortest;
    }

    // Close the loop: add the leg from the last city back to the first.
    distance += Distance(route.First(), route.Last());
    Console.WriteLine("Distance calculated in closestneighbour: " + distance);
    return numericRoute.ToArray();
}
public static Bitmap FillBackGround(Point e, Bitmap bmpSource, int partition, Color fillColor)
{
    // Region-growing flood fill: starting at e, repaint every 8-connected pixel
    // whose red channel lies strictly within +/- partition of the pixel it was
    // reached from. Returns a recoloured copy; bmpSource is left untouched.
    Bitmap bmp = new Bitmap(bmpSource);
    HashSet<Point> frontier = new HashSet<Point>();
    HashSet<Point> seen = new HashSet<Point>();
    frontier.Add(e);
    seen.Add(e);

    while (frontier.Count > 0)
    {
        Point p = frontier.First();
        int x = p.X;
        int y = p.Y;
        // The tolerance is relative to the CURRENT pixel, so the fill can creep
        // across gradual gradients.
        int c = bmpSource.GetPixel(x, y).R;
        bmp.SetPixel(x, y, fillColor);

        for (int i = -1; i < 2; i++)
        {
            for (int j = -1; j < 2; j++)
            {
                int a = x + i;
                int b = y + j;
                bool inBounds = a > -1 && a < bmp.Width && b > -1 && b < bmp.Height;
                if (inBounds
                    && bmpSource.GetPixel(a, b).R > c - partition
                    && bmpSource.GetPixel(a, b).R < c + partition
                    && !seen.Contains(new Point(a, b)))
                {
                    frontier.Add(new Point(a, b));
                    seen.Add(new Point(a, b));
                }
            }
        }
        frontier.Remove(p);
    }
    return bmp;
}
// Walks the control-flow blocks reachable from Head within the parent CFG and
// partitions them into Body (ordinary blocks) and Pivots (blocks that some scope
// in the hierarchy designates as pivots; the walk stops at those).
private void CalculateBodyAndPivots()
{
    var parentCfg = Scope.Parent.LocalCfg;
    var cflow = new List<ControlFlowBlock>();   // every block collected so far, in visit order
    var pivots = new List<ControlFlowBlock>();  // collected blocks that are pivots of some scope
    var todo = new HashSet<ControlFlowBlock> { Head };
    while (todo.IsNotEmpty())
    {
        var v = todo.First();
        cflow.Add(v);
        todo.Remove(v);
        // A block that any scope in the hierarchy lists as a pivot ends the walk here;
        // its successors are deliberately not queued.
        var h_pivots = Scope.Hierarchy().SelectMany(s => s.Pivots);
        if (h_pivots.Contains(v))
        {
            pivots.Add(v);
        }
        else
        {
            // Consistency check: every incoming edge of v must either come from inside
            // the region collected so far or be the edge from Root.
            // NOTE(review): exact semantics of TreeVedges/Vedge (virtual vs. tree edges)
            // are defined elsewhere — confirm against the CFG implementation.
            var inEdges = parentCfg.TreeVedges(null, v);
            var innerEdges = parentCfg.Edges(cflow, cflow);
            var rootEdge = parentCfg.Vedge(Root, v);
            inEdges.Except(innerEdges).Except(rootEdge).AssertEmpty();
            // Queue successors not yet collected.
            var outEdges = parentCfg.Vedges(v, null);
            var pending = outEdges.Select(e => e.Target).Where(v1 => !cflow.Contains(v1));
            pending.ForEach(v1 => todo.Add(v1));
        }
    }
    Body = cflow.Except(pivots).ToReadOnly();
    Pivots = pivots.ToReadOnly();
}
public FeatureInteraction(HashSet<XgbTreeNode> interaction, double gain, double cover, double pathProbability, double depth, double treeIndex, double fScore = 1)
{
    // The interaction's name is the '|'-joined, alphabetically ordered feature list;
    // its depth is the number of participating nodes minus one.
    SplitValueHistogram = new SplitValueHistogram();
    var orderedFeatures = interaction.OrderBy(node => node.Feature).Select(node => node.Feature).ToList();
    Name = string.Join("|", orderedFeatures);
    Depth = interaction.Count - 1;

    Gain = gain;
    Cover = cover;
    FScore = fScore;
    FScoreWeighted = pathProbability;
    ExpectedGain = Gain * pathProbability;
    TreeIndex = treeIndex;
    TreeDepth = depth;

    // Per-occurrence averages, normalized by the interaction's FScore.
    AverageFScoreWeighted = FScoreWeighted / FScore;
    AverageGain = Gain / FScore;
    AverageTreeIndex = TreeIndex / FScore;
    AverageTreeDepth = TreeDepth / FScore;
    HasLeafStatistics = false;

    // A depth-0 "interaction" is a single split; seed the histogram with its value.
    if (Depth == 0)
    {
        SplitValueHistogram.AddValue(interaction.First().SplitValue);
    }
}
private static void Main()
{
    // First input line: n followed by the coordinates of the reference point.
    var input = Console.ReadLine().Split(' ').Select(int.Parse).ToArray();
    var n = input[0];
    var coord0 = new Coord(input[1], input[2]);

    var rgcoord = new HashSet<Coord>();
    for (var i = 0; i < n; i++)
    {
        input = Console.ReadLine().Split(' ').Select(int.Parse).ToArray();
        rgcoord.Add(new Coord(input[0], input[1]));
    }

    // Count distinct lines through coord0: pick any remaining point, then discard
    // every point collinear with it and coord0. The cross-product form avoids
    // divisions and vertical-line special cases.
    var d = 0;
    while (rgcoord.Any())
    {
        d++;
        var pivot = rgcoord.First();
        var vX = pivot.x - coord0.x;
        var vY = pivot.y - coord0.y;
        foreach (var other in rgcoord.ToList())
        {
            if (vY * (other.x - coord0.x) == vX * (other.y - coord0.y))
            {
                rgcoord.Remove(other);
            }
        }
    }
    Console.WriteLine(d);
}
public IEnumerable<State> TransitiveClosure (
    Func<SimpleTransition, bool> selector,
    Func<SimpleTransition, bool> cancel)
{
    // Worklist walk over the states reachable through transitions accepted by
    // 'selector' (null selector accepts all). Returns null as soon as 'cancel'
    // matches any candidate transition; otherwise returns the visited set.
    var done = new HashSet<State> ();
    var work = new HashSet<State> { this };

    while (work.Any ()) {
        var state = work.First ();
        work.Remove (state);
        done.Add (state);

        if (state.Delta == null) {
            continue;
        }

        var transitions = state.Delta
            .Where (t => selector == null || selector (t))
            .ToArray ();

        if (cancel != null && transitions.Any (cancel)) {
            return null;
        }

        foreach (var next in transitions.Select (t => t.Next)) {
            if (!done.Contains (next)) {
                work.Add (next);
            }
        }
    }
    return done;
}
// Prints the letters guessed so far, then renders the sought word with the guessed
// positions revealed and every other position hidden as '_'.
public static void PrintGuessed(HashSet<char> input)
{
    Console.Write("Guessed so far:");
    foreach (char letter in input)
    {
        Console.Write(" " + letter);
    }

    Console.Write("\n Hangman: ");
    for (int i = 0; i < answer.Length; i++)
    {
        Console.Write(input.Contains(answer[i]) ? answer[i] : '_');
    }
    Console.Write("\n\n");
}
public static HashSet<Point> getConnectedPicture(Point e, Bitmap bmpSource)
{
    // Collects the 8-connected component of pixels whose colour equals the colour
    // at seed point e. A scratch bitmap doubles as the visited marker: a pixel is
    // marked visited by painting it with the seed colour.
    Color seedColor = bmpSource.GetPixel(e.X, e.Y);
    Bitmap visited = new Bitmap(bmpSource.Width, bmpSource.Height);
    HashSet<Point> frontier = new HashSet<Point>();
    HashSet<Point> component = new HashSet<Point>();
    frontier.Add(e);
    component.Add(e);

    while (frontier.Count > 0)
    {
        Point p = frontier.First();
        int x = p.X;
        int y = p.Y;
        visited.SetPixel(x, y, seedColor);

        for (int i = -1; i < 2; i++)
        {
            for (int j = -1; j < 2; j++)
            {
                int a = x + i;
                int b = y + j;
                if (a > -1 && a < visited.Width && b > -1 && b < visited.Height
                    && bmpSource.GetPixel(a, b) == seedColor
                    && visited.GetPixel(a, b) != seedColor)
                {
                    frontier.Add(new Point(a, b));
                    component.Add(new Point(a, b));
                }
            }
        }
        frontier.Remove(p);
    }
    return component;
}
/// <summary>
/// When the tessellated solid is sliced at the specified plane, the contact surfaces are
/// described by the return ContactData object. This is a non-destructive function typically
/// used to find the shape and size of 2D surface on the prescribed plane.
/// </summary>
/// <param name="plane">The plane.</param>
/// <param name="ts">The ts.</param>
/// <returns>ContactData.</returns>
/// <exception cref="System.Exception">Contact Edges found that are not contained in loop.</exception>
public static ContactData DefineContact(Flat plane, TessellatedSolid ts)
{
    // Signed distance of every vertex to the slicing plane; the sign tells which
    // side of the plane the vertex lies on.
    var vertexDistancesToPlane = new double[ts.NumberOfVertices];
    for (int i = 0; i < ts.NumberOfVertices; i++)
        vertexDistancesToPlane[i] = ts.Vertices[i].Position.dotProduct(plane.Normal) - plane.DistanceToOrigin;
    // the edges serve as the easiest way to identify where the solid is interacting with the plane.
    // Instead of a foreach, the while loop lets us look ahead to known edges that are irrelevant.
    var edgeHashSet = new HashSet<Edge>(ts.Edges);
    // Contact elements are constructed and then later arranged into loops. Loops make up the returned object, ContactData.
    var straddleContactElts = new List<ContactElement>();
    var inPlaneContactElts = new List<CoincidentEdgeContactElement>();
    while (edgeHashSet.Any())
    {
        // instead of the foreach, we have this while statement and these first 2 lines to enumerate over the edges.
        var edge = edgeHashSet.First();
        edgeHashSet.Remove(edge);
        var toDistance = vertexDistancesToPlane[edge.To.IndexInList];
        var fromDistance = vertexDistancesToPlane[edge.From.IndexInList];
        // Both endpoints on the plane -> the edge lies in the plane; endpoints on
        // opposite sides -> the edge straddles (crosses) the plane.
        if (StarMath.IsNegligible(toDistance) && StarMath.IsNegligible(fromDistance))
            ContactElement.MakeInPlaneContactElement(plane, edge, edgeHashSet, vertexDistancesToPlane, inPlaneContactElts);
        else if ((toDistance > 0 && fromDistance < 0) || (toDistance < 0 && fromDistance > 0))
            straddleContactElts.Add(new ThroughFaceContactElement(plane, edge, toDistance));
    }
    foreach (var contactElement in inPlaneContactElts)
    {
        // next, we find any additional vertices that just touch the plane but don't have in-plane edges
        // to facilitate this we negate all vertices already captures in the inPlaneContactElts
        vertexDistancesToPlane[contactElement.StartVertex.IndexInList] = double.NaN;
        vertexDistancesToPlane[contactElement.EndVertex.IndexInList] = double.NaN;
    }
    for (int i = 0; i < ts.NumberOfVertices; i++)
    {
        // Only vertices ON the plane (negligible distance) that were not already
        // consumed above (NaN never passes IsNegligible) are considered here.
        if (!StarMath.IsNegligible(vertexDistancesToPlane[i])) continue;
        var v = ts.Vertices[i];
        PolygonalFace negativeFace, positiveFace;
        if (ThroughVertexContactElement.FindNegativeAndPositiveFaces(plane, v, vertexDistancesToPlane, out negativeFace, out positiveFace))
            straddleContactElts.Add(new ThroughVertexContactElement(v, negativeFace, positiveFace));
    }
    straddleContactElts.AddRange(inPlaneContactElts);

    var loops = new List<Loop>();
    var numberOfTries = 0;
    // String contact elements together into closed loops; numberOfTries bounds the
    // retries so an element that fits no loop cannot spin forever.
    while (straddleContactElts.Any() && numberOfTries < straddleContactElts.Count)
    {
        // now build loops from stringing together contact elements
        var loop = FindLoop(plane, straddleContactElts, vertexDistancesToPlane);
        if (loop != null)
        {
            Debug.WriteLine(loops.Count + ": " + loop.MakeDebugContactString() + " ");
            loops.Add(loop);
            numberOfTries = 0;
        }
        else numberOfTries++;
    }
    if (straddleContactElts.Any())
        Debug.WriteLine("Contact Edges found that are not contained in loop.");
    return new ContactData(loops);
}
public void QueryTerm_bases_hashing_on_escaped_value()
{
    // Two terms that differ in their other constructor arguments but share the
    // same escaped value must hash to the same bucket and count as duplicates.
    var t1 = new QueryTerm("a", "x", false, false);
    var t2 = new QueryTerm("a", "y", true, false);

    var set = new HashSet<QueryTerm> { t1, t2 };

    Assert.AreEqual(1, set.Count);
    Assert.AreEqual("a", set.First().Escaped);
}
public void ControllerNameByAttribute()
{
    // The controller attribute on the class should dictate the scanned controller name.
    var testedType = typeof(TestClassWithControllerAttribute);
    var expectation = TestClassWithControllerAttribute.ControllerName;

    var scanned = GetScanner().Scan(testedType).ToArray();
    var names = new HashSet<string>(scanned.Select(s => s.ControllerName));

    Assert.That(names.Count, Is.EqualTo(1));
    Assert.That(names.First(), Is.EqualTo(expectation));
}
public void ControllerNameByClassConvention()
{
    // For a "...Controller"-named class the scanner should derive the controller
    // name by stripping the 10-character "Controller" suffix.
    var testedType = typeof(TestClassEndingController);
    var typeName = typeof(TestClassEndingController).Name;
    var expectation = typeName.Substring(0, typeName.Length - 10);

    var scanned = GetScanner().Scan(testedType).ToArray();
    var names = new HashSet<string>(scanned.Select(s => s.ControllerName));

    Assert.That(names.Count, Is.EqualTo(1));
    Assert.That(names.First(), Is.EqualTo(expectation));
}
public void ControllerNameByClassUnconventional()
{
    // A class following no naming convention should keep its full type name.
    var testedType = typeof(TestClassNonConventional);
    var expectation = typeof(TestClassNonConventional).Name;

    var scanned = GetScanner().Scan(testedType).ToArray();
    var names = new HashSet<string>(scanned.Select(s => s.ControllerName));

    Assert.That(names.Count, Is.EqualTo(1));
    Assert.That(names.First(), Is.EqualTo(expectation));
}
// Chat command handler for "/deleteship" (alias "/delship"). With no argument it
// deletes the ship the player is looking at; with an argument it deletes a ship by
// display name, or by "#<1-based index>" into CommandListShips.ShipCache.
// Returns true when the message was recognised as this command (even on failure),
// false when the message is unrelated and other handlers should try it.
public override bool Invoke(ulong steamId, long playerId, string messageText)
{
    // Bare command form: target whatever grid the player is aiming at.
    if (messageText.Equals("/deleteship", StringComparison.InvariantCultureIgnoreCase) || messageText.Equals("/delship", StringComparison.InvariantCultureIgnoreCase))
    {
        var entity = Support.FindLookAtEntity(MyAPIGateway.Session.ControlledObject, true, false, false, false, false, false);
        if (entity != null)
        {
            var shipEntity = entity as Sandbox.ModAPI.IMyCubeGrid;
            if (shipEntity != null)
            {
                DeleteShip(entity);
                return true;
            }
        }
        MyAPIGateway.Utilities.ShowMessage("deleteship", "No ship targeted.");
        return true;
    }

    // Named form: "/deleteship <name>".
    var match = Regex.Match(messageText, @"/((delship)|(deleteship))\s+(?<Key>.+)", RegexOptions.IgnoreCase);
    if (match.Success)
    {
        var shipName = match.Groups["Key"].Value;
        // Collect every grid whose display name matches (case-insensitive).
        var currentShipList = new HashSet<IMyEntity>();
        MyAPIGateway.Entities.GetEntities(currentShipList, e => e is Sandbox.ModAPI.IMyCubeGrid && e.DisplayName.Equals(shipName, StringComparison.InvariantCultureIgnoreCase));
        if (currentShipList.Count == 1)
        {
            // Unambiguous match: delete it.
            DeleteShip(currentShipList.First());
            return true;
        }
        else if (currentShipList.Count == 0)
        {
            // No name match — try the "#index" form against the cached ship list.
            // The cache slot is nulled after deletion so the same index cannot be
            // used twice against a stale entry.
            int index;
            if (shipName.Substring(0, 1) == "#" && Int32.TryParse(shipName.Substring(1), out index) && index > 0 && index <= CommandListShips.ShipCache.Count && CommandListShips.ShipCache[index - 1] != null)
            {
                DeleteShip(CommandListShips.ShipCache[index - 1]);
                CommandListShips.ShipCache[index - 1] = null;
                return true;
            }
        }
        else if (currentShipList.Count > 1)
        {
            // Ambiguous name: report how many grids matched instead of guessing.
            MyAPIGateway.Utilities.ShowMessage("deleteship", "{0} Ships match that name.", currentShipList.Count);
            return true;
        }
        MyAPIGateway.Utilities.ShowMessage("deleteship", "Ship name not found.");
        return true;
    }
    return false;
}
/// <summary>
/// Derives a "requested by" display value from a sequence of changesets: the single
/// checked-in-by user when all changesets agree, or "(Multiple Users)" otherwise.
/// </summary>
/// <param name="changesets">Changesets whose "CheckedInBy" field identifies the user.</param>
/// <returns>The user name, "(Multiple Users)", or an empty string for an empty sequence.</returns>
private static string GetRequestedByFromChangeset(IEnumerable<IBuildInformationNode> changesets)
{
    var users = new HashSet<String>();
    foreach (var changeset in changesets)
        users.Add(changeset.Fields["CheckedInBy"]);

    // Use the Count property rather than the LINQ Count() extension on a HashSet.
    if (users.Count > 1)
        return "(Multiple Users)";

    // BUG FIX: the original called First() unconditionally, throwing
    // InvalidOperationException when the changeset sequence was empty.
    return users.Count == 1 ? users.First() : string.Empty;
}
// Validate that the configured colors are usable by the target nodes and reset
// the gradient/static color to sensible defaults when they are not.
private void CheckForInvalidColorData()
{
    HashSet<Color> validColors = new HashSet<Color>();
    validColors.AddRange(TargetNodes.SelectMany(x => ColorModule.getValidColorsForElementNode(x, true)));

    // A non-empty set means the elements use discrete colors and restrict the
    // palette; an empty set places no restriction at all.
    bool discreteAndInvalid = validColors.Any()
        && (!validColors.Contains(_data.StaticColor)
            || !_data.ColorGradient.GetColorsInGradient().IsSubsetOf(validColors));

    if (discreteAndInvalid)
    {
        _data.ColorGradient = new ColorGradient(validColors.DefaultIfEmpty(Color.White).First());
        _data.StaticColor = validColors.First();
    }
}
static void Main()
{
#if DEBUG
    Console.SetIn(new System.IO.StreamReader("../../input.txt"));
#endif
    // Read the edge list, remembering every distinct endpoint.
    var nodes = new HashSet<int>();
    var edges = new List<KeyValuePair<Tuple<int, int>, int>>();
    foreach (int i in Enumerable.Range(0, int.Parse(Console.ReadLine())))
    {
        int[] parts = Console.ReadLine().Split().Select(int.Parse).ToArray();
        nodes.Add(parts[0]);
        nodes.Add(parts[1]);
        edges.Add(new KeyValuePair<Tuple<int, int>, int>(
            new Tuple<int, int>(parts[0], parts[1]), parts[2]));
    }

    // Kruskal's MST with a naive forest-of-sets union-find: each node starts in
    // its own singleton tree.
    var trees = new HashSet<HashSet<int>>();
    foreach (int node in nodes)
    {
        trees.Add(new HashSet<int> { node });
    }

    int result = 0;
    // Consider edges cheapest-first; take an edge only when it joins two components.
    foreach (var currentEdge in edges.OrderBy(kvp => kvp.Value))
    {
        var tree1 = trees.First(tree => tree.Contains(currentEdge.Key.Item1));
        var tree2 = trees.Last(tree => tree.Contains(currentEdge.Key.Item2));
        if (tree1 == tree2)
        {
            continue;
        }
        tree1.UnionWith(tree2);
        trees.Remove(tree2);
        result += currentEdge.Value;
        if (trees.Count == 1)
        {
            break;
        }
    }
    Console.WriteLine(result);
}
/// <summary>
/// Yields the connected components of <paramref name="graph"/>, each component being
/// the set of vertices reachable from an arbitrary representative by BFS.
/// </summary>
/// <param name="graph">The graph to decompose.</param>
/// <returns>One vertex collection per connected component.</returns>
public static IEnumerable<IEnumerable<Vertex>> GetConnectedComponents(
    this IGraph graph)
{
    HashSet<Vertex> remainingVertices = new HashSet<Vertex>(graph.Vertices());
    while (remainingVertices.Count > 0)
    {
        // BUG FIX: materialize the traversal. The original yielded the lazy
        // enumerable AFTER ExceptWith had already enumerated it once, so any
        // caller enumeration re-ran the BFS against the mutated remaining set
        // (double work at best, inconsistent results at worst).
        List<Vertex> componentVertices =
            graph.BreadthFirstTreeTraversal(remainingVertices.First()).ToList();
        remainingVertices.ExceptWith(componentVertices);
        yield return componentVertices;
    }
}
public void ShouldCreateCSharpAssemblyInfoFileAtPathWhenNotExistsAndEnsureAssemblyInfo()
{
    // With EnsureAssemblyInfo set, a missing VersionAssemblyInfo.cs must be
    // created under the working directory and match the approved content.
    var fileSystem = new TestFileSystem();
    const string workingDir = "C:\\Testing";
    ISet<string> assemblyInfoFile = new HashSet<string> { @"src\Project\Properties\VersionAssemblyInfo.cs" };
    var fullPath = Path.Combine(workingDir, assemblyInfoFile.First());
    var variables = VariableProvider.GetVariablesFor(
        SemanticVersion.Parse("1.0.0", "v"), new TestEffectiveConfiguration(), false);

    var arguments = new Arguments
    {
        EnsureAssemblyInfo = true,
        UpdateAssemblyInfo = true,
        UpdateAssemblyInfoFileName = assemblyInfoFile
    };
    using (new AssemblyInfoFileUpdate(arguments, workingDir, variables, fileSystem))
    {
        fileSystem.ReadAllText(fullPath).ShouldMatchApproved();
    }
}
// Resolves a textual build target to a CommandTarget: the whole suite (empty
// string), a product, a module, or a single project written as
// "modulename.projectname". Throws ArgumentException for ambiguous or unknown
// project references.
public CommandTarget ParseTarget(string target)
{
    if (string.IsNullOrEmpty(target))
        return new FullSuiteTarget(suite);

    if (suite.HasProduct(target))
        return new ProductTarget(suite.GetProduct(target));

    if (suite.HasModule(target))
        return new ModuleTarget(suite.GetModule(target));

    // Look for "modulename.projectname" matches (regular or test projects).
    var matches = new HashSet<Project>();
    foreach (var module in suite.Modules)
    {
        if (!target.StartsWith(module.Name + '.', StringComparison.InvariantCultureIgnoreCase))
            continue;

        string projectName = target.Substring(module.Name.Length + 1);
        if (module.HasProject(projectName))
            matches.Add(module.GetProject(projectName));
        else if (module.HasTestProject(projectName))
            matches.Add(module.GetTestProject(projectName));
    }

    if (matches.Count == 1)
        return new ProjectTarget(matches.First());

    if (matches.Count > 1)
        throw new ArgumentException(
            "The given module and project name identifies more than one projects", "target");
    throw new ArgumentException("The given project does not exist", "target");
}
/// <summary>
/// A* search over the board grid from currentLocation to target (unit step cost).
/// </summary>
/// <param name="target">Destination tile position.</param>
/// <param name="currentLocation">Starting tile position.</param>
/// <param name="board">Board grid; only its dimensions are used for neighbor bounds.</param>
/// <param name="spikeCost">Reserved extra cost for spike tiles (currently unused here).</param>
/// <returns>The reconstructed path, or null when the target is unreachable.</returns>
public PathData pathTo(Pos target, Pos currentLocation, Tile[][] board, int spikeCost = 5)
{
    Dictionary<string, InternalTile> Navigated = new Dictionary<string, InternalTile>();
    HashSet<InternalTile> Closed = new HashSet<InternalTile>();
    InternalTile beginning = new InternalTile() { TilePos = currentLocation, Weight = 0 };
    HashSet<InternalTile> Opened = new HashSet<InternalTile> { beginning };
    // g-scores (cost from start) and f-scores (g + heuristic), keyed by tile key.
    Dictionary<string, int> Scores = new Dictionary<string, int>
    {
        { GetKey(beginning.TilePos.x, beginning.TilePos.y), beginning.Weight }
    };
    Dictionary<string, float> FullScores = new Dictionary<string, float>
    {
        { GetKey(beginning.TilePos.x, beginning.TilePos.y), GetDistance(currentLocation, target) }
    };
    while (Opened.Any())
    {
        // Expand the open tile with the lowest estimated total cost.
        InternalTile lowest = Opened.First(tile => GetKey(tile.TilePos.x, tile.TilePos.y) == GetLowestCostTile(FullScores, Opened));
        if (lowest.TilePos.x == target.x && lowest.TilePos.y == target.y)
        {
            return ReconstructPath(Navigated, target);
        }
        Opened.Remove(lowest);
        Closed.Add(lowest);
        foreach (Pos neighbor in GetNeighbors(lowest.TilePos, board.Length, board[0].Length))
        {
            if (Closed.Any(tile => tile.TilePos.x == neighbor.x && tile.TilePos.y == neighbor.y))
            {
                continue;
            }
            string neighborKey = GetKey(neighbor.x, neighbor.y);
            int curScore = Scores[GetKey(lowest.TilePos.x, lowest.TilePos.y)] + 1;
            if (!Opened.Any(tile => tile.TilePos.x == neighbor.x && tile.TilePos.y == neighbor.y))
            {
                Opened.Add(new InternalTile { TilePos = new Pos { x = neighbor.x, y = neighbor.y } });
            }
            else if (curScore >= (Scores.ContainsKey(neighborKey) ? Scores[neighborKey] : int.MaxValue))
            {
                continue;
            }
            // BUG FIX: the original used Dictionary.Add here, which throws
            // ArgumentException the second time a tile is reached via a cheaper
            // route. Indexer assignment inserts or updates as appropriate.
            Navigated[neighborKey] = lowest;
            Scores[neighborKey] = curScore;
            FullScores[neighborKey] = curScore + GetDistance(neighbor, target);
        }
    }
    // Open set exhausted without reaching the target.
    return null;
}
// Builds the hierarchical navigation menu from the declared navigation item
// attributes. resolveUrl (optional) rewrites item URLs (e.g. "~/" resolution);
// filter (optional) pre-filters the attributes considered.
public static List<NavigationItem> GetNavigationItems(Func<string, string> resolveUrl = null, Func<NavigationItemAttribute, bool> filter = null)
{
    var result = new List<NavigationItem>();
    var menuItems = GetNavigationItemAttributes(filter);
    // Track every category path seen; any path never visited during the normal
    // walk gets a synthetic menu at the end so orphan items still show up.
    var remaining = new HashSet<string>();
    foreach (var item in menuItems)
        remaining.Add(item.Key);
    // Recursive lambda (declared null first so it can reference itself) that
    // converts one attribute plus its children into a NavigationItem subtree.
    Action<List<NavigationItem>, NavigationItemAttribute> processMenu = null;
    processMenu = (parent, menu) =>
    {
        var path = (menu.Category.IsEmptyOrNull() ? "" : (menu.Category + "/"));
        path += (menu.Title.TrimToNull() ?? "");
        remaining.Remove(path);
        var section = new NavigationItem
        {
            Title = menu.Title,
            Url = (!string.IsNullOrEmpty(menu.Url) && resolveUrl != null) ? resolveUrl(menu.Url) : menu.Url,
            IconClass = menu.IconClass.TrimToNull(),
            Target = menu.Target.TrimToNull()
        };
        // A section is kept if the user may access its own URL, or if it ends up
        // with at least one visible child.
        bool isAuthorizedSection = !menu.Url.IsEmptyOrNull() &&
            (menu.Permission.IsEmptyOrNull() || Authorization.HasPermission(menu.Permission));
        var children = menuItems[path];
        foreach (var child in children)
            processMenu(section.Children, child);
        if (section.Children.Count > 0 || isAuthorizedSection)
            parent.Add(section);
    };
    // Root-level items live under the empty path.
    remaining.Remove("");
    foreach (var menu in menuItems[""])
        processMenu(result, menu);
    // Synthesize a plain menu for every category path no attribute walk reached.
    while (remaining.Count > 0)
    {
        var first = remaining.First();
        remaining.Remove(first);
        var menu = new NavigationMenuAttribute(Int32.MaxValue, first);
        processMenu(result, menu);
    }
    return result;
}
/// <summary>
/// Generic path finding (Dijkstra with unit edge weights). Works on any map type.
/// </summary>
/// <param name="map">The map providing nodes and outgoing paths.</param>
/// <param name="start">Starting node.</param>
/// <param name="goal">Goal node.</param>
/// <returns>A list of paths to take to move from the start node to the goal node using
/// the minimum number of paths, or null if no such list exists.</returns>
public static List<Path> Find(IGameMap map, IMapNode start, IMapNode goal)
{
    if (start == goal)
        return new List<Path>();

    Dictionary<IMapNode, int> distance = new Dictionary<IMapNode, int>();
    Dictionary<IMapNode, Path> previous = new Dictionary<IMapNode, Path>();
    ICollection<IMapNode> unvisited = new HashSet<IMapNode>();
    foreach (IMapNode node in map.Nodes)
    {
        distance.Add(node, Int32.MaxValue);
        previous.Add(node, null);
        unvisited.Add(node);
    }
    distance[start] = 0;

    while (unvisited.Count > 0)
    {
        // PERF FIX: the original evaluated unvisited.Min inside a First predicate,
        // recomputing the minimum for every candidate (O(n^2) per extraction).
        // A single linear scan finds the closest unvisited node.
        IMapNode currentNode = null;
        int best = Int32.MaxValue;
        foreach (IMapNode node in unvisited)
        {
            if (distance[node] < best)
            {
                best = distance[node];
                currentNode = node;
            }
        }
        // Every remaining node is unreachable; no point continuing.
        if (currentNode == null)
            break;

        unvisited.Remove(currentNode);
        if (currentNode == goal)
            break;

        // Relax all outgoing paths (each path costs 1).
        foreach (Path p in map.GetPathsFrom(currentNode))
        {
            IMapNode neighbor = p.To;
            int alternateDistance = distance[currentNode] + 1;
            if (alternateDistance < distance[neighbor])
            {
                distance[neighbor] = alternateDistance;
                previous[neighbor] = p;
            }
        }
    }

    // BUG FIX: the original dereferenced previous[goal] unconditionally, throwing
    // NullReferenceException for an unreachable goal instead of returning null as
    // documented.
    if (previous[goal] == null)
        return null;

    // Walk the predecessor chain backwards to reconstruct the path.
    List<Path> path = new List<Path>();
    Path prevPath = previous[goal];
    do
    {
        path.Insert(0, prevPath);
        prevPath = previous[prevPath.From];
    } while (prevPath != null);
    return path;
}
public void DistributedCacheEmulator_Get_WorksMultiThreaded()
{
    // Hammer Get from many parallel workers; every read must observe the value
    // stored before the parallel section started.
    var cache = new DistributedCacheEmulator();
    cache.Set("MultiThreadedValue", 12345);

    var values = new HashSet<int>();
    Parallel.ForEach(new int[100], _ =>
    {
        var result = cache.Get<int>("MultiThreadedValue");
        lock (values)
        {
            values.Add(result);
        }
    });

    Assert.Equal(12345, values.First());
}
public SuggestedMoves GetPath(Color[,] board)
{
    // Build the flood tree and locate the node(s) deepest below the root.
    TreeNode head = MapBuilder.BuildTree(board);
    ISet<TreeNode> farthestNodes = new HashSet<TreeNode>();
    int highestDepth = 0;
    foreach (TreeNode node in head.BFS()) //DFS would be better
    {
        int depth = GetDepth(node);
        if (depth > highestDepth)
        {
            // New record depth: previous candidates no longer count.
            highestDepth = depth;
            farthestNodes.Clear();
            farthestNodes.Add(node);
        }
        else if (depth == highestDepth)
        {
            farthestNodes.Add(node);
        }
    }

    Console.Write("Farthest nodes are ");
    farthestNodes.Select(n => n.Color).ToList().ForEach(c => Console.Write(c + ", "));
    Console.WriteLine("\r\nFarthest node is " + GetDepth(farthestNodes.First()) + " away from the current");

    // For each farthest node, walk up to the root's direct child on its path and
    // tally that child's colour; the top colour steps toward the most deep nodes.
    IDictionary<Color, int> tally = new Dictionary<Color, int>();
    foreach (TreeNode farthestNode in farthestNodes)
    {
        TreeNode stepNode = farthestNode;
        while (stepNode.Parent != head)
        {
            stepNode = stepNode.Parent;
        }
        if (tally.ContainsKey(stepNode.Color))
        {
            tally[stepNode.Color]++;
        }
        else
        {
            tally.Add(stepNode.Color, 1);
        }
    }

    SuggestedMoves suggestedMoves = new SuggestedMoves();
    suggestedMoves.AddFirst(new SuggestedMove(tally.OrderByDescending(kvp => kvp.Value).Select(kvp => kvp.Key)));
    return suggestedMoves;
}
// Flood-fills outward from this tile through its four neighbours (via addIf,
// which presumably only queues matching, unexplored tiles — confirm in addIf)
// and returns every tile collected.
private Tile[] getMatchingNeighbors()
{
    HashSet<Tile> frontier = new HashSet<Tile> { this };
    HashSet<Tile> explored = new HashSet<Tile>();

    while (frontier.Count > 0)
    {
        Tile current = frontier.First();
        frontier.Remove(current);
        explored.Add(current);

        addIf(frontier, explored, current.up);
        addIf(frontier, explored, current.down);
        addIf(frontier, explored, current.left);
        addIf(frontier, explored, current.right);
    }
    return explored.ToArray();
}
// Searches for a walking path from startLoc to a foot location from which one of
// blocksToMakeAvailable can be worked, preferring the goal block with the lowest
// world-space Y. Returns the path (ending at the chosen node's location), a
// single-element path when there is nothing to do, or null when no path exists.
// NOTE(review): the First() element of blocksToMakeAvailable is used as the
// heuristic target for every node — presumably any goal works as an estimate;
// confirm against PathNodeForFindingLowGoals.
private List<BlockLoc> PathToMakeGoalsAvailable(IslandPathingProfile startProfile, ref BlockLoc startLoc,
    out BlockLoc lowestAvailableBlockFound, HashSet<BlockLoc> blocksToMakeAvailable, int heightOfEntity)
{
    lowestAvailableBlockFound = new BlockLoc();
    // Nothing to make available: trivially "path" to where we already stand.
    if (blocksToMakeAvailable.Count == 0)
    {
        List<BlockLoc> noPathResult = new List<BlockLoc>();
        noPathResult.Add(startLoc);
        return (noPathResult);
    }
    PathNodePriorityQueue openNodes = new PathNodePriorityQueue();
    HashSet<BlockLoc> visitedLocations = new HashSet<BlockLoc>();
    // Best (lowest-Y goal) node discovered so far; stays null until some node can
    // reach one of the goal blocks.
    PathNodeForFindingLowGoals lowestPathNodeFoundSoFar = null;
    openNodes.insertNode(new PathNodeForFindingLowGoals(null, startLoc, 0, blocksToMakeAvailable.First(), int.MaxValue));
    IslandPathingProfile profile = startProfile;
    while (openNodes.size() > 0)
    {
        PathNode from = openNodes.pop();
        List<BlockLoc> nextSteps = profile.getSpacesThatCanBeMovedToFrom(from.loc, heightOfEntity);
        // Drop already-visited successors (iterate backwards while removing).
        for (int i = nextSteps.Count - 1; i >= 0; i--)
        {
            if (visitedLocations.Contains(nextSteps[i]))
            {
                nextSteps.RemoveAt(i);
            }
        }
        // Give up improving and return the best path found when the node has spent
        // its post-goal step budget, or it descends from a goal node and is a dead end.
        if (((PathNodeForFindingLowGoals)from).hasExaustedPostGoalSteps() ||
            (((PathNodeForFindingLowGoals)from).isDescendedFromNodeAtAGoal() && nextSteps.Count == 0))
        {
            List<BlockLoc> finalPath = getPathListFromEnd(lowestPathNodeFoundSoFar);
            finalPath.RemoveAt(0);
            finalPath.Add(lowestPathNodeFoundSoFar.loc);
            Console.WriteLine(finalPath.Count);
            return (finalPath);
        }
        //adding new nodes to the openNodes unmippedArray
        foreach (BlockLoc next in nextSteps)
        {
            PathNodeForFindingLowGoals toAdd = new PathNodeForFindingLowGoals
                (from, next, from.costToGetHere + 1, blocksToMakeAvailable.First(),
                ((PathNodeForFindingLowGoals)from).getStepsUntilGiveUpOnFindingBetterBlock() - 1);
            // Does this step let us work any of the goal blocks? Track the goal
            // with the lowest world-space Y seen so far.
            HashSet<BlockLoc> blocksAvailableFromToAdd = profile.getBlocksAvailableForWorkFromFootLoc(toAdd.loc);
            foreach (BlockLoc available in blocksAvailableFromToAdd)
            {
                if (blocksToMakeAvailable.Contains(available))
                {
                    if (lowestPathNodeFoundSoFar == null || available.WSY() < lowestAvailableBlockFound.WSY())
                    {
                        lowestAvailableBlockFound = available;
                        toAdd.setStepCounterWhenNodeIsOnGoal();
                        lowestPathNodeFoundSoFar = toAdd;
                    }
                }
            }
            //toAdd.
            toAdd.incrementPostGoalSteps();
            //
            // Compositer.addFlagForThisFrame(toAdd.xLowZ.toWorldSpaceVector3(), "white");
            openNodes.insertNode(toAdd);
            visitedLocations.Add(next);
        }
    }
    // Open set exhausted: return the best goal path found, if any.
    if (lowestPathNodeFoundSoFar != null)
    {
        List<BlockLoc> finalPath = getPathListFromEnd(lowestPathNodeFoundSoFar);
        finalPath.RemoveAt(0);
        finalPath.Add(lowestPathNodeFoundSoFar.loc);
        Console.WriteLine(finalPath.Count);
        return (finalPath);
    }
    return (null);//no path found
}
/// <summary>
/// https://projecteuler.net/problem=51 --skip=12,15,20,26,60
/// Finds the smallest six-digit prime that, by replacing a set of repeated digits
/// with each digit value, yields an eight-member prime family.
/// </summary>
/// <param name="arguments">Problem descriptor (provides the Id used to look up the expected answer).</param>
/// <returns>Result carrying the answer and a human-readable message.</returns>
public static Result PrimeDigitReplacements(Problem arguments)
{
    /// <summary>
    /// Return true if all digits in a numbers are the same, except those that repeat the
    /// same digit and it just increases/decreases from one to the other. For example
    /// CompareNumbers(124353,124757)=true, CompareNumbers(124353,124151)=true,
    /// CompareNumbers(121313,121757)=false. Also if numbers are the same return false.
    /// </summary>
    /// <param name="left"></param>
    /// <param name="right"></param>
    /// <returns></returns>
    bool NumbersBelongToSameDigitFamily(int left, int right, int repeating_sequences = 1)
    {
        // Positions of each candidate repeating digit found in 'left', keyed by digit.
        var repeating = new Dictionary<char, List<int>>();
        var sleft = left.ToString();
        var sright = right.ToString();
        if (left == right)
        {
            return (false);
        }
        if (sleft.Length != sright.Length)
        {
            return (false);
        }
        for (var i = 0; i < sleft.Length; i++)
        {
            var rdigit = sright[i];
            var ldigit = sleft[i];
            // New candidate repeating digit: must occur at least twice in 'left'
            // and we still have budget for another repeating sequence.
            if ((repeating.Count < repeating_sequences) && !repeating.ContainsKey(ldigit) && (sleft.Count(c => c == ldigit) >= 2))
            {
                if (repeating.ContainsKey(ldigit) && repeating[ldigit].Contains(i))
                {
                    continue;
                }
                else
                {
                    // Record every position of this digit in 'left', then require
                    // 'right' to hold one single digit across all those positions.
                    repeating.Add(ldigit, new List<int>());
                    foreach (var (digit, j) in sleft.Enumerate())
                    {
                        if (digit == ldigit)
                        {
                            repeating[ldigit].Add(j);
                        }
                    }
                    bool match = true;
                    foreach (var k in repeating[ldigit])
                    {
                        match = match && sright[k] == rdigit;
                        if (!match)
                        {
                            break;
                        }
                    }
                    if (!match)
                    {
                        return (false);
                    }
                }
            }
            else if (repeating.ContainsKey(ldigit) && repeating[ldigit].Contains(i))
            {
                // Position belongs to an already-validated repeating sequence.
                continue;
            }
            else if (rdigit == ldigit)
            {
                continue;
            }
            else if (ldigit != rdigit)
            {
                // Non-repeating positions must match exactly.
                return (false);
            }
        }
        return (true);
    }

    // get all six digit primes
    var primes = UtilityMath.GeneratePrimes(100000, 1000000).ToArray();
    var repeats = new List<int>();
    // get primes with repeated digits (not necessarily adjacent digits)
    for (int i = 0; i < primes.Length; i++)
    {
        var counts = UtilityMath.GetRepeatedDigitCounts(primes[i]);
        if (counts.Count(c => c > 1) >= 1) // choose only primes with at least one repeatig digit
        {
            repeats.Add(primes[i]);
        }
    }
    // get first eight number prime-family
    HashSet<int> family = null;
    foreach (var prime in repeats)
    {
        family = new HashSet<int> { prime };
        foreach (var n in repeats)
        {
            if (NumbersBelongToSameDigitFamily(prime, n))
            {
                family.Add(n);
            }
            if (family.Count == 8)
            {
                break;
            }
        }
        if (family.Count == 8)
        {
            break;
        }
    }
    // Release the large working arrays before composing the result.
    primes = null;
    repeats = null;
    var answer = family?.First().ToString();
    var message = string.Format("The smallest prime which, by replacing part of the number (not necessarily adjacent digits) with the same digit, is part of an eight prime value family is {0}.", answer);
    if (Answers[arguments.Id] != answer)
    {
        message += string.Format(" => INCORRECT ({0})", Answers[arguments.Id]);
    }
    var r = new Result(arguments.Id, message)
    {
        Answer = answer
    };
    return (r);
}
/// <summary>
/// Kahn-style topological sort over <c>_vertices</c> with optional cycle breaking.
/// Vertices with no predecessors are emitted first; when a cycle blocks progress,
/// <paramref name="canBreakEdge"/> (if supplied) is consulted to drop an edge.
/// If no edge can be broken, a cycle is reconstructed and reported via ThrowCycle.
/// </summary>
/// <param name="canBreakEdge">Optional predicate deciding whether a given edge may be ignored to break a cycle.</param>
/// <param name="formatCycle">Optional formatter used when reporting an unbreakable cycle.</param>
/// <returns>The vertices in topological order.</returns>
public IReadOnlyList<TVertex> TopologicalSort(
    [CanBeNull] Func<TVertex, TVertex, IEnumerable<TEdge>, bool> canBreakEdge,
    [CanBeNull] Func<IReadOnlyList<Tuple<TVertex, TVertex, IEnumerable<TEdge>>>, string> formatCycle)
{
    var sortedQueue = new List<TVertex>();
    var predecessorCounts = new Dictionary<TVertex, int>();

    // Count incoming edges for every vertex that has at least one predecessor.
    foreach (var vertex in _vertices)
    {
        foreach (var outgoingNeighbor in GetOutgoingNeighbors(vertex))
        {
            if (predecessorCounts.ContainsKey(outgoingNeighbor))
            {
                predecessorCounts[outgoingNeighbor]++;
            }
            else
            {
                predecessorCounts[outgoingNeighbor] = 1;
            }
        }
    }

    // Seed the queue with all roots (vertices that have no predecessors).
    foreach (var vertex in _vertices)
    {
        if (!predecessorCounts.ContainsKey(vertex))
        {
            sortedQueue.Add(vertex);
        }
    }
    var index = 0;
    while (sortedQueue.Count < _vertices.Count)
    {
        while (index < sortedQueue.Count)
        {
            var currentRoot = sortedQueue[index];
            foreach (var successor in GetOutgoingNeighbors(currentRoot).Where(neighbor => predecessorCounts.ContainsKey(neighbor)))
            {
                // Decrement counts for edges from sorted vertices and append any vertices that no longer have predecessors
                predecessorCounts[successor]--;
                if (predecessorCounts[successor] == 0)
                {
                    sortedQueue.Add(successor);
                    predecessorCounts.Remove(successor);
                }
            }
            index++;
        }

        // Cycle breaking
        if (sortedQueue.Count < _vertices.Count)
        {
            var broken = false;
            var candidateVertices = predecessorCounts.Keys.ToList();
            var candidateIndex = 0;

            // Iterate over the unsorted vertices
            while ((candidateIndex < candidateVertices.Count) && !broken && (canBreakEdge != null))
            {
                var candidateVertex = candidateVertices[candidateIndex];

                // Find vertices in the unsorted portion of the graph that have edges to the candidate
                var incomingNeighbors = GetIncomingNeighbors(candidateVertex)
                    .Where(neighbor => predecessorCounts.ContainsKey(neighbor)).ToList();
                foreach (var incomingNeighbor in incomingNeighbors)
                {
                    // Check to see if the edge can be broken
                    if (canBreakEdge(incomingNeighbor, candidateVertex, _successorMap[incomingNeighbor][candidateVertex]))
                    {
                        predecessorCounts[candidateVertex]--;
                        if (predecessorCounts[candidateVertex] == 0)
                        {
                            // Candidate freed; resume the normal sort loop.
                            sortedQueue.Add(candidateVertex);
                            predecessorCounts.Remove(candidateVertex);
                            broken = true;
                            break;
                        }
                    }
                }
                candidateIndex++;
            }
            if (!broken)
            {
                // Failed to break the cycle
                var currentCycleVertex = _vertices.First(v => predecessorCounts.ContainsKey(v));
                var cycle = new List<TVertex> { currentCycleVertex };
                var finished = false;
                while (!finished)
                {
                    // Find a cycle: walk backwards through unsorted predecessors,
                    // marking visited vertices with -1 until we revisit one.
                    foreach (var predecessor in GetIncomingNeighbors(currentCycleVertex)
                        .Where(neighbor => predecessorCounts.ContainsKey(neighbor)))
                    {
                        if (predecessorCounts[predecessor] != 0)
                        {
                            predecessorCounts[currentCycleVertex] = -1;
                            currentCycleVertex = predecessor;
                            cycle.Add(currentCycleVertex);
                            finished = predecessorCounts[predecessor] == -1;
                            break;
                        }
                    }
                }
                // The walk collected the cycle in reverse; restore forward order before reporting.
                cycle.Reverse();
                ThrowCycle(cycle, formatCycle);
            }
        }
    }
    return (sortedQueue);
}
/// <summary>
/// Looks up the registration whose resolve type matches <paramref name="interfaceType"/>.
/// Throws <see cref="InvalidOperationException"/> when no such registration exists.
/// </summary>
private RegisteredObject FetchRegisteredObject(Type interfaceType)
{
    return _registeredObjects.First(registration => registration.TypeToResolve == interfaceType);
}
/// <summary>
/// Builds an XPath expression from a set of features. Selector features are indexed
/// by their first feature string; Order features combine selectors into path or
/// predicate conditions; leftover selectors become self/ancestor-or-self conditions.
/// </summary>
/// <param name="featureSet">Features to translate. NOTE(review): enumerated multiple
/// times (loops plus Count()) — materialize at the caller if the source is lazy.</param>
/// <returns>The assembled XPath string.</returns>
public static string FeatureSetToXpath(IEnumerable<Feature> featureSet)
{
    /////Need to be rewritten to optimize code and performance
    Dictionary<string, Feature> featureMapping = new Dictionary<string, Feature>();
    HashSet<string> doneFeatures = new HashSet<string>();
    HashSet<string> conditionSet = new HashSet<string>();

    // Index every Selector feature by its first feature string (first one wins).
    foreach (Feature currFeature in featureSet)
    {
        if (currFeature.type.Equals(Feature.FeatureType.Selector))
        {
            if (!featureMapping.ContainsKey(currFeature.feature.First()))
            {
                featureMapping.Add(currFeature.feature.First(), currFeature);
            }
        }
    }

    // Translate each Order feature into a condition built from its referenced selectors.
    foreach (Feature currFeature in featureSet)
    {
        if (currFeature.type.Equals(Feature.FeatureType.Order))
        {
            if (!currFeature.sameIndex)
            {
                // Different indices: chain the selectors as a descendant path (a//b//...).
                string currSelector = "";
                foreach (string curr in currFeature.feature)
                {
                    Feature cf = new Feature();
                    featureMapping.TryGetValue(curr, out cf);
                    if (cf.nodeNameFeature)
                    {
                        currSelector = currSelector + ((currSelector.Equals("")) ? "" : "//") + cf.feature.First();
                    }
                    else
                    {
                        currSelector = currSelector + ((currSelector.Equals("")) ? "" : "//") + "*[" + cf.feature.First() + "]";
                    }
                }
                conditionSet.Add(currSelector);
            }
            else
            {
                // Same index: merge into one node test, AND-ing the non-name conditions.
                string currSelector = "";
                string nodeName = "*";
                string nodeCondition = "";
                foreach (string curr in currFeature.feature)
                {
                    Feature cf = new Feature();
                    featureMapping.TryGetValue(curr, out cf);
                    if (cf.nodeNameFeature)
                    {
                        nodeName = cf.feature.First();
                    }
                    else
                    {
                        nodeCondition = nodeCondition + ((nodeCondition.Equals("")) ? "" : " and ") + cf.feature.First();
                    }
                }
                currSelector = "" + nodeName;
                if (!nodeCondition.Equals(""))
                {
                    currSelector = currSelector + "[" + nodeCondition + "]";
                }
                conditionSet.Add(currSelector);
            }

            // Mark every selector consumed by this Order feature as handled.
            foreach (string f in currFeature.feature)
            {
                doneFeatures.Add(f);
            }
        }
    }

    // Selectors not consumed by any Order feature become standalone axis conditions.
    foreach (string k in featureMapping.Keys)
    {
        if (doneFeatures.Contains(k))
        {
            continue;
        }
        doneFeatures.Add(k);
        Feature curr = new Feature();
        featureMapping.TryGetValue(k, out curr);
        if (curr.nodeNameFeature)
        {
            conditionSet.Add((curr.Axe.Equals(Feature.Axes.Self) ?
                              "self::" : "ancestor-or-self::") + curr.feature.First());
        }
        else
        {
            conditionSet.Add((curr.Axe.Equals(Feature.Axes.Self) ? "self::" : "ancestor-or-self::") + "*[" + curr.feature.First() + "]");
        }
    }
    string xpath = "";
    if (featureSet.Count() > 1)
    {
        // Multiple features: AND the conditions as stacked predicates //*[c1][c2]...
        xpath = "//*[";
        bool firstiteration = true;
        foreach (string cond in conditionSet)
        {
            if (!firstiteration)
            {
                xpath = xpath + " ][ ";
            }
            xpath = xpath + cond;
            firstiteration = false;
        }
        xpath = xpath + "]";
    }
    else
    {
        // Single feature: strip the axis prefix and emit a direct descendant query.
        // NOTE(review): conditionSet.First() throws if the set is empty — assumes
        // at least one condition was produced above; confirm against callers.
        string currCondition = conditionSet.First();
        if (currCondition.Contains("ancestor-or-self::"))
        {
            currCondition = currCondition.Replace("ancestor-or-self::", "");
            xpath = "//" + currCondition + " | //" + currCondition + "//*";
        }
        else
        {
            currCondition = currCondition.Replace("self::", "");
            xpath = "//" + currCondition;
        }
    }
    return (xpath);
}
/// <summary>
/// Returns the first registered binding whose target object and target property
/// match the given pair. Throws <see cref="InvalidOperationException"/> if none matches.
/// </summary>
public XamlBinding GetBinding(PerspexObject po, PerspexProperty pp)
{
    Func<XamlBinding, bool> matchesTarget =
        binding => binding.Target == po && binding.TargetProperty == pp;
    return _bindings.First(matchesTarget);
}
/// <summary>
/// Builds the cppcheck command line for the given set of configured source files:
/// severities, suppressions, include paths, a temp file list, and (optionally)
/// the preprocessor macros of the files' current configuration.
/// </summary>
/// <param name="configuredFiles">Files to analyze; all are expected to belong to one project.</param>
/// <param name="analysisOnSavedFile">True when triggered by saving a single file (enables lighter settings).</param>
/// <param name="tempFileName">Path of the temp file to which the file list is written.</param>
/// <returns>The argument string to pass to cppcheck, or "" when there are no files.</returns>
private string getCPPCheckArgs(SourceFilesWithConfiguration configuredFiles, bool analysisOnSavedFile, string tempFileName)
{
    if (!configuredFiles.Any())
    {
        Debug.Fail("Empty files list!");
        return ("");
    }
    Debug.Assert(_numCores > 0);
    String cppheckargs = Properties.Settings.Default.DefaultArguments;
    if (Properties.Settings.Default.SeveritiesString.Length != 0)
    {
        cppheckargs += " --enable=" + Properties.Settings.Default.SeveritiesString;
    }

    // User-configured suppressions, plus "unmatchedSuppression" which is always suppressed.
    HashSet<string> suppressions = new HashSet<string>(Properties.Settings.Default.SuppressionsString.Split(','));
    suppressions.Add("unmatchedSuppression");

    // NOTE(review): these two sets are never read or written in this method — apparently dead locals.
    HashSet<string> skippedFilesMask = new HashSet<string>();
    HashSet<string> skippedIncludeMask = new HashSet<string>();

    // Merge global and solution level suppression info.
    SuppressionsInfo unitedSuppressionsInfo = readSuppressions(ICodeAnalyzer.SuppressionStorage.Global);
    unitedSuppressionsInfo.UnionWith(readSuppressions(ICodeAnalyzer.SuppressionStorage.Solution));
    var filesToAnalyze = configuredFiles.Files;

    // Creating the list of all different project locations (no duplicates)
    HashSet<string> projectPaths = new HashSet<string>(); // enforce uniqueness on the list of project paths
    foreach (var file in filesToAnalyze)
    {
        projectPaths.Add(file.BaseProjectPath);
    }
    Debug.Assert(projectPaths.Count == 1);
    _projectBasePath = projectPaths.First();
    _projectName = filesToAnalyze.First().ProjectName;

    // Creating the list of all different suppressions (no duplicates)
    foreach (var path in projectPaths)
    {
        unitedSuppressionsInfo.UnionWith(readSuppressions(SuppressionStorage.Project, path, filesToAnalyze.First().ProjectName));
    }
    cppheckargs += (" --relative-paths=\"" + _projectBasePath + "\"");
    cppheckargs += (" -j " + _numCores.ToString());
    if (Properties.Settings.Default.InconclusiveChecksEnabled)
    {
        cppheckargs += " --inconclusive ";
    }
    suppressions.UnionWith(unitedSuppressionsInfo.SuppressionLines);
    foreach (string suppression in suppressions)
    {
        if (!String.IsNullOrWhiteSpace(suppression))
        {
            cppheckargs += (" --suppress=" + suppression);
        }
    }
    if (!(analysisOnSavedFile && Properties.Settings.Default.IgnoreIncludePaths))
    {
        // We only add include paths once, and then specify a set of files to check
        HashSet<string> includePaths = new HashSet<string>();
        foreach (var file in filesToAnalyze)
        {
            if (!matchMasksList(file.FilePath, unitedSuppressionsInfo.SkippedFilesMask))
            {
                includePaths.UnionWith(file.IncludePaths);
            }
        }
        includePaths.Add(filesToAnalyze.First().BaseProjectPath); // Fix for #60
        foreach (string path in includePaths)
        {
            if (!matchMasksList(path, unitedSuppressionsInfo.SkippedIncludesMask))
            {
                String includeArgument = " -I\"" + path + "\"";
                cppheckargs = cppheckargs + " " + includeArgument;
            }
        }
    }

    // Write the list of files to check into the temp file and point cppcheck at it.
    using (StreamWriter tempFile = new StreamWriter(tempFileName))
    {
        foreach (SourceFile file in filesToAnalyze)
        {
            if (!matchMasksList(file.FilePath, unitedSuppressionsInfo.SkippedFilesMask))
            {
                tempFile.WriteLine(file.FilePath);
            }
        }
    }
    cppheckargs += " --file-list=\"" + tempFileName + "\"";
    if ((analysisOnSavedFile && Properties.Settings.Default.FileOnlyCheckCurrentConfig) ||
        (!analysisOnSavedFile && Properties.Settings.Default.ProjectOnlyCheckCurrentConfig)) // Only checking current macros configuration (for speed)
    {
        cppheckargs = cppheckargs.Replace("--force", "");

        // Creating the list of all different macros (no duplicates)
        HashSet<string> macros = new HashSet<string>();
        // TODO: handle /Zc:__cplusplus
        // https://devblogs.microsoft.com/cppblog/msvc-now-correctly-reports-__cplusplus/
        macros.Add("__cplusplus=199711L"); // At least in VS2012, this is still 199711L

        // Assuming all files passed here are from the same project / same toolset, which should be true, so peeking the first file for global settings
        switch (filesToAnalyze.First().vcCompilerVersion)
        {
        case SourceFile.VCCompilerVersion.vc2003:
            macros.Add("_MSC_VER=1310");
            break;
        case SourceFile.VCCompilerVersion.vc2005:
            macros.Add("_MSC_VER=1400");
            break;
        case SourceFile.VCCompilerVersion.vc2008:
            macros.Add("_MSC_VER=1500");
            break;
        case SourceFile.VCCompilerVersion.vc2010:
            macros.Add("_MSC_VER=1600");
            break;
        case SourceFile.VCCompilerVersion.vc2012:
            macros.Add("_MSC_VER=1700");
            break;
        case SourceFile.VCCompilerVersion.vc2013:
            macros.Add("_MSC_VER=1800");
            break;
        case SourceFile.VCCompilerVersion.vc2015:
            macros.Add("_MSC_VER=1900");
            break;
        case SourceFile.VCCompilerVersion.vc2017:
            macros.Add("_MSC_VER=1916");
            break;
        case SourceFile.VCCompilerVersion.vc2019:
            macros.Add("_MSC_VER=1926");
            macros.Add("_MSC_FULL_VER=192628808");
            break;
        }
        foreach (var file in filesToAnalyze)
        {
            macros.UnionWith(file.Macros);
        }
        macros.Add("WIN32");
        macros.Add("_WIN32");

        // Platform/configuration dependent macros require async queries against the project.
        CPPCheckPluginPackage.Instance.JoinableTaskFactory.Run(async() =>
        {
            if (await configuredFiles.is64bitConfigurationAsync())
            {
                macros.Add("_M_X64");
                macros.Add("_WIN64");
            }
            else
            {
                macros.Add("_M_IX86");
            }
            if (await configuredFiles.isDebugConfigurationAsync())
            {
                macros.Add("_DEBUG");
            }
        });
        foreach (string macro in macros)
        {
            if (!String.IsNullOrEmpty(macro) && !macro.Contains(" ") /* macros with spaces are invalid in VS */)
            {
                String macroArgument = " -D" + macro;
                cppheckargs += macroArgument;
            }
        }

        // Macros explicitly undefined by the files are passed through as -U arguments.
        HashSet<string> macrosToUndefine = new HashSet<string>();
        foreach (var file in filesToAnalyze)
        {
            macrosToUndefine.UnionWith(file.MacrosToUndefine);
        }
        foreach (string macro in macrosToUndefine)
        {
            if (!String.IsNullOrEmpty(macro) && !macro.Contains(" ") /* macros with spaces are invalid in VS */)
            {
                String macroUndefArgument = " -U" + macro;
                cppheckargs += macroUndefArgument;
            }
        }
    }
    else if (!cppheckargs.Contains("--force"))
    {
        // Checking all configurations: ask cppcheck to force-evaluate all macro combinations.
        cppheckargs += " --force";
    }
    return (cppheckargs);
}
/// <summary>Distributes download job's headers to peers that can provide blocks represented by those headers.</summary>
/// <remarks>
/// If some of the blocks from the job can't be provided by any peer those headers will be added to a <param name="failedHashes"></param>.
/// <para>
/// Have to be locked by <see cref="queueLock"/>.
/// </para>
/// <para>
/// Node's quality score is being considered as a weight during the random distribution of the hashes to download among the nodes.
/// </para>
/// </remarks>
/// <param name="downloadJob">Download job to be partially of fully consumed.</param>
/// <param name="failedHashes">List of failed hashes which will be extended in case there is no peer to claim required hash.</param>
/// <param name="emptySlots">Number of empty slots. This is the maximum number of assignments that can be created.</param>
/// <returns>List of downloads that were distributed between the peers.</returns>
private List<AssignedDownload> DistributeHeadersLocked(DownloadJob downloadJob, List<uint256> failedHashes, int emptySlots)
{
    var newAssignments = new List<AssignedDownload>();

    // Snapshot the current peer behaviors under the peer lock; the set is mutated
    // locally below as peers are ruled out for individual headers.
    HashSet<IBlockPullerBehavior> peerBehaviors;
    lock (this.peerLock)
    {
        peerBehaviors = new HashSet<IBlockPullerBehavior>(this.pullerBehaviorsByPeerId.Values);
    }
    bool jobFailed = false;
    if (peerBehaviors.Count == 0)
    {
        this.logger.LogDebug("There are no peers that can participate in download job distribution! Job ID {0} failed.", downloadJob.Id);
        jobFailed = true;
    }

    // Index of the last header that was successfully assigned to some peer.
    int lastSucceededIndex = -1;
    for (int index = 0; (index < downloadJob.Headers.Count) && (index < emptySlots) && !jobFailed; index++)
    {
        ChainedHeader header = downloadJob.Headers[index];
        while (!jobFailed)
        {
            // Weighted random selection based on the peer's quality score.
            double sumOfQualityScores = peerBehaviors.Sum(x => x.QualityScore);
            double scoreToReachPeer = this.random.NextDouble() * sumOfQualityScores;
            IBlockPullerBehavior selectedBehavior = peerBehaviors.First();
            foreach (IBlockPullerBehavior peerBehavior in peerBehaviors)
            {
                if (peerBehavior.QualityScore >= scoreToReachPeer)
                {
                    selectedBehavior = peerBehavior;
                    break;
                }
                scoreToReachPeer -= peerBehavior.QualityScore;
            }
            INetworkPeer attachedPeer = selectedBehavior.AttachedPeer;

            // Behavior's tip can't be null because we only have behaviors inserted in the behaviors structure after the tip is set.
            if ((attachedPeer != null) && (selectedBehavior.Tip.FindAncestorOrSelf(header) != null))
            {
                int peerId = attachedPeer.Connection.Id;

                // Assign to this peer.
                newAssignments.Add(new AssignedDownload()
                {
                    PeerId = peerId,
                    JobId = downloadJob.Id,
                    AssignedTime = this.dateTimeProvider.GetUtcNow(),
                    Header = header
                });
                lastSucceededIndex = index;
                this.logger.LogTrace("Block '{0}' was assigned to peer ID {1}.", header.HashBlock, peerId);
                break;
            }
            else
            {
                // Peer doesn't claim this header.
                peerBehaviors.Remove(selectedBehavior);
                if (peerBehaviors.Count != 0)
                {
                    continue;
                }

                // No peer at all can provide this header, so the whole job fails.
                jobFailed = true;
                this.logger.LogDebug("Job {0} failed because there is no peer claiming header '{1}'.", downloadJob.Id, header);
            }
        }
    }
    if (!jobFailed)
    {
        // Consume the assigned headers from the front of the job.
        downloadJob.Headers.RemoveRange(0, lastSucceededIndex + 1);
    }
    else
    {
        // Report every unassigned header as failed and drain the job.
        int removeFrom = (lastSucceededIndex == -1) ? 0 : lastSucceededIndex + 1;
        IEnumerable<uint256> failed = downloadJob.Headers.GetRange(removeFrom, downloadJob.Headers.Count - removeFrom).Select(x => x.HashBlock);
        failedHashes.AddRange(failed);
        downloadJob.Headers.Clear();
    }
    return (newAssignments);
}
/// <summary>
/// Converts replay slot metadata into player settings. Empty slots map to null entries;
/// slots without a valid color receive the next color not claimed by any slot.
/// </summary>
private List<PlayerSetting?> ParseReplayMetaToPlayerSettings(ReplaySlot[] slots)
{
    var rng = new Random();

    // TODO: set the correct factions & colors
    var settings = new List<PlayerSetting?>();

    // First pass: remove every explicitly chosen color from the fallback pool.
    var unclaimedColors = new HashSet<MultiplayerColor>(AssetStore.MultiplayerColors);
    foreach (var slot in slots)
    {
        var claimedIndex = (int)slot.Color;
        if (claimedIndex >= 0 && claimedIndex < AssetStore.MultiplayerColors.Count)
        {
            unclaimedColors.Remove(AssetStore.MultiplayerColors.GetByIndex(claimedIndex));
        }
    }

    // Second pass: build one PlayerSetting per slot.
    foreach (var slot in slots)
    {
        if (slot.SlotType == ReplaySlotType.Empty)
        {
            settings.Add(null);
            continue;
        }

        var owner = PlayerOwner.Player;

        var factionIndex = slot.Faction;
        if (factionIndex == -1) // random
        {
            var maxFactionIndex = AssetStore.PlayerTemplates.Count;
            var minFactionIndex = 2; // 0 and 1 are civilian and observer
            var diff = maxFactionIndex - minFactionIndex;
            factionIndex = minFactionIndex + (rng.Next() % diff);
        }
        var faction = AssetStore.PlayerTemplates.GetByIndex(factionIndex);

        ColorRgb color;
        var colorIndex = (int)slot.Color;
        if (colorIndex >= 0 && colorIndex < AssetStore.MultiplayerColors.Count)
        {
            color = AssetStore.MultiplayerColors.GetByIndex(colorIndex).RgbColor;
        }
        else
        {
            // No valid recorded color: hand out (and consume) the next unclaimed one.
            var fallbackColor = unclaimedColors.First();
            color = fallbackColor.RgbColor;
            unclaimedColors.Remove(fallbackColor);
        }

        if (slot.SlotType == ReplaySlotType.Computer)
        {
            switch (slot.ComputerDifficulty)
            {
                case ReplaySlotDifficulty.Easy:
                    owner = PlayerOwner.EasyAi;
                    break;
                case ReplaySlotDifficulty.Medium:
                    owner = PlayerOwner.MediumAi;
                    break;
                case ReplaySlotDifficulty.Hard:
                    owner = PlayerOwner.HardAi;
                    break;
            }
        }

        settings.Add(new PlayerSetting(slot.StartPosition, faction, color, owner, slot.HumanName));
    }

    return settings;
}
/// <summary>
/// Round-trips a HashSet of HashSets (and the wrapper-type equivalent) through the
/// serializer and verifies that both inner sets survive intact.
/// </summary>
public async Task WriteHashSetTOfHashSetT()
{
    var intSets = new HashSet<HashSet<int>>(new List<HashSet<int>>
    {
        new HashSet<int>(new List<int>() { 1, 2 }),
        new HashSet<int>(new List<int>() { 3, 4 })
    });
    string json = await Serializer.SerializeWrapper(intSets);

    // Because order isn't guaranteed, roundtrip data to ensure write was accurate.
    intSets = await Serializer.DeserializeWrapper<HashSet<HashSet<int>>>(json);
    bool lowPairFirst = intSets.First().Contains(1);
    Assert.Equal(lowPairFirst ? new HashSet<int> { 1, 2 } : new HashSet<int> { 3, 4 }, intSets.First());
    Assert.Equal(lowPairFirst ? new HashSet<int> { 3, 4 } : new HashSet<int> { 1, 2 }, intSets.Last());

    var wrappedSets = new GenericHashSetWrapper<StringHashSetWrapper>(new List<StringHashSetWrapper>
    {
        new StringHashSetWrapper(new List<string>() { "1", "2" }),
        new StringHashSetWrapper(new List<string>() { "3", "4" })
    });
    json = await Serializer.SerializeWrapper(wrappedSets);

    // Because order isn't guaranteed, roundtrip data to ensure write was accurate.
    wrappedSets = await Serializer.DeserializeWrapper<GenericHashSetWrapper<StringHashSetWrapper>>(json);
    bool lowWrapperFirst = wrappedSets.First().Contains("1");
    Assert.Equal(lowWrapperFirst ? new StringHashSetWrapper(new List<string> { "1", "2" }) : new StringHashSetWrapper(new List<string> { "3", "4" }), wrappedSets.First());
    Assert.Equal(lowWrapperFirst ? new StringHashSetWrapper(new List<string> { "3", "4" }) : new StringHashSetWrapper(new List<string> { "1", "2" }), wrappedSets.Last());
}
/// <summary>
/// Records the compilation start time and derives the output assembly name:
/// single-plugin batches are named after the plugin, multi-plugin batches get
/// a generic "plugins_" prefix; a timestamp keeps the name unique.
/// </summary>
internal void Started()
{
    startedAt = Interface.Oxide.Now;
    var prefix = plugins.Count < 2 ? plugins.First().Name : "plugins_";
    name = prefix + Math.Round(Interface.Oxide.Now * 10000000f) + ".dll";
}
/// <summary>
/// Smoothing volume
/// </summary>
/// <param name="firstLayer">Base layer</param>
/// <param name="smooth">Smoothing layer</param>
/// <returns>Tetrahedrons for smoothing</returns>
List<Tetrahedron> Smoothing(List<List<Triangle>> firstLayer, List<List<Triangle>> smooth)
{
    // Drop empty smoothing groups up front.
    smooth.RemoveAll(x => x.Count == 0);
    List<Tetrahedron> result = new List<Tetrahedron>();
    foreach (var triangles in firstLayer)
    {
        foreach (var triangle in triangles)
        {
            List<List<Triangle>> trianglesForSmoothByNodes = new List<List<Triangle>>();
            List<Triangle> trianglesForSmooth = new List<Triangle>();
            HashSet<Triangle> smoothingTriangles = new HashSet<Triangle>();
            List<Node> boundNodes = new List<Node>(GetBoundNodes(triangle));
            foreach (var smoothItem in smooth)
            {
                // For each bound node, find the nearest node among the smoothing
                // triangles whose centers fall within +/- step of the node (XY box).
                List<Node> nodes = new List<Node>();
                foreach (var node in boundNodes)
                {
                    var query = smoothItem
                        .Where(tr => tr.Center.PX >= node.PX - step && tr.Center.PX <= node.PX + step &&
                                     tr.Center.PY >= node.PY - step && tr.Center.PY <= node.PY + step)
                        .ToList();
                    if (query.Count > 1)
                    {
                        // Several candidate triangles: collect their nearest nodes
                        // and keep the single closest one.
                        HashSet<Node> nearNodes = new HashSet<Node>();
                        query.
                        Select(x => x.NearNode(node)).
                        ToList().
                        ForEach(it => nearNodes.Add(it));
                        if (nearNodes.Count > 1)
                        {
                            double min = Double.MaxValue;
                            Node current = null;
                            foreach (var item in nearNodes)
                            {
                                if (min > Distance(item, node))
                                {
                                    min = Distance(item, node);
                                    current = item;
                                }
                            }
                            nodes.Add(current);
                        }
                        else
                        {
                            nodes.Add(nearNodes.First());
                        }
                    }
                    else if (query.Count > 0)
                    {
                        nodes.Add(query[0].NearNode(node));
                    }
                }

                // Gather the smoothing triangles around each matched node using a
                // tighter box (2/3 of step).
                foreach (var node in nodes)
                {
                    double range = step * 2 / 3;
                    var querySmoothTriangles = smoothItem
                        .Where(tr => tr.Center.PX >= node.PX - range && tr.Center.PX <= node.PX + range &&
                                     tr.Center.PY >= node.PY - range && tr.Center.PY <= node.PY + range)
                        .ToList();
                    trianglesForSmoothByNodes.Add(querySmoothTriangles);
                    trianglesForSmooth.AddRange(querySmoothTriangles);
                }
                if (boundNodes.Count > 1)
                {
                    // Triangles collected for exactly two nodes lie between
                    // neighboring bound nodes and take part in smoothing.
                    trianglesForSmooth.ForEach(tr =>
                    {
                        if (trianglesForSmooth.Where(x => x.GetHashCode() == tr.GetHashCode()).Count() == 2) // see may be > 1
                        {
                            smoothingTriangles.Add(tr);
                        }
                    });
                    List<List<int>> indSmooth = IndexesSmoothNodes(trianglesForSmoothByNodes, nodes, smoothingTriangles);
                    if (smoothingTriangles.Count >= 1)
                    {
                        // Build tetrahedra between each pair of adjacent bound nodes
                        // (wrapping around); with only two nodes, one pass suffices.
                        for (int i = 0; i < boundNodes.Count; i++)
                        {
                            result.AddRange(SmoothTetrahedronsByNodes(i, (i + 1) % boundNodes.Count, boundNodes, indSmooth, smoothingTriangles.ToList()));
                            if (boundNodes.Count == 2)
                            {
                                break;
                            }
                        }
                    }
                }
            }
        }
    }
    return (result);
}
/// <summary>
/// Parses gcov-style text report lines for a single source file and attaches the
/// resulting CodeFile (line visits, branch coverage, method elements) to the class.
/// </summary>
/// <param name="class">Class to which the parsed file is added.</param>
/// <param name="fileName">Name of the covered source file.</param>
/// <param name="lines">Raw report lines for this file.</param>
private void ProcessCoverage(Class @class, string fileName, string[] lines)
{
    var codeElements = new List<CodeElement>();
    int maxiumLineNumber = -1;
    var visitsByLine = new Dictionary<int, int>();
    var branchesByLineNumber = new Dictionary<int, ICollection<Branch>>();
    foreach (var line in lines)
    {
        var match = lineCoverageRegex.Match(line);
        if (match.Success)
        {
            int lineNumber = int.Parse(match.Groups["LineNumber"].Value, CultureInfo.InvariantCulture);
            maxiumLineNumber = Math.Max(maxiumLineNumber, lineNumber);
            string visitsText = match.Groups["Visits"].Value;

            // "-" marks a non-executable line; "#####"/"=====" mark executable but unvisited lines.
            if (visitsText != "-")
            {
                int visits = 0;
                if (visitsText != "#####" && visitsText != "=====")
                {
                    visits = int.Parse(visitsText, CultureInfo.InvariantCulture);
                }

                // Accumulate visits when the same line appears multiple times.
                if (visitsByLine.ContainsKey(lineNumber))
                {
                    visitsByLine[lineNumber] += visits;
                }
                else
                {
                    visitsByLine[lineNumber] = visits;
                }
            }
        }
        else
        {
            match = branchCoverageRegex.Match(line);
            if (match.Success)
            {
                // Branch lines have no own line number; they belong to the most
                // recently seen coverage line (maxiumLineNumber).
                var branch = new Branch(
                    match.Groups["Visits"].Success ? int.Parse(match.Groups["Visits"].Value, CultureInfo.InvariantCulture) : 0,
                    match.Groups["Number"].Value);
                ICollection<Branch> branches = null;
                if (branchesByLineNumber.TryGetValue(maxiumLineNumber, out branches))
                {
                    HashSet<Branch> branchesHashset = (HashSet<Branch>)branches;
                    if (branchesHashset.Contains(branch))
                    {
                        // Not perfect for performance, but Hashset has no GetElement method
                        branchesHashset.First(b => b.Equals(branch)).BranchVisits += branch.BranchVisits;
                    }
                    else
                    {
                        branches.Add(branch);
                    }
                }
                else
                {
                    branches = new HashSet<Branch>();
                    branches.Add(branch);
                    branchesByLineNumber.Add(maxiumLineNumber, branches);
                }
            }
            else if (line.StartsWith("function "))
            {
                // "function <name> ..." introduces a method starting just after the current line.
                string name = line.Substring(9, line.IndexOf(' ', 9) - 9);
                codeElements.Add(new CodeElement(name, CodeElementType.Method, maxiumLineNumber + 1, maxiumLineNumber + 1));
            }
        }
    }

    // -1 means "no coverage information" for a line.
    int[] coverage = new int[maxiumLineNumber + 1];
    LineVisitStatus[] lineVisitStatus = new LineVisitStatus[maxiumLineNumber + 1];
    for (int i = 0; i < coverage.Length; i++)
    {
        coverage[i] = -1;
    }
    foreach (var kv in visitsByLine)
    {
        coverage[kv.Key] = kv.Value;
        if (lineVisitStatus[kv.Key] != LineVisitStatus.Covered)
        {
            // A visited line with at least one unvisited branch is only partially covered.
            bool partiallyCovered = false;
            ICollection<Branch> branchesOfLine = null;
            if (branchesByLineNumber.TryGetValue(kv.Key, out branchesOfLine))
            {
                partiallyCovered = branchesOfLine.Any(b => b.BranchVisits == 0);
            }
            LineVisitStatus statusOfLine = kv.Value > 0 ? (partiallyCovered ? LineVisitStatus.PartiallyCovered : LineVisitStatus.Covered) : LineVisitStatus.NotCovered;

            // Keep the strongest status seen for the line.
            lineVisitStatus[kv.Key] = (LineVisitStatus)Math.Max((int)lineVisitStatus[kv.Key], (int)statusOfLine);
        }
    }
    var file = new CodeFile(fileName, coverage, lineVisitStatus, branchesByLineNumber);
    foreach (var codeElement in codeElements)
    {
        file.AddCodeElement(codeElement);
    }
    @class.AddFile(file);
}
/// <summary>
/// Benchmark case: locate the model with the matching Id by running LINQ First
/// (with a predicate) over the HashSet source.
/// </summary>
public SomeModel HashSet_First()
{
    return _hashSetSource.First(model => model.Id == _id);
}
/// <summary>
/// Main card-farming loop. Chooses between the "Complex" algorithm (accounts with
/// restricted card drops: play batches until HoursUntilCardDrops, then farm solo)
/// and the "Simple" algorithm (farm any playable game solo), repeating while there
/// is anything left to farm. Clears <c>NowFarming</c> before every exit.
/// </summary>
private async Task Farm()
{
    do
    {
        Bot.ArchiLogger.LogGenericInfo(string.Format(Strings.GamesToIdle, GamesToFarm.Count, GamesToFarm.Sum(game => game.CardsRemaining), TimeRemaining.ToHumanReadable()));

        // Now the algorithm used for farming depends on whether account is restricted or not
        if (Bot.BotConfig.HoursUntilCardDrops > 0)
        {
            // If we have restricted card drops, we use complex algorithm
            Bot.ArchiLogger.LogGenericInfo(string.Format(Strings.ChosenFarmingAlgorithm, "Complex"));
            while (GamesToFarm.Count > 0)
            {
                // Initially we're going to farm games that passed our HoursUntilCardDrops
                // This block is almost identical to Simple algorithm, we just copy appropriate items from GamesToFarm into innerGamesToFarm
                HashSet<Game> innerGamesToFarm = GamesToFarm.Where(game => game.HoursPlayed >= Bot.BotConfig.HoursUntilCardDrops).ToHashSet();
                while (innerGamesToFarm.Count > 0)
                {
                    Game game = innerGamesToFarm.First();
                    if (!await IsPlayableGame(game).ConfigureAwait(false))
                    {
                        // Unplayable games are dropped from both collections.
                        GamesToFarm.Remove(game);
                        innerGamesToFarm.Remove(game);
                        continue;
                    }
                    if (await FarmSolo(game).ConfigureAwait(false))
                    {
                        innerGamesToFarm.Remove(game);
                        continue;
                    }

                    // FarmSolo failed: stop farming entirely.
                    NowFarming = false;
                    return;
                }

                // At this point we have no games past HoursUntilCardDrops anymore, so we're going to farm all other ones
                // In order to maximize efficiency, we'll take games that are closest to our HoursPlayed first
                // We must call ToList() here as we can't remove items while enumerating
                foreach (Game game in GamesToFarm.OrderByDescending(game => game.HoursPlayed).ToList())
                {
                    if (!await IsPlayableGame(game).ConfigureAwait(false))
                    {
                        GamesToFarm.Remove(game);
                        continue;
                    }
                    innerGamesToFarm.Add(game);

                    // There is no need to check all games at once, allow maximum of MaxGamesPlayedConcurrently in this batch
                    if (innerGamesToFarm.Count >= ArchiHandler.MaxGamesPlayedConcurrently)
                    {
                        break;
                    }
                }

                // If we have no playable games to farm, we're done
                if (innerGamesToFarm.Count == 0)
                {
                    break;
                }

                // Otherwise, we farm our innerGamesToFarm batch until any game hits HoursUntilCardDrops
                if (await FarmMultiple(innerGamesToFarm).ConfigureAwait(false))
                {
                    Bot.ArchiLogger.LogGenericInfo(string.Format(Strings.IdlingFinishedForGames, string.Join(", ", innerGamesToFarm.Select(game => game.AppID))));
                }
                else
                {
                    NowFarming = false;
                    return;
                }
            }
        }
        else
        {
            // If we have unrestricted card drops, we use simple algorithm
            Bot.ArchiLogger.LogGenericInfo(string.Format(Strings.ChosenFarmingAlgorithm, "Simple"));
            while (GamesToFarm.Count > 0)
            {
                // In simple algorithm we're going to farm anything that is playable, regardless of hours
                Game game = GamesToFarm.First();
                if (!await IsPlayableGame(game).ConfigureAwait(false))
                {
                    GamesToFarm.Remove(game);
                    continue;
                }
                if (await FarmSolo(game).ConfigureAwait(false))
                {
                    continue;
                }
                NowFarming = false;
                return;
            }
        }
    } while ((await IsAnythingToFarm().ConfigureAwait(false)).GetValueOrDefault());

    NowFarming = false;
    Bot.ArchiLogger.LogGenericInfo(Strings.IdlingFinished);
    await Bot.OnFarmingFinished(true).ConfigureAwait(false);
}
/// <summary>
/// Find the consensus nucleotide for a set of nucleotides.
/// Gap symbols are ignored; a single surviving symbol is returned as-is, otherwise
/// the symbols are expanded to their basic (unambiguous) forms and the ambiguity
/// code covering the union is returned.
/// </summary>
/// <param name="symbols">Set of sequence items.</param>
/// <returns>Consensus nucleotide.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="symbols"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="symbols"/> is empty or contains a symbol not valid for this alphabet.</exception>
public override byte GetConsensusSymbol(HashSet<byte> symbols)
{
    if (symbols == null)
    {
        throw new ArgumentNullException(nameof(symbols));
    }
    if (symbols.Count == 0)
    {
        throw new ArgumentException(Properties.Resource.SymbolCountZero);
    }

    // Validate that all are valid DNA symbols, normalizing to upper case as we go.
    HashSet<byte> validValues = GetValidSymbols();
    HashSet<byte> symbolsInUpperCase = new HashSet<byte>();
    foreach (byte symbol in symbols)
    {
        if (!validValues.Contains(symbol))
        {
            throw new ArgumentException(string.Format(
                                            CultureInfo.CurrentCulture, Properties.Resource.INVALID_SYMBOL, (char)symbol, Name));
        }
        byte upperCaseSymbol = symbol;
        if (symbol >= (byte)'a' && symbol <= (byte)'z')
        {
            upperCaseSymbol = (byte)(symbol - 32); // ASCII lower -> upper
        }
        symbolsInUpperCase.Add(upperCaseSymbol);
    }

    // Remove all gap symbols; gaps never contribute to the consensus.
    HashSet<byte> gapItems = null;
    this.TryGetGapSymbols(out gapItems);
    byte defaultGap = 0;
    this.TryGetDefaultGapSymbol(out defaultGap);
    symbolsInUpperCase.ExceptWith(gapItems);

    if (symbolsInUpperCase.Count == 0)
    {
        // All are gap characters, return default 'Gap'
        return (defaultGap);
    }

    if (symbolsInUpperCase.Count == 1)
    {
        // Single distinct non-gap symbol: return it in the caller's original casing.
        return (symbols.First());
    }

    // Multiple distinct symbols: expand each to its basic symbols and look up the
    // ambiguity code for the union.
    HashSet<byte> baseSet = new HashSet<byte>();
    HashSet<byte> ambiguousSymbols;
    foreach (byte n in symbolsInUpperCase)
    {
        ambiguousSymbols = null;
        if (TryGetBasicSymbols(n, out ambiguousSymbols))
        {
            baseSet.UnionWith(ambiguousSymbols);
        }
        else
        {
            // If not found in ambiguous map, it has to be base / unambiguous character
            baseSet.Add(n);
        }
    }
    byte returnValue;
    TryGetAmbiguousSymbol(baseSet, out returnValue);
    return (returnValue);
}
/// <summary>
/// Return tokens from string based on delimiters.
///
/// Why not "string.Split(...)"? Because the delimiters are not returned and they
/// are required for parsing.
///
/// Note: quoted strings are supported — a quoted section (including the quotes)
/// is emitted as a single token. Runs of spaces are collapsed to one before
/// delimiter matching. When several delimiters match at the same position, the
/// longest one wins.
/// </summary>
/// <param name="self">string</param>
/// <param name="delimiters">list of delimiters to used</param>
/// <param name="stringComparison">type of string comparison</param>
/// <returns>return list of fields + tokens</returns>
/// <exception cref="InvalidOperationException">Thrown when a quote is not terminated.</exception>
public static IReadOnlyList<string> Tokenize(this string self, string[] delimiters, StringComparison stringComparison = StringComparison.Ordinal)
{
    // Nothing to tokenize (empty/null input or no delimiters) yields an empty list.
    if (string.IsNullOrEmpty(self) || delimiters == null || delimiters.Length == 0)
    {
        return Array.Empty<string>();
    }

    var tokens = new List<string>();
    var separatorPositions = new HashSet<int>();
    bool insideQuote = false;
    StringBuilder str = new StringBuilder();

    for (int i = 0; i < self.Length; i++)
    {
        if (self[i] == '"')
        {
            str.Append(self[i]);
            if (!insideQuote)
            {
                insideQuote = true;
                continue;
            }

            // Closing quote: the whole quoted run (quotes included) becomes one token.
            insideQuote = false;
            tokens.Add(str.ToString());
            str.Clear();
            continue;
        }
        if (insideQuote)
        {
            str.Append(self[i]);
            continue;
        }

        // Collapse runs of spaces: skip a space when another space follows it.
        if (self[i] == ' ' && i + 1 < self.Length && self[i + 1] == ' ')
        {
            continue;
        }

        // Collect the indices of all delimiters matching at the current position.
        separatorPositions.Clear();
        for (int delimiterIndex = 0; delimiterIndex < delimiters.Length; delimiterIndex++)
        {
            string candidate = delimiters[delimiterIndex];
            if (candidate.Length > (self.Length - i))
            {
                // Delimiter longer than the remaining input cannot match here.
                continue;
            }
            if (self.Substring(i, candidate.Length).Equals(candidate, stringComparison))
            {
                separatorPositions.Add(delimiterIndex);
            }
        }

        // When several delimiters match, keep only the longest (first on length ties).
        // Materialize before removing so we never mutate the set while a lazy query
        // over it is still being enumerated.
        if (separatorPositions.Count > 1)
        {
            var losers = separatorPositions
                         .Select(x => new { Sep = delimiters[x], Idx = x })
                         .OrderByDescending(x => x.Sep.Length)
                         .Skip(1)
                         .ToList();
            foreach (var item in losers)
            {
                separatorPositions.Remove(item.Idx);
            }
        }

        switch (separatorPositions.Count)
        {
        case 0:
            // Ordinary character: accumulate into the current field.
            str.Append(self[i]);
            break;

        case 1:
            // Flush the field gathered so far, then emit the delimiter itself.
            if (str.Length > 0)
            {
                tokens.Add(str.ToString());
                str.Clear();
            }
            string sep = delimiters[separatorPositions.First()];
            tokens.Add(self.Substring(i, sep.Length));
            i += sep.Length - 1;
            break;

        default:
            throw new InvalidOperationException();
        }
    }
    if (insideQuote)
    {
        throw new InvalidOperationException($"Missing ending quote, {str}");
    }
    if (str.Length > 0)
    {
        tokens.Add(str.ToString());
    }
    return (tokens);
}
// Diagnostic cross-check: compares the reference list reported by the VS project system
// (DTE/VSProject) against the project + metadata references this object already tracks.
// Mutates nothing; most mismatches are silently ignored (see the commented-out Contract
// calls), except the known Microsoft.VisualBasic embedded-core case which is asserted in DEBUG.
private void ValidateReferencesCore()
{
    // can happen when project is unloaded and reloaded or in Venus (aspx) case
    if (ProjectFilePath == null || BinOutputPath == null || ObjOutputPath == null)
    {
        return;
    }

    // Resolve the DTE project object from the hierarchy root; bail out quietly on any failure.
    object property = null;
    if (ErrorHandler.Failed(Hierarchy.GetProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_ExtObject, out property)))
    {
        return;
    }

    var dteProject = property as EnvDTE.Project;
    if (dteProject == null)
    {
        return;
    }

    var vsproject = dteProject.Object as VSProject;
    if (vsproject == null)
    {
        return;
    }

    var noReferenceOutputAssemblies = new List <string>();
    var factory = this.ServiceProvider.GetService(typeof(SVsEnumHierarchyItemsFactory)) as IVsEnumHierarchyItemsFactory;

    // Enumerate all leaf items of the hierarchy so we can find references marked
    // ReferenceOutputAssembly=false.
    IEnumHierarchyItems items;
    if (ErrorHandler.Failed(factory.EnumHierarchyItems(Hierarchy, (uint)__VSEHI.VSEHI_Leaf, (uint)VSConstants.VSITEMID.Root, out items)))
    {
        return;
    }

    uint fetched;
    VSITEMSELECTION[] item = new VSITEMSELECTION[1];
    while (ErrorHandler.Succeeded(items.Next(1, item, out fetched)) && fetched == 1)
    {
        // ignore ReferenceOutputAssembly=false references since those will not be added to us in design time.
        var storage = Hierarchy as IVsBuildPropertyStorage;
        string value;
        storage.GetItemAttribute(item[0].itemid, "ReferenceOutputAssembly", out value);
        object caption;
        Hierarchy.GetProperty(item[0].itemid, (int)__VSHPROPID.VSHPROPID_Caption, out caption);
        // MSBuild booleans: treat "false"/"off"/"0" (any case) as disabled.
        if (string.Equals(value, "false", StringComparison.OrdinalIgnoreCase) || string.Equals(value, "off", StringComparison.OrdinalIgnoreCase) || string.Equals(value, "0", StringComparison.OrdinalIgnoreCase))
        {
            noReferenceOutputAssemblies.Add((string)caption);
        }
    }

    var projectReferences = GetCurrentProjectReferences();
    var metadataReferences = GetCurrentMetadataReferences();
    // Normalize absolute reference paths down to their file name (without extension) so
    // they are comparable with display names; comparison is case-insensitive.
    var set = new HashSet <string>(vsproject.References.OfType <Reference>().Select(r => PathUtilities.IsAbsolute(r.Name) ? Path.GetFileNameWithoutExtension(r.Name) : r.Name), StringComparer.OrdinalIgnoreCase);
    var delta = set.Count - noReferenceOutputAssemblies.Count - (projectReferences.Length + metadataReferences.Length);
    if (delta == 0)
    {
        return;
    }

    // okay, two has different set of dlls referenced. check special Microsoft.VisualBasic case.
    if (delta != 1)
    {
        //// Contract.Requires(false, "different set of references!!!");
        return;
    }

    // Remove everything we can account for; whatever remains is the unexplained reference.
    set.ExceptWith(noReferenceOutputAssemblies);
    set.ExceptWith(projectReferences.Select(r => ProjectTracker.GetProject(r.ProjectId).DisplayName));
    set.ExceptWith(metadataReferences.Select(m => Path.GetFileNameWithoutExtension(m.FilePath)));

    //// Contract.Requires(set.Count == 1);
    // NOTE(review): set.First() assumes at least one element remains after the ExceptWith
    // calls — guaranteed only if delta really counted a single extra reference; confirm.
    var reference = set.First();
    if (!string.Equals(reference, "Microsoft.VisualBasic", StringComparison.OrdinalIgnoreCase))
    {
        //// Contract.Requires(false, "unknown new reference " + reference);
        return;
    }

#if DEBUG
    // when we are missing microsoft.visualbasic reference, make sure we have embedded vb core option on.
    Contract.Requires(Debug_VBEmbeddedCoreOptionOn);
#endif
}
/// <summary>
/// Runs Hopcroft's partition-refinement algorithm and stores the resulting equivalence
/// groups of states in <c>EquivalenceGroups</c>. Singleton groups are tracked separately
/// (in <c>q</c>) as bare ints for performance and re-wrapped at the end.
/// </summary>
/// <param name="normalStates">all non-final states of the automaton</param>
/// <param name="finalStates">initial grouping of final states (each inner sequence is one starting block)</param>
/// <param name="alphabetSize">number of input symbols; symbols are 0..alphabetSize-1</param>
/// <param name="getIncomingStates">given a state set A and a symbol c, yields the states with a c-transition into A
/// (assumed from the pseudocode below — confirm against callers)</param>
public Hopcroft(IEnumerable <int> normalStates, IEnumerable <IEnumerable <int> > finalStates, int alphabetSize, Func <HashSet <int>, int, IEnumerable <int> > getIncomingStates)
{
    // From Wikipedia
    // P := {F, Q \ F};
    // W := {F};
    // while (W is not empty) do
    //     choose and remove a set A from W
    //     for each c in Σ do
    //         let X be the set of states for which a transition on c leads to a state in A
    //         for each set Y in P for which X ∩ Y is nonempty and Y \ X is nonempty do
    //             replace Y in P by the two sets X ∩ Y and Y \ X
    //             if Y is in W
    //                 replace Y in W by the same two sets
    //             else
    //                 if |X ∩ Y| <= |Y \ X|
    //                     add X ∩ Y to W
    //                 else
    //                     add Y \ X to W
    //         end;
    //     end;
    // end;
    var s = Enumerable.Range(0, alphabetSize).ToList(); // The alphabet.
    var n = new HashSet <int>(normalStates); // All nonfinal states.
    var f = finalStates.Select(fs => new HashSet <int>(fs)).ToList();
    var p = new HashSet <HashSet <int> > { n }; // The partition.
    var q = new HashSet <int>(f.Where(ff => ff.Count == 1).Select(ff => ff.First())); // Single element sets of the partition are collected here for performance.
    var w = new HashSet <HashSet <int> >(f.Where(ff => ff.Count > 1)); // The remaining sets to check.

    while (w.Count > 0)
    {
        // Pop an arbitrary splitter set A from the worklist.
        var a = w.First();
        w.Remove(a);

        foreach (var c in s)
        {
            // X = states that reach A on symbol c.
            var x = new HashSet <int>(getIncomingStates(a, c));
            if (x.Count == 0)
            {
                continue; // Intersections with an empty set are always empty.
            }

            foreach (var y in p.ToList()) // p is modified in this loop, but all new elements are inserted into w and checked later.
            {
                // Y must be split only if X cuts it into two nonempty parts.
                if (IsIntersectionEmpty(x, y))
                {
                    continue;
                }
                if (IsDifferenceEmpty(y, x))
                {
                    continue;
                }

                // Replace Y by (X ∩ Y) and (Y \ X); singletons go straight to q.
                p.Remove(y);
                var intersection = new HashSet <int>(Intersect(x, y));
                if (intersection.Count > 1)
                {
                    p.Add(intersection);
                }
                else
                {
                    q.Add(intersection.First());
                }
                var difference = new HashSet <int>(Subtract(y, x));
                if (difference.Count > 1)
                {
                    p.Add(difference);
                }
                else
                {
                    q.Add(difference.First());
                }

                if (w.Contains(y))
                {
                    // Y was pending: both halves must be processed.
                    w.Remove(y);
                    w.Add(intersection);
                    w.Add(difference);
                }
                else
                {
                    // Hopcroft's optimization: only the smaller half needs processing.
                    w.Add(intersection.Count <= difference.Count ? intersection : difference);
                }
            }
        }
    }

    // Re-wrap the singleton states collected in q as one-element groups.
    EquivalenceGroups = p.Concat(q.Select(i => new HashSet <int> { i })).ToList();
}
/// <summary>
/// Computes the most specific class shared by all element types of the given array,
/// by intersecting the upcast chains (plus coercion targets) of every element type.
/// If an empty array is passed, the result will be null.
/// If there are instances, but they share no common supertype the result will be var.
/// Null elements are ignored for the purpose of finding the common type.
/// </summary>
/// <param name="array">array whose element types are inspected; must satisfy IsArray</param>
/// <param name="runtimeCore">runtime context used for class-table lookups</param>
/// <exception cref="ArgumentException">thrown when <paramref name="array"/> is not an array</exception>
public static ClassNode GetGreatestCommonSubclassForArray(StackValue array, RuntimeCore runtimeCore)
{
    if (!array.IsArray)
    {
        throw new ArgumentException("The stack value provided was not an array");
    }

    Dictionary <ClassNode, int> typeStats = GetTypeStatisticsForArray(array, runtimeCore);

    //@PERF: This could be improved with a
    List <List <int> > chains = new List <List <int> >();
    HashSet <int> commonTypeIDs = new HashSet <int>();

    // Build one upcast chain per distinct element type, and union all IDs seen.
    foreach (ClassNode cn in typeStats.Keys)
    {
        List <int> chain = ClassUtils.GetClassUpcastChain(cn, runtimeCore);

        //Now add in the other conversions - as we don't have a common superclass yet
        //@TODO(Jun): Remove this hack when we have a proper casting structure
        foreach (int id in cn.CoerceTypes.Keys)
        {
            if (!chain.Contains(id))
            {
                chain.Add((id));
            }
        }

        chains.Add(chain);

        foreach (int nodeId in chain)
        {
            commonTypeIDs.Add(nodeId);
        }
    }

    //Remove nulls if they exist
    {
        if (commonTypeIDs.Contains((int)PrimitiveType.Null))
        {
            commonTypeIDs.Remove((int)PrimitiveType.Null);
        }

        // Drop the Null type from every chain and discard chains that become empty.
        List <List <int> > nonNullChains = new List <List <int> >();

        foreach (List <int> chain in chains)
        {
            if (chain.Contains((int)PrimitiveType.Null))
            {
                chain.Remove((int)PrimitiveType.Null);
            }

            if (chain.Count > 0)
            {
                nonNullChains.Add(chain);
            }
        }

        chains = nonNullChains;
    }

    //Contract the hashset so that it contains only the nodes present in all chains
    //@PERF: this is very inefficent
    {
        foreach (List <int> chain in chains)
        {
            commonTypeIDs.IntersectWith(chain);
        }
    }

    //No common subtypes
    if (commonTypeIDs.Count == 0)
    {
        return (null);
    }

    if (commonTypeIDs.Count == 1)
    {
        return (runtimeCore.DSExecutable.classTable.ClassNodes[commonTypeIDs.First()]);
    }

    // Several candidates remain: order them by their position in the first chain and
    // return the most specific one (earliest in the chain).
    // NOTE(review): assumes chains[0] contains every surviving candidate; IndexOf returns
    // -1 for absent IDs, which would sort them first — confirm against GetClassUpcastChain.
    List <int> lookupChain = chains[0];

    //Insertion sort the IDs, we may only have a partial ordering on them.
    List <int> orderedTypes = new List <int>();

    foreach (int typeToInsert in commonTypeIDs)
    {
        bool inserted = false;

        for (int i = 0; i < orderedTypes.Count; i++)
        {
            int orderedType = orderedTypes[i];
            if (lookupChain.IndexOf(typeToInsert) < lookupChain.IndexOf(orderedType))
            {
                inserted = true;
                orderedTypes.Insert(i, typeToInsert);
                break;
            }
        }

        if (!inserted)
        {
            orderedTypes.Add(typeToInsert);
        }
    }

    return (runtimeCore.DSExecutable.classTable.ClassNodes[orderedTypes.First()]);
}
/// <summary>
/// Retrieve all Buggs matching the search criteria.
/// Aggregates anonymous crashes in the date range into Buggs (grouped by call-stack
/// pattern), optionally pushes one Bugg to JIRA, then enriches the Buggs with JIRA
/// ticket data (summary, components, resolution, fix versions, fix CL).
/// </summary>
/// <param name="FormData">The incoming form of search criteria from the client.</param>
/// <param name="BuggIDToBeAddedToJira">ID of the bugg that will be added to JIRA; 0 or negative means none.</param>
/// <returns>A view to display the filtered Buggs.</returns>
public ReportsViewModel GetResults(FormHelper FormData, int BuggIDToBeAddedToJira)
{
    BuggRepository BuggsRepo = new BuggRepository();
    CrashRepository CrashRepo = new CrashRepository();

    // It would be great to have a CSV export of this as well with buggs ID being the key I can then use to join them :)
    //
    // Enumerate JIRA projects if needed.
    // https://jira.ol.epicgames.net//rest/api/2/project
    var JC = JiraConnection.Get();
    // NOTE(review): JiraComponents and JiraVersions are never read below — confirm they
    // are needed for the JiraConnection side effects before removing.
    var JiraComponents = JC.GetNameToComponents();
    var JiraVersions = JC.GetNameToVersions();

    using (FAutoScopedLogTimer LogTimer = new FAutoScopedLogTimer(this.GetType().ToString()))
    {
        string AnonumousGroup = "Anonymous";
        //List<String> Users = FRepository.Get().GetUserNamesFromGroupName( AnonumousGroup );
        int AnonymousGroupID = FRepository.Get(BuggsRepo).FindOrAddGroup(AnonumousGroup);
        HashSet <int> AnonumousIDs = FRepository.Get(BuggsRepo).GetUserIdsFromUserGroup(AnonumousGroup);
        // NOTE(review): First() throws if the Anonymous group has no users — confirm that
        // cannot happen in deployment.
        int AnonymousID = AnonumousIDs.First();

        // Pull the crashes of interest into an anonymous-type projection (one row per crash).
        var Crashes = CrashRepo
                      .FilterByDate(CrashRepo.ListAll(), FormData.DateFrom, FormData.DateTo)
                      // Only crashes and asserts
                      .Where(Crash => Crash.CrashType == 1 || Crash.CrashType == 2)
                      // Only anonymous user
                      .Where(Crash => Crash.UserNameId.Value == AnonymousID)
                      .Select(Crash => new
        {
            ID = Crash.Id,
            TimeOfCrash = Crash.TimeOfCrash.Value,
            //UserID = Crash.UserNameId.Value,
            BuildVersion = Crash.BuildVersion,
            JIRA = Crash.Jira,
            Platform = Crash.PlatformName,
            FixCL = Crash.FixedChangeList,
            BuiltFromCL = Crash.BuiltFromCL,
            Pattern = Crash.Pattern,
            MachineID = Crash.MachineId,
            Branch = Crash.Branch,
            Description = Crash.Description,
            RawCallStack = Crash.RawCallStack,
        })
                      .ToList();
        int NumCrashes = Crashes.Count;

        /*
         * // Build patterns for crashes where patters is null.
         * var CrashesWithoutPattern = FRepository.Get().Crashes
         * .FilterByDate( FRepository.Get().Crashes.ListAll(), FormData.DateFrom, FormData.DateTo.AddDays( 1 ) )
         * // Only crashes and asserts
         * .Where( Crash => Crash.Pattern == null || Crash.Pattern == "" )
         * .Select( Crash => Crash )
         * .ToList();
         *
         * foreach( var Crash in CrashesWithoutPattern )
         * {
         * Crash.BuildPattern();
         * }
         */

        // Total # of ALL (Anonymous) crashes in timeframe
        int TotalAnonymousCrashes = NumCrashes;

        // Total # of UNIQUE (Anonymous) crashes in timeframe
        HashSet <string> UniquePatterns = new HashSet <string>();
        HashSet <string> UniqueMachines = new HashSet <string>();
        Dictionary <string, int> PatternToCount = new Dictionary <string, int>();

        //List<int> CrashesWithoutPattern = new List<int>();
        //List<DateTime> CrashesWithoutPatternDT = new List<DateTime>();

        // Count crashes per pattern; crashes without a pattern are skipped entirely.
        foreach (var Crash in Crashes)
        {
            if (string.IsNullOrEmpty(Crash.Pattern))
            {
                //CrashesWithoutPattern.Add( Crash.ID );
                //CrashesWithoutPatternDT.Add( Crash.TimeOfCrash );
                continue;
            }

            UniquePatterns.Add(Crash.Pattern);
            UniqueMachines.Add(Crash.MachineID);

            bool bAdd = !PatternToCount.ContainsKey(Crash.Pattern);
            if (bAdd)
            {
                PatternToCount.Add(Crash.Pattern, 1);
            }
            else
            {
                PatternToCount[Crash.Pattern]++;
            }
        }

        // Keep only the top N most frequent patterns.
        var PatternToCountOrdered = PatternToCount.OrderByDescending(X => X.Value).ToList();
        const int NumTopRecords = 200;
        var PatternAndCount = PatternToCountOrdered.Take(NumTopRecords).ToDictionary(X => X.Key, Y => Y.Value);

        int TotalUniqueAnonymousCrashes = UniquePatterns.Count;

        // Total # of AFFECTED USERS (Anonymous) in timeframe
        int TotalAffectedUsers = UniqueMachines.Count;

        var RealBuggs = BuggsRepo.Context.Buggs.Where(Bugg => PatternAndCount.Keys.Contains(Bugg.Pattern)).ToList();

        // Build search string.
        HashSet <string> FoundJiras = new HashSet <string>();
        Dictionary <string, List <Bugg> > JiraIDtoBugg = new Dictionary <string, List <Bugg> >();

        // Materialize one Bugg per top pattern and collect its JIRA id (when it is a real
        // JIRA key, i.e. not a purely numeric legacy TTP id).
        List <Bugg> Buggs = new List <Bugg>(NumTopRecords);
        foreach (var Top in PatternAndCount)
        {
            Bugg NewBugg = RealBuggs.Where(X => X.Pattern == Top.Key).FirstOrDefault();
            if (NewBugg != null)
            {
                using (FAutoScopedLogTimer TopTimer = new FAutoScopedLogTimer(string.Format("{0}:{1}", Buggs.Count + 1, NewBugg.Id)))
                {
                    var CrashesForBugg = Crashes.Where(Crash => Crash.Pattern == Top.Key).ToList();

                    // Convert anonymous to full type;
                    var FullCrashesForBugg = new List <Crash>(CrashesForBugg.Count);
                    foreach (var Anon in CrashesForBugg)
                    {
                        FullCrashesForBugg.Add(new Crash()
                        {
                            ID = Anon.ID,
                            TimeOfCrash = Anon.TimeOfCrash,
                            BuildVersion = Anon.BuildVersion,
                            Jira = Anon.JIRA,
                            Platform = Anon.Platform,
                            FixCL = Anon.FixCL,
                            BuiltFromCL = Anon.BuiltFromCL,
                            Pattern = Anon.Pattern,
                            MachineId = Anon.MachineID,
                            Branch = Anon.Branch,
                            Description = Anon.Description,
                            RawCallStack = Anon.RawCallStack,
                        });
                    }

                    NewBugg.PrepareBuggForJira(FullCrashesForBugg);

                    // Verify valid JiraID, this may be still a TTP
                    if (!string.IsNullOrEmpty(NewBugg.Jira))
                    {
                        int TTPID = 0;
                        int.TryParse(NewBugg.Jira, out TTPID);

                        if (TTPID == 0)
                        {
                            AddBuggJiraMapping(NewBugg, ref FoundJiras, ref JiraIDtoBugg);
                        }
                    }

                    Buggs.Add(NewBugg);
                }
            }
            else
            {
                FLogger.Global.WriteEvent("Bugg for pattern " + Top.Key + " not found");
            }
        }

        // Optionally push one specific Bugg to JIRA.
        if (BuggIDToBeAddedToJira > 0)
        {
            var Bugg = Buggs.Where(X => X.Id == BuggIDToBeAddedToJira).FirstOrDefault();
            if (Bugg != null)
            {
                Bugg.CopyToJira();
                AddBuggJiraMapping(Bugg, ref FoundJiras, ref JiraIDtoBugg);
            }
        }

        if (JC.CanBeUsed())
        {
            // Buggs still in this copy after the JIRA pass have a Jira id we failed to resolve.
            var BuggsCopy = new List <Bugg>(Buggs);

            HashSet <string> InvalidJiras = new HashSet <string>();

            // Simple verification of JIRA
            foreach (var Value in FoundJiras)
            {
                if (Value.Length < 3 || !Value.Contains('-'))
                {
                    InvalidJiras.Add(Value);
                }
            }

            foreach (var InvalidJira in InvalidJiras)
            {
                FoundJiras.Remove(InvalidJira);
            }

            // Grab the data form JIRA.
            string JiraSearchQuery = string.Join(" OR ", FoundJiras);

            using (FAutoScopedLogTimer JiraResultsTimer = new FAutoScopedLogTimer("JiraResults"))
            {
                bool bInvalid = false;
                var JiraResults = new Dictionary <string, Dictionary <string, object> >();
                try
                {
                    // Batched query first; if any key in the batch is bad the whole call throws.
                    JiraResults = JC.SearchJiraTickets(
                        JiraSearchQuery,
                        new string[]
                    {
                        "key",                      // string
                        "summary",                  // string
                        "components",               // System.Collections.ArrayList, Dictionary<string,object>, name
                        "resolution",               // System.Collections.Generic.Dictionary`2[System.String,System.Object], name
                        "fixVersions",              // System.Collections.ArrayList, Dictionary<string,object>, name
                        "customfield_11200"         // string
                    });
                }
                catch (System.Exception)
                {
                    bInvalid = true;
                }

                // Invalid records have been found, find the broken using the slow path.
                if (bInvalid)
                {
                    // Query each key individually and silently skip the ones that fail.
                    foreach (var Query in FoundJiras)
                    {
                        try
                        {
                            var TempResult = JC.SearchJiraTickets(
                                Query,
                                new string[]
                            {
                                "key",                      // string
                                "summary",                  // string
                                "components",               // System.Collections.ArrayList, Dictionary<string,object>, name
                                "resolution",               // System.Collections.Generic.Dictionary`2[System.String,System.Object], name
                                "fixVersions",              // System.Collections.ArrayList, Dictionary<string,object>, name
                                "customfield_11200"         // string
                            });

                            foreach (var Temp in TempResult)
                            {
                                JiraResults.Add(Temp.Key, Temp.Value);
                            }
                        }
                        catch (System.Exception)
                        {
                        }
                    }
                }

                // Jira Key, Summary, Components, Resolution, Fix version, Fix changelist
                foreach (var Jira in JiraResults)
                {
                    string JiraID = Jira.Key;
                    string Summary = (string)Jira.Value["summary"];

                    string ComponentsText = "";
                    System.Collections.ArrayList Components = (System.Collections.ArrayList)Jira.Value["components"];
                    foreach (Dictionary <string, object> Component in Components)
                    {
                        ComponentsText += (string)Component["name"];
                        ComponentsText += " ";
                    }

                    Dictionary <string, object> ResolutionFields = (Dictionary <string, object>)Jira.Value["resolution"];
                    string Resolution = ResolutionFields != null ? (string)ResolutionFields["name"] : "";

                    string FixVersionsText = "";
                    System.Collections.ArrayList FixVersions = (System.Collections.ArrayList)Jira.Value["fixVersions"];
                    foreach (Dictionary <string, object> FixVersion in FixVersions)
                    {
                        FixVersionsText += (string)FixVersion["name"];
                        FixVersionsText += " ";
                    }

                    int FixCL = Jira.Value["customfield_11200"] != null ? (int)(decimal)Jira.Value["customfield_11200"] : 0;

                    List <Bugg> BuggsForJira;
                    JiraIDtoBugg.TryGetValue(JiraID, out BuggsForJira);

                    //var BuggsForJira = JiraIDtoBugg[JiraID];

                    if (BuggsForJira != null)
                    {
                        // Copy the ticket data onto every Bugg mapped to this JIRA id and mark it resolved.
                        foreach (Bugg Bugg in BuggsForJira)
                        {
                            Bugg.JiraSummary = Summary;
                            Bugg.JiraComponentsText = ComponentsText;
                            Bugg.JiraResolution = Resolution;
                            Bugg.JiraFixVersionsText = FixVersionsText;
                            if (FixCL != 0)
                            {
                                Bugg.JiraFixCL = FixCL.ToString();
                            }

                            BuggsCopy.Remove(Bugg);
                        }
                    }
                }
            }

            // If there are buggs, we need to update the summary to indicate an error.
            // Usually caused when bugg's project has changed.
            foreach (var Bugg in BuggsCopy.Where(b => !string.IsNullOrEmpty(b.Jira)))
            {
                Bugg.JiraSummary = "JIRA MISMATCH";
                Bugg.JiraComponentsText = "JIRA MISMATCH";
                Bugg.JiraResolution = "JIRA MISMATCH";
                Bugg.JiraFixVersionsText = "JIRA MISMATCH";
                Bugg.JiraFixCL = "JIRA MISMATCH";
            }
        }

        // Most frequent buggs first.
        Buggs = Buggs.OrderByDescending(b => b.CrashesInTimeFrameGroup).ToList();

        return (new ReportsViewModel
        {
            Buggs = Buggs,
            /*Crashes = Crashes,*/
            DateFrom = (long)(FormData.DateFrom - CrashesViewModel.Epoch).TotalMilliseconds,
            DateTo = (long)(FormData.DateTo - CrashesViewModel.Epoch).TotalMilliseconds,
            TotalAffectedUsers = TotalAffectedUsers,
            TotalAnonymousCrashes = TotalAnonymousCrashes,
            TotalUniqueAnonymousCrashes = TotalUniqueAnonymousCrashes
        });
    }
}
private static void ValidateAndCorrectHeaders(HeaderSection hs) { // https://tools.ietf.org/html/rfc7230#section-5.4 // Since the Host field - value is critical information for handling a // request, a user agent SHOULD generate Host as the first header field // following the request - line. HeaderField hostToCorrect = null; foreach (var f in hs.Fields) { // if we find host if (f.Name == "Host") { // if host is not first if (hs.Fields.First().Name != "Host") { // then correct host hostToCorrect = f; break; } } } if (hostToCorrect != null) { hs.Fields.Remove(hostToCorrect); hs.Fields.Insert(0, hostToCorrect); } // https://tools.ietf.org/html/rfc7230#section-3.3.2 // If a message is received that has multiple Content-Length header // fields with field-values consisting of the same decimal value, or a // single Content-Length header field with a field value containing a // list of identical decimal values(e.g., "Content-Length: 42, 42"), // indicating that duplicate Content-Length header fields have been // generated or combined by an upstream message processor, then the // recipient MUST either reject the message as invalid or replace the // duplicated field - values with a single valid Content - Length field // containing that decimal value prior to determining the message body // length or forwarding the message. var allParts = new HashSet<string>(); foreach (var field in hs.Fields) { if (field.Name == "Content-Length") { var parts = field.Value.Trim().Split(','); foreach (var part in parts) { allParts.Add(part.Trim()); } } } if (allParts.Count > 0) // then has Content-Length field { if (allParts.Count > 1) { throw new InvalidDataException("Invalid Content-Length."); } hs.Fields.RemoveAll(x => x.Name == "Content-Length"); hs.Fields.Add(new HeaderField("Content-Length", allParts.First())); } }
//Generate a random worldgraph using the specified number of regions and item list.
//Overview: (1) create regions, (2) wire exits (start region, hub region, then the rest),
//(3) patch connectivity until every region is reachable, (4) place the goal in the last
//region and gate it behind every major item, (5) generate item locations per region,
//(6) pad the item list with junk items until it matches the location count.
public WorldGraph Generate()
{
    HashSet <Region> regions = new HashSet <Region>();
    for (int i = 0; i < Regions; i++)
    {
        Region r = new Region("Region-" + i.ToString()); //Each region is named Region_x. So Region-1, Region-2, etc.
        regions.Add(r);
    }
    //Not must loop through each region to add exits
    //Separate loop from the previous so that all regions are available to add as exits
    foreach (Region r in regions)
    {
        //The first region has some specific conditions:
        // 1. First exit is guaranteed to have no requirement and goes to hub region
        // 2. There is a guaranteed second exit, that will have a single item requirement
        // 3. There is a 50% chance to have a third exit, which has a 50% chance between a single item and no item
        if (r.Name == "Region-0")
        {
            //Add exit to hub region with no requirement
            List <string> currentexits = new List <string>();
            Region hub = regions.First(x => x.Name == "Region-1");
            AddExitsNoRequirement(regions, r, hub);
            currentexits.Add("Region-1");
            //Add exit to 2nd region with single requirement
            Region second = GetRandomAvailableRegion(regions, r, currentexits);
            AddExitsOneRequirement(regions, r, second);
            currentexits.Add(second.Name);
            //50% chance to add a third region
            int random = rng.Next(1, 3); //Either 1 or 2
            if (random == 2)
            {
                Region third = GetRandomAvailableRegion(regions, r, currentexits);
                random = rng.Next(1, 3); //50% chance to have 1 requirement, 50% chance to have none
                if (random == 2)
                {
                    AddExitsNoRequirement(regions, r, third);
                }
                else
                {
                    AddExitsOneRequirement(regions, r, third);
                }
            }
        }
        //The second region is the hub region and also has some specific conditions:
        // 1. Will connect to 5 regions besides the start region
        // 2. Half of its exits will have no item requirement, the other half will have one
        else if (r.Name == "Region-1")
        {
            List <string> currentexits = new List <string>();
            currentexits.Add("Region-0");
            for (int i = 0; i < 5; i++) //Run for 5 iterations
            {
                Region to = GetRandomAvailableRegion(regions, r, currentexits);
                if (i < 2) //First 3 exits (including start region) have no requirement
                {
                    AddExitsNoRequirement(regions, r, to);
                }
                else //Next 3 iterations will have 1 requirement
                {
                    AddExitsOneRequirement(regions, r, to);
                }
                currentexits.Add(to.Name);
            }
        }
        //Every other region will have a number of exits in [1, 4], however max of 2 chosen at generation, 2 more can be added by a later region
        else
        {
            int ExitNum = rng.Next(1, 3); //Generate random number in [1, 2]
            //In case r already has exits, create a list which contains all its current exits
            List <string> currentexits = new List <string>();
            foreach (Exit e in r.Exits)
            {
                currentexits.Add(e.ToRegionName);
            }
            while (r.Exits.Count < ExitNum) //Possible that location already has specified number of exits, no big deal if so
            {
                Region to = GetRandomAvailableRegion(regions, r, currentexits);
                if (!string.IsNullOrEmpty(to.Name))
                {
                    AddExits(regions, r, to); //Add exit from r to the random region
                    currentexits.Add(to.Name); //Also add dest region to list so it does not get added twice
                }
                else //Don't want to do this if r has 0 exits, but that logic is handled in GetRandomAvailableRegion
                {
                    break;
                }
            }
        }
    }
    //Must make sure all locations are reachable
    Generated = new WorldGraph("Region-0", "Goal", regions.ToHashSet(), MajorItemList);
    List <Region> unreachable = Generated.GetUnreachableRegions();
    while (unreachable.Count > 0) //At least one reachable location
    {
        //Create a connection from a random reachable location to a random unreachable location
        List <Region> regionscopy = regions.ToList();
        Helper.Shuffle(regionscopy);
        Region from = regionscopy.First(x => !unreachable.Contains(x)); //Not in unreachable, so it is reachable
        Helper.Shuffle(unreachable);
        Region to = unreachable.First(); //Unreachable
        AddExits(regions, from, to); //Add connection between two regions to join subgraphs
        Generated = new WorldGraph("Region-0", "Goal", regions.ToHashSet(), MajorItemList);
        unreachable = Generated.GetUnreachableRegions(); //Recompute reachability
    }
    //Now before adding items, we will get the last region and place the goal there- No other items will be placed there
    Generated = new WorldGraph("Region-0", "Goal", regions.ToHashSet(), MajorItemList);
    Region goalregion = Generated.Regions.Last();
    Item goalitem = new Item("Goal", 3); //Create goal item
    Location goallocation = new Location("Final Boss", "None", goalitem); //Create location for goal item with no requirement since entrance to region will have full requirement
    //We want all exits to the goal region to require every item so that they will all be required to complete the game
    string fullrequirement = "";
    foreach (Item i in MajorItemList)
    {
        fullrequirement += i.Name + " and ";
    }
    fullrequirement = fullrequirement.Substring(0, fullrequirement.Length - 5); //Remove final " and "
    //NOTE(review): the Substring above throws if MajorItemList is empty — presumably
    //guaranteed non-empty by the caller; confirm.
    regions.First(x => x == goalregion).Locations.Add(goallocation);
    foreach (Exit e in regions.First(x => x == goalregion).Exits)
    {
        e.Requirements = fullrequirement;
    }
    //Must also write to requirements leading into final region
    foreach (Region r in regions)
    {
        foreach (Exit e in r.Exits)
        {
            if (e.ToRegionName == goalregion.Name)
            {
                e.Requirements = fullrequirement;
            }
        }
    }
    //Finally, generate item locations and place the location in the region
    foreach (Region r in regions)
    {
        //The first region has some specific conditions:
        // 1. Three locations with no requirement
        // 2. 50% chance of a 4th location with one requirement
        if (r.Name == "Region-0")
        {
            int random = rng.Next(3, 5);
            for (int i = 0; i < random; i++)
            {
                if (i < random - 1) //Guaranteed 3 locations with no requirement
                {
                    Location l = new Location("Region-0_Location-" + i.ToString(), "None", new Item());
                    regions.First(x => x == r).Locations.Add(l); //Add generated location to region
                }
                else //Possible 4th location, have 1 requirement
                {
                    Location l = new Location("Region-0_Location-" + i.ToString(), GenerateOneRandomRequirement(), new Item());
                    regions.First(x => x == r).Locations.Add(l); //Add generated location to region
                }
            }
        }
        //The second region is the hub region and also has some specific conditions:
        // 1. Two locations with no requirement
        // 2. One location with one requirement
        // 3. One location with two requirements
        else if (r.Name == "Region-1")
        {
            for (int i = 0; i < 4; i++)
            {
                if (i < 2)
                {
                    Location l = new Location("Region-1_Location-" + i.ToString(), "None", new Item());
                    regions.First(x => x == r).Locations.Add(l); //Add generated location to region
                }
                else if (i == 2)
                {
                    Location l = new Location("Region-1_Location-" + i.ToString(), GenerateOneRandomRequirement(), new Item());
                    regions.First(x => x == r).Locations.Add(l); //Add generated location to region
                }
                else if (i == 3)
                {
                    Location l = new Location("Region-1_Location-" + i.ToString(), GenerateTwoRandomRequirements(), new Item());
                    regions.First(x => x == r).Locations.Add(l); //Add generated location to region
                }
            }
        }
        //Every other region will generate 2 to 4 locations, unless region contains goal, in which case we want that to be the only location in that region
        else if (r != goalregion)
        {
            //Generate 2 to 4 locations per region
            int random = rng.Next(2, 5);
            for (int i = 0; i < random; i++)
            {
                //Generate a location with:
                // Name: Region-x_Location-y, ex Region-5_Location-2
                // Requirement: Randomly Generated
                // Item: null item
                Location l = new Location(r.Name + "_Location-" + i.ToString(), GenerateRandomRequirement(false), new Item());
                regions.First(x => x == r).Locations.Add(l); //Add generated location to region
            }
        }
    }
    //Now that we have a total number of regions and a count of major items, must generate junk items to fill out the item list
    //NOTE(review): this assignment aliases MajorItemList rather than copying it (the
    //comment says "Copy") — the goal/junk items added below also land in MajorItemList.
    //Confirm whether that mutation is intended.
    List <Item> ItemList = MajorItemList; //Copy major item list and add goal item
    ItemList.Add(goalitem);
    Generated = new WorldGraph("Region-0", "Goal", regions.ToHashSet(), ItemList.OrderByDescending(x => x.Importance).ThenBy(x => x.Name).ToList()); //Remake generated now that items have been added
    int locationcount = Generated.GetLocationCount(); //Get location count and find difference so we know how many junk items to generate
    int difference = locationcount - MajorItemList.Count();
    for (int i = 0; i < difference; i++)
    {
        //For a junk item, importance will be either 0 or 1, so generate one of those numbers randomly
        int importance = rng.Next(0, 2);
        Item newitem = new Item("JunkItem" + importance.ToString(), importance); //Name will either be JunkItem0 or JunkItem1
        ItemList.Add(newitem);
    }
    Generated = new WorldGraph("Region-0", "Goal", regions.ToHashSet(), ItemList.OrderByDescending(x => x.Importance).ThenBy(x => x.Name).ToList()); //Remake generated now that items have been added
    return (Generated);
}
/// <summary>
/// Recursively converts a decision-tree node into an XPath predicate string.
/// The feature introduced at this node becomes a selector ("self::" or
/// "ancestor-or-self::" axis); the positive subtree contributes an "and" clause and the
/// negative subtree an "or" clause (optionally guarded by "not(currSelector)" depending
/// on a precision comparison between the two formulations).
/// Returns "" when the node introduces no new feature or (at the root) selects nothing
/// or is already above the precision threshold.
/// </summary>
/// <param name="dn">decision node to translate</param>
/// <param name="prevFeatureSet">features already consumed by ancestors; excluded here</param>
/// <param name="precisionThreshold">maximum precision at which subtrees are still expanded</param>
/// <param name="first">true only for the root call; enables the early-exit check</param>
public static string DecisionTreeToXpath(DecisionNode dn, HashSet <Feature> prevFeatureSet, double precisionThreshold = 1, Boolean first = true)
{
    if ((dn.SelectedPositive.Count() == 0 || dn.precision > precisionThreshold) && first)
    {
        return ("");
    }
    string res = "";
    // Only the features introduced at this node (not inherited from ancestors).
    HashSet <Feature> currFeature = new HashSet <Feature>(dn.FeatureSet.Except(prevFeatureSet));
    if (currFeature.Count() == 0)
    {
        if (dn.SetSelected != null)
        {
            Console.Write("positive with no features");
        }
        return ("");
    }
    Feature cf = currFeature.First();
    string currSelector = "";
    // Node-name features select the tag directly; other features become a predicate "*[...]".
    if (cf.nodeNameFeature)
    {
        currSelector = (cf.Axe.Equals(Feature.Axes.Self)?"self::":"ancestor-or-self::") + cf.feature.First();
    }
    else
    {
        currSelector = (cf.Axe.Equals(Feature.Axes.Self) ? "self::" : "ancestor-or-self::") + " *[" + cf.feature.First() + "]";
    }
    // Positive branch: only expanded while its precision is within the threshold.
    string rightSelector = "";
    if (dn.SetSelected != null && dn.SetSelected.precision <= precisionThreshold)
    {
        rightSelector = DecisionTreeToXpath(dn.SetSelected, dn.FeatureSet);
    }
    // Negative branch: always expanded (non-root call, so the early-exit is skipped).
    string leftSelector = "";
    if (dn.SetNotSelected != null)
    {
        leftSelector = DecisionTreeToXpath(dn.SetNotSelected, dn.FeatureSet, precisionThreshold, false);
    }
    res = currSelector;
    if (!rightSelector.Equals(""))
    {
        res = res + " and " + rightSelector;
    }
    if (!leftSelector.Equals(""))
    {
        // Compare the precision of applying the negative subtree's selector to the positive
        // branch's nodes vs. its own nodes, to decide whether a plain "or" suffices or the
        // negative clause must be guarded with not(currSelector).
        // NOTE(review): precisionF/precisionF2 divide before the Count()==0 guard; with
        // doubles a zero denominator yields NaN/Infinity which is then overwritten by 1,
        // so no exception occurs — but the order looks accidental; confirm.
        HashSet <HtmlNode> fToselectFrom = new HashSet <HtmlNode>(dn.SetSelected.SelectedNegative.Union(dn.SetSelected.SelectedPositive));
        HashSet <HtmlNode> restemp = dn.SetNotSelected.selectTrue(fToselectFrom, prevFeatureSet, false, precisionThreshold);
        HashSet <HtmlNode> resRight = new HashSet <HtmlNode>(restemp.Intersect(DomPool.TargetNodesPrecision));
        double precisionF = ((double)resRight.Count() / restemp.Count());
        if (restemp.Count() == 0)
        {
            precisionF = 1;
        }
        HashSet <HtmlNode> toSelectFrom = new HashSet <HtmlNode>(dn.SetNotSelected.SelectedNegative.Union(dn.SetNotSelected.SelectedPositive));
        HashSet <HtmlNode> restemp2 = dn.SetNotSelected.selectTrue(toSelectFrom, prevFeatureSet, false, precisionThreshold);
        HashSet <HtmlNode> resRight2 = new HashSet <HtmlNode>(restemp2.Intersect(DomPool.TargetNodesPrecision));
        double precisionF2 = ((double)resRight2.Count() / restemp2.Count());
        if (restemp2.Count() == 0)
        {
            precisionF2 = 1;
        }
        double diff = precisionF2 / precisionF;
        if (diff <= 1) //Uncomment to use the MFX
        {
            res = "((" + res + ") or (" + leftSelector + "))";
        }
        else
        {
            res = "((" + res + ") or (not(" + currSelector + ") and (" + leftSelector + ")))";
        }
        // res = "((" + res + ") or (not("+currSelector+") and (" + leftSelector + ")))";// "(("+res + ") or (" + leftSelector+"))";
        //res = "((" + res + ") or (" + leftSelector + "))";// "(("+res + ") or (" + leftSelector+"))";
    }
    return (res);
}
private void ValidateJump(LabelScopeInfo reference) { // Assume we can do a ret/branch _opCode = _canReturn ? OpCodes.Ret : OpCodes.Br; // look for a simple jump out for (LabelScopeInfo?j = reference; j != null; j = j.Parent) { if (_definitions.Contains(j)) { // found it, jump is valid! return; } if (j.Kind == LabelScopeKind.Finally || j.Kind == LabelScopeKind.Filter) { break; } if (j.Kind == LabelScopeKind.Try || j.Kind == LabelScopeKind.Catch) { _opCode = OpCodes.Leave; } } _acrossBlockJump = true; if (_node != null && _node.Type != typeof(void)) { throw Error.NonLocalJumpWithValue(_node.Name); } if (_definitions.Count > 1) { throw Error.AmbiguousJump(_node !.Name); } // We didn't find an outward jump. Look for a jump across blocks LabelScopeInfo def = _definitions.First(); LabelScopeInfo?common = Helpers.CommonNode(def, reference, b => b.Parent !); // Assume we can do a ret/branch _opCode = _canReturn ? OpCodes.Ret : OpCodes.Br; // Validate that we aren't jumping across a finally for (LabelScopeInfo?j = reference; j != common; j = j.Parent) { if (j !.Kind == LabelScopeKind.Finally) { throw Error.ControlCannotLeaveFinally(); } if (j.Kind == LabelScopeKind.Filter) { throw Error.ControlCannotLeaveFilterTest(); } if (j.Kind == LabelScopeKind.Try || j.Kind == LabelScopeKind.Catch) { _opCode = OpCodes.Leave; } } // Validate that we aren't jumping into a catch or an expression for (LabelScopeInfo?j = def; j != common; j = j.Parent) { if (!j !.CanJumpInto) { if (j.Kind == LabelScopeKind.Expression) { throw Error.ControlCannotEnterExpression(); } else { throw Error.ControlCannotEnterTry(); } } } }
public Vector2[] SetWayPoint(Vector2 startingPos, Vector2 destinationPos) { Heap <Node> OpenNodes = new Heap <Node>(); HashSet <Node> ClosedNodes = new HashSet <Node>(); List <Vector2> path = new List <Vector2>(); Node startingNode = new Node(true, startingPos); OpenNodes.Add(startingNode); while (OpenNodes.Count != 0) { Node currentNode = OpenNodes.RemoveFirst(); if (currentNode.Position == destinationPos) { OpenNodes.ResetNodes(); break; } else { ClosedNodes.Add(currentNode); List <Node> childNodes = grid.GetNeighbours(currentNode).ToList(); GetCollisionCoordinates(childNodes); foreach (var node in childNodes) { if (ClosedNodes.FirstOrDefault(x => x.Position == node.Position) != null || node.Walkable == false) { continue; } int cost = currentNode.gCost + GetDistance(currentNode, node); if (!OpenNodes.Contains(node) || cost < node.gCost) { node.Parent = currentNode; node.gCost = cost; node.hCost = (int)Vector2.Distance(node.Position, destinationPos); if (!OpenNodes.Contains(node)) { OpenNodes.Add(node); } else { OpenNodes.UpdateItem(node); } } } } } Node contextEndNode = ClosedNodes.First(); Node contextCurrentNode = ClosedNodes.Last(); path.Add(destinationPos); while (contextCurrentNode != contextEndNode) { path.Add(contextCurrentNode.Position); contextCurrentNode = contextCurrentNode.Parent; } path.Reverse(); return(path.ToArray()); }
/// <summary>
/// Applies degree-based reduction tests to the search space: removes or merges nodes
/// whose optimal-solution membership can be decided locally from their neighbor count.
/// </summary>
/// <returns>The number of nodes removed (or merged away) by this pass.</returns>
protected override int ExecuteTest()
{
    var removed = 0;
    // Work queue of node indices still to be examined; tests re-enqueue neighbors
    // whose situation may have changed.
    var pending = new HashSet <int>(Enumerable.Range(0, SearchSpaceSize));

    while (pending.Count > 0)
    {
        var node = pending.First();
        pending.Remove(node);

        var neighbors = EdgeSet.NeighborsOf(node);

        if (!NodeStates.IsTarget(node))
        {
            switch (neighbors.Count)
            {
                case 1:
                    // Non target nodes with one neighbor can be removed.
                    pending.Add(neighbors[0]);
                    RemoveNode(node);
                    removed++;
                    break;
                case 2:
                    // Non target nodes with two neighbors can be removed and their
                    // neighbors connected directly.
                    pending.Add(neighbors[0]);
                    pending.Add(neighbors[1]);
                    RemoveNode(node);
                    removed++;
                    break;
            }
        }
        else if (NodeStates.IsFixedTarget(node))
        {
            if (neighbors.Count == 1)
            {
                var neighbor = neighbors[0];

                // Fixed target nodes with one neighbor can be merged with that neighbor
                // since the connecting edge must always be taken.
                // But: if the neighbor is no target and of degree 1 or 2, the tests above
                // will remove it first, after which this node is processed again.
                if (EdgeSet.NeighborsOf(neighbor).Count <= 2 && !NodeStates.IsTarget(neighbor))
                {
                    continue;
                }

                // And: if this node is the only fixed target, the neighbor must not be
                // merged — with no (or only variable) other targets, the neighbor and the
                // rest of the tree may not belong to the optimal solution at all.
                if (NodeStates.FixedTargetNodeCount == 1)
                {
                    continue;
                }

                pending.Add(node);
                pending.Remove(neighbor);
                pending.UnionWith(MergeInto(neighbor, node));
                removed++;
            }
            else if (neighbors.Count > 1)
            {
                // A minimum-cost edge from one fixed target to another (minimum among this
                // node's edges) can be part of any optimal solution, so the two endpoints
                // can be merged.
                var cheapestEdge = neighbors.Min(other => DistanceLookup[node, other]);
                var mergeCandidates = neighbors.Where(
                    other => DistanceLookup[node, other] == cheapestEdge && NodeStates.IsFixedTarget(other));

                foreach (var other in mergeCandidates)
                {
                    pending.Add(node);
                    pending.Remove(other);
                    pending.UnionWith(MergeInto(other, node));
                    removed++;
                    // The neighbor list changed, so the remaining candidates may be stale.
                    break;
                }
            }
        }
    }

    return removed;
}
/// <summary>
/// Re-runs the reduce phase over the pages of a modified map-reduce results tree,
/// bottom-up: modified leaf pages are aggregated first, then branch pages level by
/// level until the root, at which point the final reduce outputs are (re)indexed.
/// Leaf pages that turn out empty after decompression are removed and the whole
/// reduction is retried recursively.
/// </summary>
private void HandleTreeReduction(TransactionOperationContext indexContext, IndexingStatsScope stats,
    MapReduceResultsStore modifiedStore, LowLevelTransaction lowLevelTransaction, IndexWriteOperation writer,
    LazyStringValue reduceKeyHash, Table table, CancellationToken token)
{
    EnsureValidTreeReductionStats(stats);

    var tree = modifiedStore.Tree;

    // Branch pages seen among the modified pages; they are aggregated in pass 2.
    var branchesToAggregate = new HashSet <long>();
    // Parents of pages aggregated in the current level; become the next level's work set.
    var parentPagesToAggregate = new HashSet <long>();

    // Reusable page wrapper; Base is re-pointed at each page we visit.
    var page = new TreePage(null, Constants.Storage.PageSize);

    HashSet <long> compressedEmptyPages = null;

    // Pass 1: aggregate every modified leaf page; collect branch pages for pass 2.
    foreach (var modifiedPage in modifiedStore.ModifiedPages)
    {
        token.ThrowIfCancellationRequested();

        page.Base = lowLevelTransaction.GetPage(modifiedPage).Pointer;

        stats.RecordReduceTreePageModified(page.IsLeaf);

        if (page.IsLeaf == false)
        {
            Debug.Assert(page.IsBranch);
            branchesToAggregate.Add(modifiedPage);
            continue;
        }

        var leafPage = page;

        var compressed = leafPage.IsCompressed;

        if (compressed)
        {
            stats.RecordCompressedLeafPage();
        }

        // Decompress on the fly when needed; the using disposes the temporary decompressed page.
        using (compressed ? (DecompressedLeafPage)(leafPage = tree.DecompressPage(leafPage, skipCache: true)) : null)
        {
            if (leafPage.NumberOfEntries == 0)
            {
                if (leafPage.PageNumber == tree.State.RootPageNumber)
                {
                    // Empty root: the reduce result for this key no longer exists at all.
                    writer.DeleteReduceResult(reduceKeyHash, stats);

                    var emptyPageNumber = Bits.SwapBytes(leafPage.PageNumber);
                    using (Slice.External(indexContext.Allocator, (byte *)&emptyPageNumber, sizeof(long), out Slice pageNumSlice))
                        table.DeleteByKey(pageNumSlice);

                    continue;
                }

                if (compressed)
                {
                    // it doesn't have any entries after decompression because
                    // each compressed entry has the delete tombstone
                    if (compressedEmptyPages == null)
                    {
                        compressedEmptyPages = new HashSet <long>();
                    }

                    compressedEmptyPages.Add(leafPage.PageNumber);
                    continue;
                }

                throw new UnexpectedReduceTreePageException($"Encountered empty page which isn't a root. Page {leafPage} in '{tree.Name}' tree.");
            }

            var parentPage = tree.GetParentPageOf(leafPage);

            stats.RecordReduceAttempts(leafPage.NumberOfEntries);

            try
            {
                using (var result = AggregateLeafPage(leafPage, lowLevelTransaction, indexContext, token))
                {
                    if (parentPage == -1)
                    {
                        // The leaf is the root: these aggregates are the final reduce outputs.
                        writer.DeleteReduceResult(reduceKeyHash, stats);

                        foreach (var output in result.GetOutputs())
                        {
                            writer.IndexDocument(reduceKeyHash, output, stats, indexContext);
                        }
                    }
                    else
                    {
                        // Intermediate leaf: persist its aggregate and schedule the parent.
                        StoreAggregationResult(leafPage.PageNumber, leafPage.NumberOfEntries, table, result);
                        parentPagesToAggregate.Add(parentPage);
                    }

                    _metrics.MapReduceIndexes.ReducedPerSec.Mark(leafPage.NumberOfEntries);

                    stats.RecordReduceSuccesses(leafPage.NumberOfEntries);
                }
            }
            catch (Exception e)
            {
                _index.ThrowIfCorruptionException(e);

                LogReductionError(e, reduceKeyHash, stats, updateStats: parentPage == -1, page: leafPage);
            }
        }
    }

    long tmp = 0;

    // Drop stored aggregation results for pages freed during this batch.
    // The slice wraps tmp's address, so rewriting tmp re-targets the same slice.
    using (Slice.External(indexContext.Allocator, (byte *)&tmp, sizeof(long), out Slice pageNumberSlice))
    {
        foreach (var freedPage in modifiedStore.FreedPages)
        {
            tmp = Bits.SwapBytes(freedPage);
            table.DeleteByKey(pageNumberSlice);
        }
    }

    // Pass 2: aggregate branch pages level by level, walking up to the root.
    while (parentPagesToAggregate.Count > 0 || branchesToAggregate.Count > 0)
    {
        token.ThrowIfCancellationRequested();

        var branchPages = parentPagesToAggregate;
        parentPagesToAggregate = new HashSet <long>();

        foreach (var pageNumber in branchPages)
        {
            page.Base = lowLevelTransaction.GetPage(pageNumber).Pointer;

            try
            {
                if (page.IsBranch == false)
                {
                    throw new UnexpectedReduceTreePageException("Parent page was found that wasn't a branch, error at " + page);
                }

                stats.RecordReduceAttempts(page.NumberOfEntries);

                var parentPage = tree.GetParentPageOf(page);

                using (var result = AggregateBranchPage(page, table, indexContext, branchesToAggregate, token))
                {
                    if (parentPage == -1)
                    {
                        // Reached the root: write the final reduce outputs.
                        writer.DeleteReduceResult(reduceKeyHash, stats);

                        foreach (var output in result.GetOutputs())
                        {
                            writer.IndexDocument(reduceKeyHash, output, stats, indexContext);
                        }
                    }
                    else
                    {
                        parentPagesToAggregate.Add(parentPage);

                        StoreAggregationResult(page.PageNumber, page.NumberOfEntries, table, result);
                    }

                    _metrics.MapReduceIndexes.ReducedPerSec.Mark(page.NumberOfEntries);

                    stats.RecordReduceSuccesses(page.NumberOfEntries);
                }
            }
            catch (Exception e)
            {
                _index.ThrowIfCorruptionException(e);

                LogReductionError(e, reduceKeyHash, stats, updateStats: true, page: page);
            }
            finally
            {
                branchesToAggregate.Remove(pageNumber);
            }
        }

        if (parentPagesToAggregate.Count == 0 && branchesToAggregate.Count > 0)
        {
            // we still have unaggregated branches which were modified but their children were not modified (branch page splitting) so we missed them
            parentPagesToAggregate.Add(branchesToAggregate.First());
        }
    }

    if (compressedEmptyPages != null && compressedEmptyPages.Count > 0)
    {
        // we had some compressed pages that are empty after decompression
        // let's remove them and reduce the tree once again
        modifiedStore.ModifiedPages.Clear();
        modifiedStore.FreedPages.Clear();

        foreach (var pageNumber in compressedEmptyPages)
        {
            page.Base = lowLevelTransaction.GetPage(pageNumber).Pointer;

            using (var emptyPage = tree.DecompressPage(page, skipCache: true))
            {
                if (emptyPage.NumberOfEntries > 0) // could be changed meanwhile
                {
                    continue;
                }

                modifiedStore.Tree.RemoveEmptyDecompressedPage(emptyPage);
            }
        }

        // Retry the whole reduction now that the empty pages are gone.
        HandleTreeReduction(indexContext, stats, modifiedStore, lowLevelTransaction, writer, reduceKeyHash, table, token);
    }
}
/// <summary>
/// Draws the scrollable autocomplete suggestion list inside <paramref name="rect"/>,
/// handling mouse selection, keyboard highlight wrap-around, and keeping the
/// highlighted row scrolled into view. Resets the returned/arrowPressed flags at the end.
/// </summary>
public static void Draw(Rect rect)
{
    // Shrink the box to the content height (plus padding) when the list is short.
    rect.height = Mathf.Min(listing.curY + 8, rect.height);
    Widgets.DrawBoxSolid(rect, Widgets.WindowBGFillColor);
    rect = rect.ContractedBy(4);

    // Visible slice of the scrolled content, in content-space coordinates.
    viewFrustum = rect.AtZero();
    viewFrustum.y = scrollOffset.y;

    var innerRect = rect.AtZero();
    innerRect.height = listing.curY;
    innerRect.width -= 24f;
    innerRect.x += 6;

    Widgets.BeginScrollView(rect, ref scrollOffset, innerRect);
    GUI.BeginGroup(innerRect);
    listing.Begin(innerRect);
    Text.Anchor = TextAnchor.MiddleLeft;
    Text.Font = GameFont.Tiny;

    lock (sync)
    {
        // Wrap the keyboard highlight around both ends of the list.
        if (HighlightedEntry > cachedEntries.Count - 1)
        {
            HighlightedEntry = 0;
        }

        if (HighlightedEntry < 0)
        {
            HighlightedEntry = cachedEntries.Count - 1;
        }

        // Don't render a list whose only suggestion is the search text itself.
        var onlyEchoesSearch = cachedEntries.Count == 1 && cachedEntries.First() == searchText;
        if (!onlyEchoesSearch)
        {
            var index = -1;
            foreach (var entry in cachedEntries)
            {
                index++;
                // Always reserve the row so the listing height stays consistent,
                // even for rows that end up outside the visible area.
                var row = listing.GetRect(Text.LineHeight);

                // While navigating with the arrow keys, keep the highlighted row in view.
                if (arrowPressed && index == HighlightedEntry)
                {
                    if (row.y < viewFrustum.y)
                    {
                        scrollOffset.y = row.y;
                    }

                    if (row.yMax + row.height > viewFrustum.yMax)
                    {
                        scrollOffset.y = row.yMax + row.height - viewFrustum.height;
                    }
                }

                // Skip drawing rows that are scrolled out of sight.
                if (!row.Overlaps(viewFrustum))
                {
                    continue;
                }

                // Accept the entry on click, or on Enter for the highlighted row.
                if (Widgets.ButtonInvisible(row) || (index == HighlightedEntry && returned))
                {
                    Panel_DevOptions.currentInput = entry;
                    GUI.FocusControl("profileinput");
                }

                if (Mouse.IsOver(row) || index == HighlightedEntry)
                {
                    Widgets.DrawHighlight(row);
                }

                row.width = 2000;
                Widgets.Label(row, entry);
            }
        }
    }

    listing.End();
    GUI.EndGroup();
    Widgets.EndScrollView();
    DubGUI.ResetFont();
    returned = false;
    arrowPressed = false;
}
/// <summary>
/// Removes the selected edges by merging the faces on either side of each edge.
/// Faces touched by the selected edges are grouped transitively (faces sharing a
/// selected edge end up in the same merge group), then each group is merged into
/// a single face per mesh.
/// </summary>
/// <returns>A pb_ActionResult indicating the success/failure of the action.</returns>
public override ActionResult DoAction()
{
    var selection = MeshSelection.top.ToArray();
    Undo.RecordObjects(selection, "Removing Edges");

    List <Face> edgeFaces = new List <Face>();
    // Maps a face to the index of its merge group in mergeGroups; -1 = seen but unassigned.
    Dictionary <Face, int> faceToMergeGroup = new Dictionary <Face, int>();
    HashSet <int> mergeGroupIDs = new HashSet <int>();
    List <List <Face> > mergeGroups = new List <List <Face> >();

    foreach (ProBuilderMesh pbMesh in selection)
    {
        if (pbMesh.selectedEdgeCount > 0)
        {
            var selectedEdges = pbMesh.selectedEdges;
            faceToMergeGroup.Clear();
            // BUGFIX: merge groups are per-mesh state. Without clearing here, groups
            // built for a previously processed mesh (holding that mesh's faces) would
            // be merged again into this mesh below.
            mergeGroups.Clear();

            foreach (var edge in selectedEdges)
            {
                edgeFaces.Clear();
                mergeGroupIDs.Clear();

                // Retrieving impacted faces from edge
                ElementSelection.GetNeighborFaces(pbMesh, edge, edgeFaces);

                // Checking the merge-group status of every impacted face
                foreach (var face in edgeFaces)
                {
                    if (faceToMergeGroup.ContainsKey(face))
                    {
                        mergeGroupIDs.Add(faceToMergeGroup[face]);
                    }
                    else
                    {
                        faceToMergeGroup.Add(face, -1);
                    }
                }

                // These faces haven't been seen before: start a new merge group.
                if (mergeGroupIDs.Count == 0)
                {
                    foreach (var face in edgeFaces)
                    {
                        faceToMergeGroup[face] = mergeGroups.Count;
                    }
                    mergeGroups.Add(new List <Face>(edgeFaces));
                }
                // Exactly one face already belongs to a merge group: add the
                // remaining faces to that same group.
                else if (mergeGroupIDs.Count == 1)
                {
                    foreach (var face in edgeFaces)
                    {
                        if (faceToMergeGroup[face] == -1)
                        {
                            int index = mergeGroupIDs.First();
                            faceToMergeGroup[face] = index;
                            mergeGroups[index].Add(face);
                        }
                    }
                }
                // Faces span several existing merge groups: merge those groups together.
                else
                {
                    List <Face> facesToMerge = new List <Face>();
                    foreach (var groupID in mergeGroupIDs)
                    {
                        facesToMerge.AddRange(mergeGroups[groupID]);
                        mergeGroups[groupID] = null;
                    }

                    foreach (var face in edgeFaces)
                    {
                        if (!facesToMerge.Contains(face))
                        {
                            facesToMerge.Add(face);
                        }
                    }

                    // Remove the now-consumed groups and add the combined one.
                    mergeGroups.RemoveAll(group => group == null);
                    mergeGroups.Add(facesToMerge);

                    // Group indices shifted after RemoveAll: rebuild the face -> group map.
                    for (int i = 0; i < mergeGroups.Count; i++)
                    {
                        foreach (var face in mergeGroups[i])
                        {
                            faceToMergeGroup[face] = i;
                        }
                    }
                }
            }

            foreach (var mergeGroup in mergeGroups)
            {
                MergeElements.Merge(pbMesh, mergeGroup);
            }

            pbMesh.ToMesh();
            pbMesh.Refresh();
            pbMesh.Optimize();
        }
    }

    MeshSelection.ClearElementSelection();

    // Rebuild the pb_Editor caches
    ProBuilderEditor.Refresh();

    return(new ActionResult(ActionResult.Status.Success, "Edges Removed"));
}