public static void updateVertex(SimplePriorityQueue<Spot> openSet, Spot s, Spot succ, Spot start, Spot goal)
{
    double angle = theta(s.pos, s.parent.pos, succ.pos);
    if (s != start && s.lb <= angle && s.ub >= angle)
    {
        // Path - 2: relax succ through s.parent
        if (s.parent.g + s.parent.pos.DistanceTo(succ.pos) < succ.g && s.los)
        {
            succ.g = s.parent.g + (float)s.parent.pos.DistanceTo(succ.pos);
            succ.parent = s.parent;
            if (openSet.Contains(succ))
            {
                openSet.Remove(succ);
            }
            succ.f = succ.g + calcHeuristic(succ, goal);
            openSet.Enqueue(succ, succ.f);
        }
    }
    else
    {
        // Path - 1: relax succ through s
        if (s.g + s.pos.DistanceTo(succ.pos) < succ.g && s.los)
        {
            succ.g = s.g + (float)s.pos.DistanceTo(succ.pos);
            succ.parent = s;
            if (openSet.Contains(succ))
            {
                openSet.Remove(succ);
            }
            succ.f = succ.g + calcHeuristic(succ, goal);
            openSet.Enqueue(succ, succ.f);
        }
    }
}
public override void Update()
{
    base.Update();

    //Get next stage's entry limit
    int maxToMoveOn = pipeline.GetStage(StageID + 1).EntryLimit;
    int movedOn = 0;

    //Iterate over queue in order, until at most maxToMoveOn items have been moved on
    foreach (var item in queue)
    {
        if (movedOn >= maxToMoveOn)
        {
            break;
        }

        if (pipeline.NextStageFreeForChunk(item, StageID))
        {
            //Just before the chunk would move on, re-check that the preconditions still hold
            if (CheckAndResolvePreconditionsBeforeExit(item))
            {
                MovingOnThisUpdate.Add(item);
                movedOn++;
            }
        }
        else
        {
            //Otherwise, the chunk neither moves on nor terminates, it waits
            continue;
        }
    }

    //Remove items from the queue when they move on
    foreach (var item in MovingOnThisUpdate)
    {
        queue.Remove(item);
    }

    //Remove items from the queue when they terminate
    foreach (var item in terminatingThisUpdateHelper)
    {
        queue.Remove(item);
    }

    ///Items in the going backwards list have already been removed from the queue,
    ///but we need to make sure they should really be going backwards, not just terminating
    GoingBackwardsThisUpdate.RemoveWhere((id) => TerminateHereCondition(id));

    //Clear the terminating helper list
    terminatingThisUpdateHelper.Clear();
}
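// The stage above collects the chunks to drop into side lists and only calls queue.Remove
// after the foreach has finished, instead of removing items mid-enumeration. A minimal,
// self-contained sketch of that "collect, then remove" pattern, assuming the BlueRaja
// OptimizedPriorityQueue package (namespace Priority_Queue); the int items and the
// selection rule are illustrative only.
using System.Collections.Generic;
using Priority_Queue;

public static class DeferredRemovalExample
{
    public static void RemoveAllEvens(SimplePriorityQueue<int> queue)
    {
        // 1) Snapshot the items to drop while enumerating.
        var toRemove = new List<int>();
        foreach (int item in queue)
        {
            if (item % 2 == 0)
            {
                toRemove.Add(item);
            }
        }

        // 2) Mutate the queue only after enumeration has finished.
        foreach (int item in toRemove)
        {
            queue.Remove(item);
        }
    }
}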
public void ItemRemoved(TileItem item)
{
    if (cfg.filter.Applies(item.def))
    {
        if (item.amtAvailable > 0)
        {
            queue.Remove(item);
        }
        else
        {
            unavailable.Remove(item);
        }
    }
}
internal bool UnSubscribeBindModule(Module module)
{
    using (new LockWait(ref _lock))
    {
        Type type = module.GetType();
        if (!_dic.ContainsKey(type))
        {
            Log.E(string.Format("01, module type not bound | Type {0} Name {1}", type, module.name));
            return false;
        }
        else
        {
            var list = _dic[type];
            if (!list.ContainsKey(module.name))
            {
                Log.E(string.Format("02, module name not bound | Type {0} Name {1}", type, module.name));
                return false;
            }
            else
            {
                _dic[type].Remove(module.name);
                if (_queue.Contains(module))
                {
                    _queue.Remove(module);
                }
                return true;
            }
        }
    }
}
public void DeRegisterEntity(ITimeNode node)
{
    if (turnQueue.Contains(node))
    {
        turnQueue.Remove(node);
    }
}
public void Pause()
{
    if (_queue.Contains(this))
    {
        _queue.Remove(this);
    }
}
public SimplePriorityQueue<Node> GetUnvisitedNeighbors(Node node, List<List<Node>> grid)
{
    SimplePriorityQueue<Node> neighbors = new SimplePriorityQueue<Node>();
    int row = node.Row;
    int column = node.Column;

    if (row > 0) { neighbors.Enqueue(grid[row - 1][column], 1000000); }
    if (row < grid.Count - 1) { neighbors.Enqueue(grid[row + 1][column], 1000000); }
    if (column > 0) { neighbors.Enqueue(grid[row][column - 1], 1000000); }
    if (column < grid[0].Count - 1) { neighbors.Enqueue(grid[row][column + 1], 1000000); }

    // Collect the visited neighbors first and remove them afterwards, so the queue is not
    // mutated while it is being enumerated.
    List<Node> visited = new List<Node>();
    foreach (Node node1 in neighbors)
    {
        if (node1.IsVisited)
        {
            visited.Add(node1);
        }
    }
    foreach (Node node1 in visited)
    {
        neighbors.Remove(node1);
    }

    return neighbors;
}
public void refershNoveltyMapsBatch()
{
    int i = 0;
    TileObject[,] tmpStrAlias;
    Vector2Int startMainMap = ParameterManager.Instance.StartCell;

    GeneratorUIManager.Instance.deleteMapOnUI(AliasDragAreas[2].GetChild(0));
    AliasDragAreas[2].GetComponent<MapListManager>().dictionaryMap.Clear();

    if (SimilarMapsQueue.Count <= 0)
    {
        GenerateAndTestAliasMaps();
    }

    while (i < BatchAliasNumber)
    {
        tmpStrAlias = SimilarMapsQueue.Last();
        float dst = SimilarMapsQueue.GetPriority(tmpStrAlias);
        SimilarMapsQueue.Remove(tmpStrAlias);
        Utility.renderAliasOnUI(
            AliasDragAreas[2].GetChild(0).GetComponent<RectTransform>(),
            ParameterManager.Instance.GridType,
            new StructuredAlias(tmpStrAlias, startMainMap, ParameterManager.Instance.EndCell, dst),
            AliasPrefab,
            true);
        i++;
    }
}
/// <summary>
/// Generate the SPF tree of the topology in a project, taking the parameter as the origin point
/// </summary>
/// <param name="idRouterOrigen"></param>
/// <param name="idProyecto"></param>
/// <returns></returns>
public static SimplePriorityQueue<NodoDijkstra> GenerarRutas(NodoDijkstra idRouterOrigen, int idProyecto)
{
    idRouterOrigen.nMinDistancia = 0.0;
    SimplePriorityQueue<NodoDijkstra> routerQueue = new SimplePriorityQueue<NodoDijkstra>();
    routerQueue.Enqueue(idRouterOrigen, 1);

    while (routerQueue.Count > 0)
    {
        NodoDijkstra currentRouter = routerQueue.Dequeue();

        // Visit every link adjacent to router u
        foreach (var enlace in currentRouter.listaEnlaces)
        {
            NodoDijkstra vecino = new NodoDijkstra(enlace.idRouterB, idProyecto);
            double nPesoBandwidth = enlace.nBandwidth;
            double nDistanciaTotal = currentRouter.nMinDistancia + nPesoBandwidth;

            if (nDistanciaTotal < vecino.nMinDistancia)
            {
                // Only remove the neighbour if it is actually enqueued (this mirrors the
                // Contains check used in the extended variant of this method below).
                if (routerQueue.Contains(vecino))
                {
                    routerQueue.Remove(vecino);
                }
                vecino.nMinDistancia = nDistanciaTotal;
                vecino.idRouterPrevio = currentRouter;
                routerQueue.Enqueue(vecino, 1);
            }
        }
    }
    return routerQueue;
}
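// Both GenerarRutas variants in this collection enqueue every router with a constant priority
// of 1, so the dequeue order no longer reflects the accumulated distance. The textbook
// relaxation keys the priority on the tentative distance and treats the priority update as a
// decrease-key. A minimal, self-contained sketch of that (not the project's code), assuming
// the BlueRaja OptimizedPriorityQueue package (namespace Priority_Queue):
using System.Collections.Generic;
using Priority_Queue;

public static class DijkstraSketch
{
    // graph[u] = list of (neighbour v, edge weight w); returns the distance array from source.
    public static double[] ShortestDistances(List<(int v, double w)>[] graph, int source)
    {
        var dist = new double[graph.Length];
        for (int i = 0; i < dist.Length; i++) dist[i] = double.PositiveInfinity;
        dist[source] = 0.0;

        var open = new SimplePriorityQueue<int, double>();
        open.Enqueue(source, 0.0);

        while (open.Count > 0)
        {
            int u = open.Dequeue();
            foreach (var (v, w) in graph[u])
            {
                double candidate = dist[u] + w;
                if (candidate < dist[v])
                {
                    dist[v] = candidate;
                    if (open.Contains(v))
                    {
                        open.UpdatePriority(v, candidate);   // decrease-key
                    }
                    else
                    {
                        open.Enqueue(v, candidate);
                    }
                }
            }
        }
        return dist;
    }
}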
protected void RemoveFromOpenQueue(SearchNode node)
{
    if (node.Opened)
    {
        m_openQueue.Remove(node.Pos);
        node.Opened = false;
        node.SetSearchType(SearchType.None, true, true);
    }
}
// javadoc:
// "Returns true if and only if this queue contained the specified element"
public bool remove(T t)
{
    if (!simplePriorityQueue.Contains(t))
    {
        return false;
    }
    simplePriorityQueue.Remove(t);
    return true;
}
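// The Contains-then-Remove pair above (and the same guard in several other snippets here) can
// be collapsed into a single call if the queue is the SimplePriorityQueue from the BlueRaja
// OptimizedPriorityQueue package: recent versions expose TryRemove, which removes the item
// only if it is present and returns whether it did, matching the javadoc contract exactly.
// A minimal sketch under that assumption; the adapter class is illustrative, not library API:
using Priority_Queue;

public class JavaStyleQueueAdapter<T>
{
    private readonly SimplePriorityQueue<T> simplePriorityQueue = new SimplePriorityQueue<T>();

    // "Returns true if and only if this queue contained the specified element"
    public bool remove(T t)
    {
        return simplePriorityQueue.TryRemove(t);
    }
}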
/* Function: deleteCircleEvent
 * ---------------------------
 * Deletes a potential circle event associated with a site event
 * from the event queue (it is no longer correct). */
private void deleteCircleEvent(BeachArc arc)
{
    CircleEvent ce = arc.circleEvent;
    arc.circleEvent = null;
    if (ce != null && sweepLine.Contains(ce))
    {
        sweepLine.Remove(ce);
        usedCircleEvents.Add(ce);
    }
}
public IEnumerable<IJob> Pop(Predicate<IJob> predicate)
{
    lock (queue)
    {
        // Materialise the matches before removing them: Where() is lazily evaluated, so
        // enumerating it while (and after) mutating the queue would yield wrong results.
        List<IJob> matchedJobs = queue.Where(job => predicate(job)).ToList();
        foreach (IJob job in matchedJobs)
        {
            queue.Remove(job);
        }
        return matchedJobs;
    }
}
/// <summary>
/// Remove "state" from the open list and add it again with a new priority
/// </summary>
/// <param name="state">the state whose priority is changed</param>
/// <param name="priority">the new priority of "state"</param>
public void RemoveAndAddElementToOpenList(State<T> state, float priority)
{
    foreach (State<T> var in openList)
    {
        if (var.Equals(state))
        {
            openList.Remove(var);
            openList.Enqueue(state, priority);
            return;
        }
    }
}
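// When the queue already treats two State<T> instances that are Equals as the same item (the
// default for SimplePriorityQueue from the BlueRaja OptimizedPriorityQueue package, which
// uses EqualityComparer<T>.Default for item lookups), the linear scan plus Remove/Enqueue
// above can usually be replaced by a single UpdatePriority call. A hedged sketch of that
// alternative, not the original project's code; the wrapper class is illustrative:
using Priority_Queue;

public class OpenList<TState>
{
    private readonly SimplePriorityQueue<TState> openList = new SimplePriorityQueue<TState>();

    public void Add(TState state, float priority) => openList.Enqueue(state, priority);

    public void RemoveAndAddElementToOpenList(TState state, float priority)
    {
        if (openList.Contains(state))
        {
            // Re-keys the existing entry without scanning the whole queue.
            openList.UpdatePriority(state, priority);
        }
    }
}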
private bool RemoveFromQueue(AudioMixerSnapshot i_Snapshot)
{
    AudioMixerSnapshotData snapshotData = GetDataFromQueue(i_Snapshot);
    if (snapshotData == null)
    {
        return false;
    }
    m_Queue.Remove(snapshotData);
    return true;
}
/// <summary>
/// The search command searches a searchable domain using the Best-First Search (BFS) algorithm
/// </summary>
/// <param name="domain">The search domain</param>
/// <returns>The solution to the problem</returns>
public Solution<T> search(ISearchable<T> domain)
{
    openList = new SimplePriorityQueue<State<T>>();
    closed = new HashSet<State<T>>();

    // We use a dictionary to hold the states in addition to the PriorityQueue to allow
    // random 'get' in O(1), a function not supported by Priority Queues (Space complexity is
    // now 2n as opposed to n).
    openListContains = new Dictionary<State<T>, double>();

    // We add the initial state to the list of states to check
    addNode(domain.getInitialState());

    while (openList.Count > 0)
    {
        State<T> nextNode = RemoveTopNode();

        // If we have reached the goal state, we generate the traceback and return it as a
        // solution
        if (nextNode.Equals(domain.getGoalState()))
        {
            return Backtrace(nextNode, numberOfEvaluations);
        }

        // getPossibleStates initializes our states, and assigns the predecessors as needed
        List<State<T>> followingNodes = domain.getPossibleStates(nextNode);
        foreach (State<T> node in followingNodes)
        {
            // If the node isn't in the open list we add it - as long as it's not in the
            // closed list
            if (!openListContains.ContainsKey(node))
            {
                if (!closed.Contains(node))
                {
                    addNode(node);
                }
            }
            // If the node is already in the open list and we have found a cheaper path to
            // it, then we update the predecessor node and its cost.
            else
            {
                if (openListContains[node] > node.cost)
                {
                    openList.Remove(node);
                    openListContains[node] = node.cost;
                    addNode(node);
                }
            }
        }
    }

    // If we reach here, there is no path to the destination
    return null;
}
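// The side dictionary above exists to give O(1) membership/cost lookups next to the queue.
// If the open list is the SimplePriorityQueue from the BlueRaja OptimizedPriorityQueue
// package, the queue itself already offers O(1) Contains and TryGetPriority (it keeps an
// internal item-to-node map), so the extra dictionary can often be dropped. A hedged sketch
// of the same relaxation step using only the queue; the generic TState and the method name
// are stand-ins, and closed-set handling is omitted:
using Priority_Queue;

public static class OpenListLookupSketch
{
    public static void Relax<TState>(SimplePriorityQueue<TState, double> openList,
                                     TState node, double newCost)
    {
        if (openList.TryGetPriority(node, out double oldCost))
        {
            if (oldCost > newCost)
            {
                openList.UpdatePriority(node, newCost);   // cheaper path found
            }
        }
        else
        {
            openList.Enqueue(node, newCost);              // first time we see this node
        }
    }
}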
public bool RemoveTargetTree(TreeScript tree)
{
    if (!AreAvailableTreesInSanctuary())
    {
        return false;
    }
    if (!TreesPriorityQueue.Contains(tree))
    {
        Debug.Log($"{tree.gameObject}: Tree is not in the list of trees");
        return false;
    }
    TreesPriorityQueue.Remove(tree);
    return true;
}
/// <summary>
/// Advances the internal clock and runs every scheduled task whose time has been reached.
/// </summary>
/// <param name="elapsed">time since update was last called, in microseconds</param>
public void Update(double elapsed)
{
    if (Playing)
    {
        Time += elapsed / Tempo * PPQ;
        while (tasks.Count > 0)
        {
            Scheduled nxt = tasks.First;
            if (tasks.GetPriority(nxt) > Time)
            {
                break;
            }
            nxt();
            tasks.Remove(nxt);
        }
    }
}
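// In the loop above, nxt is always the head of the queue, so removing it by value does the
// same work as popping it. A compact, self-contained sketch of the same drain-due-tasks loop
// written with First/GetPriority/Dequeue, assuming the BlueRaja OptimizedPriorityQueue
// package; the Scheduled delegate and the scheduler class here are stand-ins for the
// original code's types:
using Priority_Queue;

public delegate void Scheduled();

public class SchedulerSketch
{
    private readonly SimplePriorityQueue<Scheduled, double> tasks =
        new SimplePriorityQueue<Scheduled, double>();

    public void Schedule(Scheduled task, double time) => tasks.Enqueue(task, time);

    // Runs every task whose scheduled time is <= now, in time order.
    public void RunDue(double now)
    {
        while (tasks.Count > 0 && tasks.GetPriority(tasks.First) <= now)
        {
            Scheduled next = tasks.Dequeue();   // removes the head in one call
            next();
        }
    }
}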
public void UCSearch(int v, int g, bool[] visited, ref int[] parent)
{
    SimplePriorityQueue<int, int> open = new SimplePriorityQueue<int, int>(); // Orders elements in ascending order by weight.
    int[] cost = new int[size]; // Stores the least cost found so far for each node, starting from the start node.
    for (int i = 0; i < size; i++)
    {
        cost[i] = 0;
    }

    open.Enqueue(v, 0);
    while (open.Count > 0)
    {
        int x = open.Dequeue();
        visited[x] = true; // We mark the visited node here since UCS only expands the node with the least cost
        if (x == g)
        {
            return;
        }
        for (int i = 0; i < adj[x].Count; i++)
        {
            if (!open.Contains(adj[x][i]) && !visited[adj[x][i]]) // Add unseen nodes to the queue with their costs
            {
                parent[adj[x][i]] = x;
                cost[adj[x][i]] = cost[x] + weight[x][i];
                open.Enqueue(adj[x][i], cost[adj[x][i]]);
            }
            else if (open.Contains(adj[x][i])) // Update the cost of a node in case a new path is found
            {
                int temp = cost[adj[x][i]];
                cost[adj[x][i]] = Math.Min(cost[adj[x][i]], cost[x] + weight[x][i]);
                if (cost[adj[x][i]] < temp) // If the new path has a smaller cost, update the node and push it back onto the queue
                {
                    parent[adj[x][i]] = x; // Update the parent when a lower-cost path is found.
                    open.Remove(adj[x][i]);
                    open.Enqueue(adj[x][i], cost[adj[x][i]]);
                }
            }
        }
    } // End of while
} // End of UCSearch function
public int LabyrinthPath(Tile startTile, Tile startBoundaryTile, ref SimplePriorityQueue<Tile> potentialStarts)
{
    Tile next = startTile;
    Tile nextBoundaryTile = startBoundaryTile;
    Stack<int> quadrants = new Stack<int>();
    quadrants.Push(Quadrant(next, startBoundaryTile));

    int count = 0;
    int maxLabyrinthCount = 1000;
    do
    {
        LabyrinthStep(startTile, startBoundaryTile, ref next, ref nextBoundaryTile, ref quadrants);
        if (potentialStarts.Contains(next))
        {
            potentialStarts.Remove(next);
        }
    }
    while ((next != startTile || startBoundaryTile != nextBoundaryTile) && count++ < maxLabyrinthCount);

    // Winding number
    Debug.Log(String.Format("Winding number is {0}", quadrants.Count));
    return quadrants.Count / 4;
}
public void CancelAssetsAsync(int index)
{
    if (!resultDic.TryGetValue(index, out var result))
    {
        return;
    }
    if (result.IsDone())
    {
        return;
    }

    if (requestDic.TryGetValue(result.ID, out var request))
    {
        if (waitingRequestQueue.Contains(request))
        {
            waitingRequestQueue.Remove(request);
        }
        else if (runningRequestList.Contains(request))
        {
            runningRequestList.Remove(request);
            foreach (var assetPath in request.paths)
            {
                if (assetNodeDic.TryGetValue(assetPath, out var assetNode))
                {
                    assetNode.ReleaseRef();
                }
                else
                {
                    Debug.LogError("");
                }
            }
            OnAsyncRequestCancel(request);
        }
        requestDic.Remove(request.id);
    }
}
public List<Node> FindPath(Node start, Node goal)
{
    start.fScore = start.DistanceTo(goal);
    SimplePriorityQueue<Node> open = new SimplePriorityQueue<Node>();
    open.Enqueue(start, start.fScore);
    List<Node> closed = new List<Node>();
    Dictionary<Node, float> gscores = new Dictionary<Node, float>();

    while (open.Count != 0)
    {
        Node current = open.Dequeue();
        if (current.Equals(goal))
        {
            return ReconstructPath(current);
        }
        closed.Add(current);

        foreach (Node neighbor in Neighbors(current))
        {
            if (closed.Contains(neighbor))
            {
                continue;
            }

            float tempGscore = current.gScore + world.Map.TileCost(neighbor);
            if (neighbor.DistanceTo(current) > 1)
            {
                tempGscore += neighbor.sqrt2;
            }

            if (open.Contains(neighbor))
            {
                if (gscores[neighbor] <= tempGscore)
                {
                    continue;
                }
                else
                {
                    open.Remove(neighbor);
                }
            }

            neighbor.gScore = tempGscore;
            neighbor.fScore = tempGscore + neighbor.DistanceTo(goal, true);
            neighbor.CameFrom = current;
            gscores[neighbor] = neighbor.gScore;
            open.Enqueue(neighbor, neighbor.fScore);
        }
    }
    return new List<Node>();
}
public List<PathNode> GetPath(Vector3Int from, Vector3Int to)
{
    // We will allow paths that start and end at map objects but NOT go through them.
    // If the from and to are the same, then the path is just the from
    if (from == to)
    {
        return new List<PathNode> { from };
    }

    // Visited will store black nodes
    HashSet<PathNode> visited = new HashSet<PathNode>();
    SimplePriorityQueue<PathNode> queue = new SimplePriorityQueue<PathNode>();
    queue.Enqueue(from, 0);

    while (queue.Count > 0)
    {
        // Getting a node from the queue means that there is no better path to this
        // node. This means that it is done being visited, so we can turn it black and
        // add it to the visited set.
        PathNode current = queue.Dequeue();
        visited.Add(current);

        if (current.x == to.x && current.y == to.y)
        {
            List<PathNode> path = new List<PathNode>();
            while (current != null)
            {
                path.Insert(0, current);
                current = current.prev;
            }
            return path;
        }

        for (int x = current.x - 1; x <= current.x + 1; x++)
        {
            for (int y = current.y - 1; y <= current.y + 1; y++)
            {
                if (x == current.x && y == current.y)
                {
                    continue; // Skip center
                }
                if (x != current.x && y != current.y)
                {
                    continue; // Skip diagonals
                }

                PathNode next = new PathNode { x = x, y = y, prev = current, cost = current.cost + 1 };
                if (level.IsMapObject(next) && !(next.x == to.x && next.y == to.y))
                {
                    continue; // Skip map objects unless it's the goal
                }
                if (level.IsOutOfBounds(next))
                {
                    continue; // Skip out of bounds locations
                }

                // Gray node - in the middle of processing
                if (queue.Contains(next))
                {
                    if (queue.GetPriority(next) > next.cost)
                    {
                        queue.Remove(next);
                        queue.Enqueue(next, next.cost);
                    }
                }
                // White node - never seen before
                else if (!visited.Contains(next))
                {
                    queue.Enqueue(next, next.cost);
                }
            }
        }
    }
    return null;
}
public void Unsubscribe(Object subscriber, Action<Object, TEventArgs> a)
{
    subscribers.Remove(subscribers.First(x => x.Key.Equals(subscriber) && x.Value.Equals(a)));
}
public static LinkedList<T> AStar<T>(T fromVertice, T toVertice) where T : IVertice
{
    if (fromVertice == null || toVertice == null)
    {
        Debug.LogError("fromVertice: " + (fromVertice == null ? "null" : "ref") +
                       " toVertice: " + (toVertice == null ? "null" : "ref"));
        return new LinkedList<T>();
    }

    var closedSet = new List<T>();
    var openSet = new SimplePriorityQueue<T, float>(); // new List<T>();
    openSet.Enqueue(fromVertice, 0f);

    var cameFrom = new Dictionary<T, T>();
    var gScore = new Dictionary<T, float> { [fromVertice] = 0f };
    var fScore = new Dictionary<T, float> { [fromVertice] = HeuristicCostEstimate(fromVertice, toVertice) };

    while (openSet.Count > 0)
    {
        var currentVertice = openSet.First();
        if (currentVertice.Equals(toVertice))
        {
            return ReconstructPath(cameFrom, currentVertice);
        }
        openSet.Remove(currentVertice);
        closedSet.Add(currentVertice);

        foreach (T neighborVertice in currentVertice.Neighbors)
        {
            if (closedSet.Contains(neighborVertice))
            {
                continue;
            }

            var tentativeGScore = gScore[currentVertice] +
                                  Vector3.Distance(currentVertice.Position, neighborVertice.Position);
            if (openSet.Contains(neighborVertice) && tentativeGScore >= gScore[neighborVertice])
            {
                continue;
            }

            cameFrom[neighborVertice] = currentVertice;
            gScore[neighborVertice] = tentativeGScore;
            fScore[neighborVertice] = gScore[neighborVertice] + HeuristicCostEstimate(neighborVertice, toVertice);

            if (openSet.Contains(neighborVertice))
            {
                openSet.Remove(neighborVertice);
            }
            openSet.Enqueue(neighborVertice, fScore[neighborVertice]);
        }
    }
    return new LinkedList<T>();
}
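// Since currentVertice above is always the head of the open set, the First()/Remove() pair
// does the same work as a single Dequeue(). A tiny self-contained illustration, assuming the
// BlueRaja OptimizedPriorityQueue package (namespace Priority_Queue); the string items and
// priorities are placeholders only:
using System;
using Priority_Queue;

public static class DequeueVsFirstRemove
{
    public static void Demo()
    {
        var openSet = new SimplePriorityQueue<string, float>();
        openSet.Enqueue("far", 10f);
        openSet.Enqueue("near", 1f);

        // Equivalent to: var best = openSet.First; openSet.Remove(best);
        string best = openSet.Dequeue();
        Console.WriteLine(best); // "near" - the lowest-priority (best) item
    }
}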
/// <summary> /// Generar y retornar el árbol SPF de la topología en un proyecto, tomando el parámetro como punto de origen /// </summary> /// <param name="idRouterOrigen"></param> /// <param name="idProyecto"></param> /// <param name="minBW"></param> /// <returns></returns> public static List<NodoDijkstra> GenerarRutas(NodoDijkstra idRouterOrigen, int idProyecto, double minBW, int nTipoMetrica = 2, int idAfinidad = 0) { idRouterOrigen.nMinDistancia = 0.0; SimplePriorityQueue<NodoDijkstra> routerQueue = new SimplePriorityQueue<NodoDijkstra>(); routerQueue.Enqueue(idRouterOrigen, 1); //mantiene el registro de todos los nodos de la topologia por el que se pasa List<NodoDijkstra> routerList = new List<NodoDijkstra>(); routerList.Add(idRouterOrigen); while (routerQueue.Count > 0) { NodoDijkstra currentRouter = routerQueue.Dequeue(); //Visita cada enlace adyacente al router u foreach (var enlace in currentRouter.listaEnlacesDijkstra) { int idRouterVecino = 0; //Fix: Asegurandose de que se use el id del router adyacente en el enlace if (enlace.idRouterB != currentRouter.idRouter) { idRouterVecino = enlace.idRouterB; //enlace.target = enlace.targetB; } else { idRouterVecino = enlace.idRouterA; //enlace.target = enlace.targetA; } //NodoDijkstra vecino = new NodoDijkstra(idRouterVecino, idProyecto); NodoDijkstra vecino = enlace.target; double nPesoBandwidth = 0; switch(nTipoMetrica) //ignore var name, aqui va lo del tipo de peso { case 1: //Pesos Administrativos nPesoBandwidth = enlace.nPesoAdministrativo; break; case 2: //Minima Cantidad de Saltos nPesoBandwidth = 1; break; case 3: // 1/BW Reservado nPesoBandwidth = 1.00 / (enlace.nBandwidth - enlace.nBandwidthDisponible); break; case 4: // 1/BW Disponible nPesoBandwidth = 1.00 / enlace.nBandwidthDisponible; break; default: nPesoBandwidth = 1; break; } double nDistanciaTotal = currentRouter.nMinDistancia + nPesoBandwidth; //Aqui ocurre el filtro por afinidad if (idAfinidad == 0) //No afinidad definida { //En este if ocurre el filtro por BW disponible if (nDistanciaTotal < vecino.nMinDistancia && minBW < enlace.nBandwidth) //Constraint check { if (routerQueue.Contains(vecino)) routerQueue.Remove(vecino); vecino.nMinDistancia = nDistanciaTotal; vecino.idRouterPrevio = currentRouter; enlace.nBandwidthDisponible -= minBW; //reservar el BW en el enlace routerQueue.Enqueue(vecino, 1); } } else //Afinidad definida { if (idAfinidad == enlace.idAfinidad) //Afinidad check { //En este if ocurre el filtro por BW disponible if (nDistanciaTotal < vecino.nMinDistancia && minBW < enlace.nBandwidth) //Constraint check { if (routerQueue.Contains(vecino)) routerQueue.Remove(vecino); vecino.nMinDistancia = nDistanciaTotal; vecino.idRouterPrevio = currentRouter; enlace.nBandwidthDisponible -= minBW; //reservar el BW en el enlace routerQueue.Enqueue(vecino, 1); } } } //Agrega el router (bueno, los 2) al registro int indexTarget = routerList.FindIndex(n => n.idRouter == vecino.idRouter); if (indexTarget != -1) routerList[indexTarget] = vecino; else routerList.Add(vecino); int indexSource = routerList.FindIndex(n => n.idRouter == currentRouter.idRouter); if (indexSource != -1) routerList[indexSource] = currentRouter; else routerList.Add(currentRouter); } } return routerList; }
private void RemoveFromOpen(SearchNode node)
{
    m_open.Remove(node);
    node.Opened = false;
    node.SetSearchType(SearchType.Expanded, true, true);
}
public static Mesh Simplify(this Mesh originalMesh, int targetCount) { // gather distinct vertices Dictionary <Vector3, Vertex> vectorVertex = new Dictionary <Vector3, Vertex>(originalMesh.vertexCount); foreach (Triangle t in originalMesh.tris) { AddVertex(t.v1, vectorVertex); AddVertex(t.v2, vectorVertex); AddVertex(t.v3, vectorVertex); } // accumulate quadric matrices for each vertex based on its faces // assign initial quadric foreach (Triangle t in originalMesh.tris) { Matrix q = t.Quadric(); Vertex v1 = vectorVertex[t.v1]; Vertex v2 = vectorVertex[t.v2]; Vertex v3 = vectorVertex[t.v3]; v1.Quadric = v1.Quadric.Add(q); v2.Quadric = v2.Quadric.Add(q); v3.Quadric = v3.Quadric.Add(q); } //vertex -> face map Dictionary <Vertex, List <Face> > vertexFaces = new Dictionary <Vertex, List <Face> >(originalMesh.vertexCount); foreach (Triangle t in originalMesh.tris) { Vertex v1 = vectorVertex[t.v1]; Vertex v2 = vectorVertex[t.v2]; Vertex v3 = vectorVertex[t.v3]; Face f = new Face(v1, v2, v3); vertexFaces.AppendEx(v1, f); vertexFaces.AppendEx(v2, f); vertexFaces.AppendEx(v3, f); } System.Diagnostics.Stopwatch sw = System.Diagnostics.Stopwatch.StartNew(); //gather distinct pairs Dictionary <Pair.Key, Pair> pairs = new Dictionary <Pair.Key, Pair>(originalMesh.trisCount); foreach (Triangle t in originalMesh.tris) { Vertex v1 = vectorVertex[t.v1]; Vertex v2 = vectorVertex[t.v2]; Vertex v3 = vectorVertex[t.v3]; var one = sw.ElapsedMilliseconds; pairs.AddPair(v1, v2); pairs.AddPair(v2, v3); pairs.AddPair(v1, v3); } Console.WriteLine($"total: {sw.ElapsedMilliseconds}"); Dictionary <Vertex, List <Pair> > vertexPairs = new Dictionary <Vertex, List <Pair> >(originalMesh.vertexCount); foreach (KeyValuePair <Pair.Key, Pair> p in pairs) { vertexPairs.AppendEx(p.Value.A, p.Value); vertexPairs.AppendEx(p.Value.B, p.Value); } var priorityQueue = new SimplePriorityQueue <Pair, float>(CompFloats); foreach (KeyValuePair <Pair.Key, Pair> item in pairs) { item.Value.Error(); priorityQueue.Enqueue(item.Value, item.Value.CachedError); } //take best pair int currentFaceCount = originalMesh.tris.Length; int targetFaceCount = targetCount; while (currentFaceCount > targetFaceCount && priorityQueue.Count > 0) { //best pair Pair p = priorityQueue.Dequeue(); if (p.Removed) { continue; } p.Removed = true; //get distinct faces var distinctFaces = new HashSet <Face>(); if (vertexFaces.ContainsKey(p.A)) { foreach (var f in vertexFaces[p.A]) { if (!f.Removed) { if (!distinctFaces.Contains(f)) { distinctFaces.Add(f); } } } } if (vertexFaces.ContainsKey(p.B)) { foreach (var f in vertexFaces[p.B]) { if (!f.Removed) { if (!distinctFaces.Contains(f)) { distinctFaces.Add(f); } } } } //get related pairs var distintPairs = new HashSet <Pair>(); if (vertexPairs.ContainsKey(p.A)) { foreach (var q in vertexPairs[p.A]) { if (!q.Removed) { if (!distintPairs.Contains(q)) { distintPairs.Add(q); } } } } if (vertexPairs.ContainsKey(p.B)) { foreach (var q in vertexPairs[p.B]) { if (!q.Removed) { if (!distintPairs.Contains(q)) { distintPairs.Add(q); } } } } //create new vertex Vertex v = new Vertex(p.Vector(), p.Quadric()); //updateFaces var newFaces = new List <Face>(); bool valid = true; foreach (var f in distinctFaces) { var(v1, v2, v3) = (f.V1, f.V2, f.V3); if (v1 == p.A || v1 == p.B) { v1 = v; } if (v2 == p.A || v2 == p.B) { v2 = v; } if (v3 == p.A || v3 == p.B) { v3 = v; } var face = new Face(v1, v2, v3); if (face.Degenerate) { continue; } if (face.Normal().Dot(f.Normal()) < 1e-3) { valid = false; break; } newFaces.Add(face); } if (!valid) { 
continue; } if (vertexFaces.ContainsKey(p.A)) { vertexFaces.Remove(p.A); } if (vertexFaces.ContainsKey(p.B)) { vertexFaces.Remove(p.B); } foreach (var f in distinctFaces) { f.Removed = true; currentFaceCount--; } foreach (var f in newFaces) { currentFaceCount++; vertexFaces.AppendEx(f.V1, f); vertexFaces.AppendEx(f.V2, f); vertexFaces.AppendEx(f.V3, f); } if (vertexPairs.ContainsKey(p.A)) { vertexPairs.Remove(p.A); } if (vertexPairs.ContainsKey(p.B)) { vertexPairs.Remove(p.B); } var seen = new Dictionary <Vector3, bool>(); foreach (var q in distintPairs) { q.Removed = true; priorityQueue.Remove(q); var(a, b) = (q.A, q.B); if (a == p.A || a == p.B) { a = v; } if (b == p.A || b == p.B) { b = v; } if (b == v) { (a, b) = (b, a); // a = v } if (seen.ContainsKey(b.Vector3) && seen[b.Vector3]) { //ignore duplicates continue; } if (!seen.ContainsKey(b.Vector3)) { seen.Add(b.Vector3, true); } else { seen[b.Vector3] = true; } var np = new Pair(a, b); np.Error(); priorityQueue.Enqueue(np, np.CachedError); vertexPairs.AppendEx(a, np); vertexPairs.AppendEx(b, np); } } //gather distinct faces var finalDistinctFaces = new HashSet <Face>(); foreach (var faces in vertexFaces) { foreach (var face in faces.Value) { if (!face.Removed) { if (!finalDistinctFaces.Contains(face)) { finalDistinctFaces.Add(face); } } } } //create final mesh Mesh newMesh = new Mesh { tris = finalDistinctFaces.Select(x => new Triangle(x.V1.Vector3, x.V2.Vector3, x.V3.Vector3)).ToArray() }; return(newMesh); }
public Action[] Solve(HelirinState init, int min_life_score) { if (cost_maps == null || init == null) { return(null); } SimplePriorityQueue <HelirinState> q = new SimplePriorityQueue <HelirinState>(); Dictionary <HelirinState, StateData> data = new Dictionary <HelirinState, StateData>(); Dictionary <HelirinState, int> life_data = null; if (!Settings.allow_state_visit_with_less_life && Settings.invul_frames >= 0 && Settings.full_life >= 2) { life_data = new Dictionary <HelirinState, int>(); } // Set range of possible inputs int min_input = 0; if (Settings.min_ab_speed == 1) { min_input = 1; } else if (Settings.min_ab_speed == 2) { min_input = 9; } else if (Settings.min_ab_speed == 3) { min_input = 17; } else if (Settings.min_ab_speed > 3) { min_input = 25; } // Init HelirinState norm_init = NormaliseState(init); float cost = GetCost(init.xpos, init.ypos, init.life, init.invul, init.HasBonus()); float weight = 0; float total_cost = cost + weight; q.Enqueue(norm_init, total_cost); data.Add(norm_init, new StateData(init, weight, cost, null, null, false)); if (life_data != null) { life_data.Add(ClearLifeDataOfState(norm_init), Flooding.GetRealInvul(init.life, init.invul)); } // ProgressBar and preview settings float init_cost = cost; bool[,] preview = new bool[cost_maps[0][0].Get(true).Height, cost_maps[0][0].Get(true).Width]; int since_last_update = 0; // A* HelirinState result = null; while (q.Count > 0 && result == null) { HelirinState norm_st = q.Dequeue(); StateData st_data = data[norm_st]; st_data.already_treated = true; weight = st_data.weight + 1; // ProgressBar and preview settings preview[Physics.pos_to_px(st_data.exact_state.ypos) - f.PixelStart.y, Physics.pos_to_px(st_data.exact_state.xpos) - f.PixelStart.x] = true; since_last_update++; if (since_last_update >= Settings.nb_iterations_before_ui_update) { since_last_update = 0; parent.UpdateProgressBarAndHighlight(100 - st_data.cost * 100 / init_cost, preview); } for (int i = 24; i >= min_input; i--) { Action a = (Action)i; HelirinState nst = p.Next(st_data.exact_state, a); HelirinState norm_nst = NormaliseState(nst); // Lose / Not enough life / Out of search space ? int life_score = Flooding.GetRealInvul(nst.life, nst.invul); if (nst.gs == GameState.Lose || (life_score < min_life_score && life_score >= 0) || IsOutOfSearchSpace(nst.xpos, nst.ypos)) { continue; } // Already enqueued with more life ? HelirinState cleared_nst = null; if (life_data != null) { cleared_nst = ClearLifeDataOfState(norm_nst); int old_life_score; life_data.TryGetValue(cleared_nst, out old_life_score); // Default value for 'old_life_score' (type int) is 0. if (old_life_score > life_score) { continue; } } // Already visited ? // If the state was already visited, we should not add it to the queue again! Otherwise it could overwrite the state entry and corrupt some paths. StateData old; data.TryGetValue(norm_nst, out old); // Default value for 'old' (type StateData) is null. if (old != null && old.already_treated) { continue; } // Keep only if it is a non-infinite better cost cost = GetCost(nst.xpos, nst.ypos, nst.life, nst.invul, nst.HasBonus()); if (cost >= float.PositiveInfinity) { continue; } total_cost = cost + weight; if (old != null && total_cost >= old.cost + old.weight) { continue; } // Is the state terminal without having completed the objective? 
if (cost > 0 && nst.IsTerminal()) { continue; } // Everything's okay, we add the config to the data and queue StateData nst_data = new StateData(nst, weight, cost, a, norm_st, false); data[norm_nst] = nst_data; if (life_data != null) { life_data[cleared_nst] = life_score; } // Target reached ? We look at the cost rather than the game state, because the target can be different than winning if (cost <= 0) { result = norm_nst; break; } // We don't use UpdatePriority because it does not change the InsertionIndex (first-in, first-out) if (old != null) { q.Remove(norm_nst); } q.Enqueue(norm_nst, total_cost); /* * if (old == null) * q.Enqueue(norm_nst, total_cost); * else * q.UpdatePriority(norm_nst, total_cost); */ } } // Retrieve full path if (result == null) { return(null); } List <Action> res = new List <Action>(); while (result != null) { StateData sd = data[result]; if (sd.action.HasValue) { res.Add(sd.action.Value); } result = sd.previous_state; } res.Reverse(); return(res.ToArray()); }
private void Update() { int chunkX = Mathf.FloorToInt(player.position.x / chunkSize); int chunkZ = Mathf.FloorToInt(player.position.z / chunkSize); //Updating the chunks to be updated in the player's view //--> Adding the elements in a priority queue which will be updated with a coroutine if (chunkX != lastChunkPos.x || chunkZ != lastChunkPos.y) { //Removing the chunks out of player's view foreach (Chunk chunk in loadedChunks) { Vector2 chunkPos = chunk.GetPosFromMiddle(); /*if (player.position.x / 16 != lastChunkPos.x || player.position.z / 16 != lastChunkPos.y) * { * Debug.Log(Math.Abs(chunkPos.x - player.position.x / 16)); * Debug.Log(Math.Abs(chunkPos.y - player.position.z / 16)); * }*/ if (Math.Abs(chunkPos.x - player.position.x / chunkSize) > viewDistance + 2 || Math.Abs(chunkPos.y - player.position.z / chunkSize) > viewDistance + 2) { chunkToDestroy.Enqueue(chunk, chunkToDestroy.Count); } } int minx = -viewDistance + (int)(player.position.x / chunkSize); int maxx = viewDistance + (int)(player.position.x / chunkSize); int minz = -viewDistance + (int)(player.position.z / chunkSize); int maxz = viewDistance + (int)(player.position.z / chunkSize); for (int i = minx; i < maxx; i++) { for (int j = minz; j < maxz; j++) { Chunk toLoad = world.GenerateOrGetFromMiddle(i, j); if (chunkToDestroy.Contains(toLoad)) { chunkToDestroy.Remove(toLoad); //Debug.Log("ToBeDestroyed removed chunk : " + toLoad.GetPosFromMiddle().x + " / " + toLoad.GetPosFromMiddle().y); } if (!loadedChunks.Contains(toLoad) && !loadedMeshes.ContainsKey(toLoad)) { chunkPriority.Enqueue(toLoad, chunkPriority.Count); } } } lastChunkPos.x = chunkX; lastChunkPos.y = chunkZ; //Debug.Log("To be destroyed : " + chunkToDestroy.Count); //Debug.Log("To be loaded : " + chunkPriority.Count); while (chunkToDestroy.Count > 0) { Chunk chunk = chunkToDestroy.Dequeue(); loadedChunks.Remove(chunk); if (loadedMeshes.ContainsKey(chunk)) { GameObject mesh = loadedMeshes[chunk]; Destroy(mesh); } loadedMeshes.Remove(chunk); //Debug.Log("Removed chunk : " + chunk.GetPosFromMiddle().x + " / " + chunk.GetPosFromMiddle().y); } StartCoroutine(LoadAwaitingChunks()); } }
private static PathfindingResult RunPathfinder(Axial start, Axial end) { SimplePriorityQueue <Node> openNodes = new SimplePriorityQueue <Node>(); Dictionary <Axial, Node> closedNodes = new Dictionary <Axial, Node>(); Node startNode = new Node(start, 0, GetHeuristic(start, end)); Node current = null; openNodes.Enqueue(startNode, startNode.Cost); while (openNodes.Count > 0) { current = openNodes.Dequeue(); closedNodes.Add(current.Position, current); if (current == end) // Reached end { break; } foreach (AxialDirection direction in AxialDirection.AllDirections) { Axial neighborPosition = current.Position + direction; if (!Utility.IsAxialPositionWalkable(neighborPosition)) { continue; } float newNeighborCost = current.Cost + GetMovementCost(current, direction); float heuristic = GetHeuristic(neighborPosition, end); Node neighbor = new Node(neighborPosition, newNeighborCost, heuristic, current); // Update surrounding nodes if we have a better path, even if they've already been evaluated if (openNodes.Contains(neighbor)) { // The heuristic is deterministic, so this will get us the pure cost of the node float currentCost = openNodes.GetPriority(neighbor) - heuristic; if (newNeighborCost < currentCost) { openNodes.Remove(neighbor); } } if (closedNodes.ContainsKey(neighborPosition)) { float currentCost = closedNodes[neighborPosition].Cost; if (newNeighborCost < currentCost) { closedNodes.Remove(neighborPosition); } } // Add them to the queue if applicable if (!openNodes.Contains(neighbor) && !closedNodes.ContainsKey(neighbor.Position)) { openNodes.Enqueue(neighbor, neighbor.FullValue); } } } return(CalculateResults(current)); PathfindingResult CalculateResults(Node endNode) { if (endNode == null) { Debug.LogError($"Couldn't calculate path between {start} and {end}"); return(null); } PathfindingResult result = new PathfindingResult() { OpenNodes = openNodes, CloesdNodes = closedNodes, }; result.FinishedPath = new LinkedList <Vector2>(); result.FinishedPath.AddFirst(endNode); Node currentNode = endNode; while (currentNode.Parent != null) { currentNode = closedNodes[currentNode.Parent.Value]; result.FinishedPath.AddFirst(currentNode); } // We're already in the first node, walking to it would just force us to walk to the center of the node, and we don't want that. result.FinishedPath.RemoveFirst(); return(result); } }
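// The pathfinder above asks the open set about freshly constructed Node instances
// (openNodes.Contains(neighbor), openNodes.GetPriority(neighbor)). With the BlueRaja
// SimplePriorityQueue, item lookups go through hashing and equality of the item itself, so
// this only works if Node compares by value (its position) rather than by reference. A
// hedged sketch of the kind of equality members that pattern relies on; the Axial and Node
// shapes here are stand-ins, not the project's actual types:
using System;

public readonly struct Axial : IEquatable<Axial>
{
    public readonly int Q;
    public readonly int R;
    public Axial(int q, int r) { Q = q; R = r; }
    public bool Equals(Axial other) => Q == other.Q && R == other.R;
    public override bool Equals(object obj) => obj is Axial other && Equals(other);
    public override int GetHashCode() => (Q * 397) ^ R;
}

public class Node : IEquatable<Node>
{
    public Axial Position { get; }
    public float Cost { get; }
    public Node(Axial position, float cost) { Position = position; Cost = cost; }

    // Two Node instances are "the same item" in the queue when they sit on the same cell.
    public bool Equals(Node other) => other != null && Position.Equals(other.Position);
    public override bool Equals(object obj) => Equals(obj as Node);
    public override int GetHashCode() => Position.GetHashCode();
}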
private void Delete(SearchNode X)
{
    m_openQueue.Remove(X);
    X.Closed = true;
}