/// <summary>
/// Runs an A* search from <paramref name="initial"/> until a goal state is found.
/// DeleteMax pops the best-ranked state (HeuristicComparer presumably inverts the
/// ordering so that max == most promising — confirm against the comparer).
/// </summary>
/// <param name="initial">The starting state.</param>
/// <returns>The goal state that was reached, or null when the search space is exhausted.</returns>
static State AStar(State initial)
{
    Console.WriteLine("Finding best path using A*...");
    HashSet<State> closed = new HashSet<State>();
    C5.IntervalHeap<State> open = new C5.IntervalHeap<State>(new HeuristicComparer(new Point(0, 0)));
    open.Add(initial);
    State goal = null;
    while (open.Count > 0)
    {
        var current = open.DeleteMax();
        if (current.IsGoal())
        {
            goal = current;
            break;
        }
        closed.Add(current);
        foreach (var next in current.GetSuccessors())
        {
            // Skip states already expanded; everything else joins the frontier.
            if (!closed.Contains(next))
            {
                open.Add(next);
            }
        }
    }
    // Bug fix: goal.GetStepsFromStart() used to be called unconditionally and
    // threw a NullReferenceException when the frontier emptied without ever
    // reaching a goal state.
    if (goal != null)
    {
        Console.WriteLine("Steps to goal: {0}", goal.GetStepsFromStart());
    }
    else
    {
        Console.WriteLine("No goal state reachable.");
    }
    return goal;
}
/// <summary>
/// Empties every benchmark collection so the next iteration starts from a clean state.
/// </summary>
public void IterationCleanup()
{
    // C5 collections without a Clear() must be drained one element at a time.
    while (c5_circularqueue.Count > 0)
    {
        _ = c5_circularqueue.Dequeue();
    }
    while (c5_intervalheap.Count > 0)
    {
        _ = c5_intervalheap.DeleteMax();
    }

    // Everything else supports Clear() directly.
    c5_arraylist.Clear();
    c5_hashbag.Clear();
    c5_hashedarraylist.Clear();
    c5_hashedlinkedlist.Clear();
    c5_hashset.Clear();
    c5_linkedlist.Clear();
    c5_sortedarray.Clear();
    c5_treebag.Clear();
    c5_treeset.Clear();
    collectathon_boundedarray.Clear();
    collectathon_dynamicarray.Clear();
    collectathon_singlylinkedlist.Clear();
    msft_hashset.Clear();
    msft_linkedlist.Clear();
    msft_list.Clear();
    msft_queue.Clear();
    msft_segmentedlist.Clear();
    msft_sortedset.Clear();
    msft_stack.Clear();
}
/// <summary>
/// Removes the maximum-priority entry from the heap and returns it,
/// dropping the bookkeeping kept for its key.
/// </summary>
public KeyValuePair<TKey, TValue> PopMax()
{
    var max = _heap.DeleteMax();
    // Results of the removals are intentionally ignored.
    _ = _dict.Remove(max.Key);
    _ = _handles.Remove(max.Key);
    return max;
}
/// <summary>
/// Removes and returns the maximum item.
/// </summary>
/// <returns>The value of the removed maximum item.</returns>
public T DeleteMax()
{
    var removed = _priQueue.DeleteMax();
    // Keep the side index in sync with the underlying priority queue.
    _index.Remove(removed.Key);
    return removed.Value;
}
/// <summary>
/// Returns the minimum number of character deletions needed so that no two
/// distinct characters of <paramref name="s"/> occur the same number of times.
/// </summary>
/// <param name="s">Input string (may be empty).</param>
/// <returns>The number of deletions required.</returns>
public int minDeletionsToMakeFrequencyOfEachLetterUnique(string s)
{
    // Count occurrences of every character.
    var occurrences = new Dictionary<char, int>();
    foreach (char c in s)
    {
        occurrences.TryGetValue(c, out int n);
        occurrences[c] = n + 1;
    }

    // Greedily lower each duplicate frequency one step at a time until it is
    // either unique or reaches zero (the character is then fully deleted).
    // Each decrement corresponds to deleting exactly one character, so this
    // yields the same minimum as popping duplicates off a max-heap.
    var usedFrequencies = new HashSet<int>();
    int deletions = 0;
    foreach (int frequency in occurrences.Values)
    {
        int current = frequency;
        // HashSet.Add returns false when the frequency is already taken.
        while (current > 0 && !usedFrequencies.Add(current))
        {
            current--;
            deletions++;
        }
    }
    return deletions;
}
/// <summary>
/// Computes the shortest-path distance between two labelled nodes using Dijkstra's algorithm.
/// </summary>
/// <param name="from">Label of the start node.</param>
/// <param name="to">Label of the destination node.</param>
/// <returns>The shortest distance, or int.MaxValue when the destination is unreachable.</returns>
/// <exception cref="System.InvalidOperationException">Thrown when either label does not resolve to a node.</exception>
public int GetShortestDistance(string from, string to)
{
    // Dijkstra Algorithm
    var fromNode = GetNode(from);
    var toNode = GetNode(to);
    // Bug fix: validate the inputs BEFORE using fromNode as a dictionary key.
    // Previously `distances[fromNode] = 0` ran first, so a null fromNode threw
    // an ArgumentNullException instead of the intended InvalidOperationException.
    if (fromNode == null || toNode == null)
    {
        throw new System.InvalidOperationException("Input nodes are invalid");
    }

    var distances = new Dictionary<Node, int>();
    var previousNodes = new Dictionary<Node, Node>();
    // NodeComp presumably inverts the ordering so DeleteMax pops the closest node — confirm.
    var queue = new C5.IntervalHeap<NodeEntry>(new NodeComp()); // Priority Queue
    var visited = new HashSet<Node>();
    foreach (var node in itemsMap.Values)
    {
        distances[node] = int.MaxValue;
        previousNodes[node] = null;
    }
    distances[fromNode] = 0;
    queue.Add(new NodeEntry(fromNode, 0));
    while (queue.Count > 0)
    {
        var currentNode = queue.DeleteMax().GetNode();
        visited.Add(currentNode);
        foreach (var edge in currentNode.GetEdges())
        {
            if (visited.Contains(edge.to))
            {
                continue;
            }
            // Relax the edge if this route to its target is shorter.
            var currentDistance = distances[currentNode] + edge.weight;
            if (currentDistance < distances[edge.to])
            {
                distances[edge.to] = currentDistance;
                previousNodes[edge.to] = currentNode;
                queue.Add(new NodeEntry(edge.to, currentDistance));
            }
        }
    }
    return distances[toNode];
}
/// <summary>
/// Clear the image queue.
/// </summary>
/// <returns>The number of requests cleared.</returns>
public int ClearImageQueue()
{
    lock (m_priorityQueue)
    {
        int requestsDeleted = m_priorityQueue.Count;
        // The priority queue exposes no Clear(), so drain it item by item.
        while (!m_priorityQueue.IsEmpty)
        {
            m_priorityQueue.DeleteMax();
        }
        return requestsDeleted;
    }
}
/// <summary>
/// Performs one batch of prioritized-sweeping updates: pops the highest-priority
/// transitions from the queue, trains the model on them, then re-enqueues a random
/// subset of each updated state's predecessors so their values get refreshed.
/// </summary>
private void PrioritizedSweeping()
{
    // Pop at most BatchSize of the highest-priority transitions.
    int n = Mathf.Min(BatchSize, Pq.Count);
    var batch = Enumerable.Range(0, n).Select(i => Pq.DeleteMax()).ToList();
    TrainModel(batch);
    foreach (var sars in batch)
    {
        // Fix: single TryGetValue lookup instead of ContainsKey + indexer (double lookup).
        if (Preds.TryGetValue(sars.State, out var preds))
        {
            foreach (var pred in preds.Shuffle().Take(PredecessorCap))
            {
                EnqueueSARS(pred);
            }
        }
    }
}
/// <summary>
/// Solves the puzzle with a best-first search: expands states popped from a
/// priority queue until the goal board is reached, tracking the largest
/// fringe size seen along the way.
/// </summary>
public override void Solve(State state)
{
    var seenBoards = new HashSet<Board>();
    var fringe = new C5.IntervalHeap<State>();
    fringe.Add(state);
    seenBoards.Add(state.CurrentBoard);
    while (fringe.Count > 0)
    {
        // Record the largest fringe ever observed, for reporting.
        if (fringe.Count > this.MaxFringeSize)
        {
            this.MaxFringeSize = fringe.Count;
        }
        state = fringe.DeleteMax();
        if (state.CurrentBoard.IsEqual(this.GoalState))
        {
            this.PrintResults(state, fringe.Count);
            break;
        }
        // Locate the empty tile, then expand this state's children.
        var zeroPos = state.CurrentBoard.IndexOfZero();
        var children = this.GenerateChildrenStates(state, zeroPos.Item1, zeroPos.Item2);
        // Enqueue unseen children, preserving the original reverse iteration order.
        for (var i = children.Count - 1; i >= 0; i--)
        {
            var child = children[i];
            if (!seenBoards.Contains(child.CurrentBoard))
            {
                fringe.Add(child);
                seenBoards.Add(child.CurrentBoard);
            }
        }
    }
}
/// <summary>
/// Finds the best path from (1, 1) to (31, 39) with A* and prints the step count.
/// </summary>
static void AStar()
{
    Console.WriteLine("Finding best path using A*...");
    Space start = new Space(1, 1);
    Space goal = new Space(31, 39);
    var closed = new HashSet<Space>();
    var open = new C5.IntervalHeap<Space>(new HeuristicComparer(goal));
    open.Add(start);
    while (open.Count > 0)
    {
        var current = open.DeleteMax();
        if (current.Equals(goal))
        {
            // Keep the expanded node so its accumulated step count is used below.
            goal = current;
            break;
        }
        closed.Add(current);
        foreach (var neighbour in current.GetPointsAround())
        {
            if (closed.Contains(neighbour))
            {
                continue;
            }
            if (neighbour.IsWall)
            {
                // Walls are never expanded; mark them visited immediately.
                closed.Add(neighbour);
            }
            else
            {
                open.Add(neighbour);
            }
        }
    }
    Console.WriteLine("Steps to goal: {0}", goal.GetStepsFromStart());
}
/// <summary>
/// A* path search from startpoint to endpoint on the given board.
/// Returns the path as a list of positions, or null when the search exhausts
/// the frontier or the best candidate exceeds MAX_STEPS.
/// </summary>
public static List<Vector2> getPath(Vector2 startpoint, Vector2 endpoint, BoardRunner board)
{
    //A* search, start with startpoint, make priorityqueue of positions, and keep adding
    var heap = new C5.IntervalHeap<Position>();
    var visitedPoints = new Dictionary<int, Position>(board.sizeX * board.sizeY);
    var start = new Position(null, startpoint, 0, Vector2.Distance(startpoint, endpoint));
    heap.Add(start);
    visitedPoints[getPosition(startpoint, board)] = start;
    while (!heap.IsEmpty)
    {
        Position cur = heap.DeleteMax();
        if (cur.curPosition == endpoint)
        {
            return getSolution(cur);
        }
        if (cur.stepsTaken > MAX_STEPS)
        {
            // Give up once even the best candidate exceeds the step budget.
            return null;
        }
        // Expand the four orthogonal neighbours first, then the diagonals,
        // in the same order as the original explicit call sequence.
        int[,] directions =
        {
            { 1, 0 }, { -1, 0 }, { 0, 1 }, { 0, -1 },
            { 1, 1 }, { 1, -1 }, { -1, 1 }, { -1, -1 },
        };
        for (int d = 0; d < 8; d++)
        {
            explorePosition(endpoint, cur, visitedPoints, heap, board, directions[d, 0], directions[d, 1]);
        }
    }
    return null;
}
/// <summary>
/// Builds a minimum spanning tree of this graph using Prim's algorithm.
/// </summary>
/// <returns>A new graph containing the MST; empty when the graph has no nodes,
/// and a partial tree when the graph is disconnected.</returns>
public CustomWeightedGraph MinimumSpanTree()
{
    // Prim's Algorithm
    var minSpanTree = new CustomWeightedGraph();
    if (itemsMap.Count == 0)
    {
        return minSpanTree; // Error Handling: nothing to span.
    }
    var totalNodes = GetTotalNodes();
    var nodes = new HashSet<Node>();
    // EdgeComp presumably inverts ordering so DeleteMax pops the cheapest edge — confirm.
    var edgesPriorityQueue = new C5.IntervalHeap<Edge>(new EdgeComp());
    var node = itemsMap.Values.First(); // Randomly pick the first node to add to Minimum Span Tree
    nodes.Add(node);
    minSpanTree.AddNode(node.label);
    edgesPriorityQueue.AddAll(node.GetEdges());
    // Bug fix: also stop when the edge queue is exhausted — on a disconnected
    // graph the old loop called DeleteMax() on an empty heap and threw.
    while (nodes.Count < totalNodes && !edgesPriorityQueue.IsEmpty)
    {
        var minEdge = edgesPriorityQueue.DeleteMax();
        // Skip edges whose both endpoints are already in the tree (would form a cycle).
        if (minSpanTree.ContainNode(minEdge.to.label) && minSpanTree.ContainNode(minEdge.from.label))
        {
            continue;
        }
        node = minEdge.to;
        nodes.Add(node);
        minSpanTree.AddNode(node.label);
        minSpanTree.AddEdge(minEdge.from.label, minEdge.to.label, minEdge.weight);
        // Only enqueue edges that can still reach a node outside the tree.
        edgesPriorityQueue.AddAll(node.GetEdges().Where(e => !minSpanTree.ContainNode(e.to.label)));
    }
    return minSpanTree;
}
/// <summary>
/// Sends up to <paramref name="packetsToSend"/> texture packets from the priority
/// queue to the client. Undecoded images are pulled off the queue during the pass
/// and re-added at the end so they do not block decoded ones.
/// </summary>
/// <param name="packetsToSend">Upper bound on the number of packets to send in this pass.</param>
/// <returns>True when the priority queue still has entries after this pass.</returns>
public bool ProcessImageQueue(int packetsToSend)
{
    int StartTime = Util.EnvironmentTickCount();
    int packetsSent = 0;
    // Undecoded images removed from the queue this pass; re-added at the end.
    List<J2KImage> imagesToReAdd = new List<J2KImage>();
    while (packetsSent < packetsToSend)
    {
        J2KImage image = GetHighestPriorityImage();

        // If null was returned, the texture priority queue is currently empty
        if (image == null)
        {
            break; //Break so that we add any images back that we might remove because they arn't finished decoding
        }

        if (image.IsDecoded)
        {
            if (image.Layers == null)
            {
                //We don't have it, tell the client that it doesn't exist
                m_client.SendAssetUploadCompleteMessage((sbyte)AssetType.Texture, false, image.TextureID);
                RemoveImageFromQueue(image);
                packetsSent++;
            }
            else
            {
                // Send as many packets of this image as the remaining budget allows.
                int sent;
                bool imageDone = image.SendPackets(m_client, packetsToSend - packetsSent, out sent);
                packetsSent += sent;

                // If the send is complete, destroy any knowledge of this transfer
                if (imageDone)
                {
                    RemoveImageFromQueue(image);
                }
            }
        }
        else
        {
            //Add it to the other queue and delete it from the top
            imagesToReAdd.Add(image);
            m_priorityQueue.DeleteMax();
            packetsSent++; //We tried to send one
            // UNTODO: This was a limitation of how LLImageManager is currently
            // written. Undecoded textures should not be going into the priority
            // queue, because a high priority undecoded texture will clog up the
            // pipeline for a client
            //return true;
        }
    }

    //Add all the ones we removed so that we wouldn't block the queue
    if (imagesToReAdd.Count != 0)
    {
        foreach (J2KImage image in imagesToReAdd)
        {
            this.AddImageToQueue(image);
        }
    }

    // Record how long this pass took, if a frame-time monitor is registered.
    int EndTime = Util.EnvironmentTickCountSubtract(StartTime);
    IMonitorModule module = m_client.Scene.RequestModuleInterface<IMonitorModule>();
    if (module != null)
    {
        IImageFrameTimeMonitor monitor = (IImageFrameTimeMonitor)module.GetMonitor(m_client.Scene.RegionInfo.RegionID.ToString(), "Images Frame Time");
        monitor.AddImageTime(EndTime);
    }
    return (m_priorityQueue.Count > 0);
}
// Two-way incremental topological-ordering search, invoked when inserting the
// edge (iv -> iw): searches forward from w and backward from v simultaneously.
// Returns false when the searches meet (the new edge would create a cycle);
// otherwise reorders the affected vertices in _nodeOrder and records the edge.
// NOTE(review): appears to follow the Haeupler et al. incremental cycle
// detection scheme — confirm against the referenced article.
private bool VertexGuidedSearch(int iv, int iw)
{
    var v = _nodes[iv];
    var w = _nodes[iw];
    // f / b collect the vertices reached forward from w and backward from v;
    // the InF / InB flags mirror membership for O(1) lookups.
    f.Clear();
    b.Clear();
    f.Add(w); w.Value.InF = true;
    b.Add(v); v.Value.InB = true;
    // Position each endpoint's edge enumerator on its first edge.
    w.Value.OutEnum = w.Value.Outgoing.GetEnumerator();
    w.Value.OutEnum.MoveNext();
    v.Value.InEnum = v.Value.Incoming.GetEnumerator();
    v.Value.InEnum.MoveNext();
    // Frontier heaps: vertices that still have unscanned out-/in-edges.
    var fl = new C5.IntervalHeap<SGTNode<HKMSTNode>>();
    var bl = new C5.IntervalHeap<SGTNode<HKMSTNode>>();
    if (w.Value.OutEnum.Current != null) fl.Add(w);
    if (v.Value.InEnum.Current != null) bl.Add(v);
    // For ease of notation, we adopt the convention that the
    // minimum of an empty set is bigger than any other value and the maximum of an empty
    // set is smaller than any other value.
    SGTNode<HKMSTNode> u = null;
    SGTNode<HKMSTNode> z = null;
    if (fl.Count > 0) u = fl.FindMin();
    if (bl.Count > 0) z = bl.FindMax();
    while (fl.Count > 0 && bl.Count > 0 && (u == z || _nodeOrder.Query(z, u)))
    {
        // SEARCH-STEP(vertex u, vertex z)
        var x = u.Value.OutEnum.Current;
        var y = z.Value.InEnum.Current;
        u.Value.OutEnum.MoveNext();
        z.Value.InEnum.MoveNext();
        // Drop a vertex from its frontier once all of its edges are scanned.
        if (u.Value.OutEnum.Current == null) fl.DeleteMin();
        if (z.Value.InEnum.Current == null) bl.DeleteMax();
        if (x.Value.InB)
        {
            // Forward search touched the backward set: the edge would close a cycle.
            f.ForEach(item => item.Value.InF = false);
            b.ForEach(item => item.Value.InB = false);
            return false; // Pair(uz.from, x.Current);
        }
        else if (y.Value.InF)
        {
            // Backward search touched the forward set: cycle detected.
            f.ForEach(item => item.Value.InF = false);
            b.ForEach(item => item.Value.InB = false);
            return false; // Pair(y.Current, uz.to);
        }
        // Grow the forward set with the newly reached vertex x.
        if (!x.Value.InF)
        {
            f.Add(x);
            x.Value.InF = true;
            x.Value.OutEnum = x.Value.Outgoing.GetEnumerator();
            x.Value.OutEnum.MoveNext();
            if (x.Value.OutEnum.Current != null) fl.Add(x);
        }
        // Grow the backward set with the newly reached vertex y.
        if (!y.Value.InB)
        {
            b.Add(y);
            y.Value.InB = true;
            y.Value.InEnum = y.Value.Incoming.GetEnumerator();
            y.Value.InEnum.MoveNext();
            if (y.Value.InEnum.Current != null) bl.Add(y);
        }
        // End of SEARCH-STEP(vertex u, vertex z)
        if (fl.Count > 0) u = fl.FindMin();
        if (bl.Count > 0) z = bl.FindMax();
    }
    // let t = min({v}∪{x ∈ F|out(x) = null} and
    // reorder the vertices in F< and B> as discussed previously.
    var vAndf = f.FindAll(item => item.Value.OutEnum.Current != null);
    vAndf.Add(v);
    var t = vAndf.Min();
    // Let F< = { x ∈ F | x < t} and
    var fb = f.FindAll(item => item.Label < t.Label);
    // B > = { y ∈ B | y > t}.
    var bf = b.FindAll(item => item.Label > t.Label);
    if (t == v)
    {
        // move all vertices in fb just after t ... bf is empty
        foreach (var node in fb)
            _nodeOrder.Remove(node);
        if (fb.Count > 1)
            fb = TopoSort(fb);
        if (fb.Count > 0)
        {
            var prev = _nodeOrder.insertAfter(t, fb[0]);
            for (int i = 1; i < fb.Count; i++)
                prev = _nodeOrder.insertAfter(prev, fb[i]);
        }
    }
    if (t.Label < v.Label)
    {
        // move all vertices in fb just before t and all vertices in bf just before all vertices in fb
        // This is required as the articles states
        if (bf.Count > 1)
            bf = TopoSort(bf);
        if (fb.Count > 1)
            fb = TopoSort(fb);
        foreach (var node in bf)
            _nodeOrder.Remove(node);
        foreach (var node in fb)
            _nodeOrder.Remove(node);
        foreach (var item in fb)
            bf.Add(item);
        if (bf.Count > 0)
        {
            // Re-insert right-to-left so the final order matches bf's order.
            var prev = _nodeOrder.insertBefore(t, bf[bf.Count - 1]);
            if (bf.Count > 1)
            {
                for (int i = bf.Count - 2; i >= 0; i--)
                    prev = _nodeOrder.insertBefore(prev, bf[i]);
            }
        }
    }
    // reset bools
    f.ForEach(item => item.Value.InF = false);
    b.ForEach(item => item.Value.InB = false);
    // all done add to Outgoing and Incoming
    _nodes[iv].Value.Outgoing.Add(_nodes[iw]);
    _nodes[iw].Value.Incoming.Add(_nodes[iv]);
    return true;
}
// Two-way incremental topological-ordering search, invoked when inserting the
// edge (iv -> iw): searches forward from w and backward from v simultaneously.
// Returns false when the searches meet (the new edge would create a cycle);
// otherwise reorders the affected vertices in _nodeOrder and records the edge.
// NOTE(review): appears to follow the Haeupler et al. incremental cycle
// detection scheme — confirm against the referenced article.
private bool VertexGuidedSearch(int iv, int iw)
{
    var v = _nodes[iv];
    var w = _nodes[iw];
    // f / b collect the vertices reached forward from w and backward from v;
    // the InF / InB flags mirror membership for O(1) lookups.
    f.Clear();
    b.Clear();
    f.Add(w);
    w.Value.InF = true;
    b.Add(v);
    v.Value.InB = true;
    // Position each endpoint's edge enumerator on its first edge.
    w.Value.OutEnum = w.Value.Outgoing.GetEnumerator();
    w.Value.OutEnum.MoveNext();
    v.Value.InEnum = v.Value.Incoming.GetEnumerator();
    v.Value.InEnum.MoveNext();
    // Frontier heaps: vertices that still have unscanned out-/in-edges.
    var fl = new C5.IntervalHeap<SGTNode<HKMSTNode>>();
    var bl = new C5.IntervalHeap<SGTNode<HKMSTNode>>();
    if (w.Value.OutEnum.Current != null)
    {
        fl.Add(w);
    }
    if (v.Value.InEnum.Current != null)
    {
        bl.Add(v);
    }
    // For ease of notation, we adopt the convention that the
    // minimum of an empty set is bigger than any other value and the maximum of an empty
    // set is smaller than any other value.
    SGTNode<HKMSTNode> u = null;
    SGTNode<HKMSTNode> z = null;
    if (fl.Count > 0)
    {
        u = fl.FindMin();
    }
    if (bl.Count > 0)
    {
        z = bl.FindMax();
    }
    while (fl.Count > 0 && bl.Count > 0 && (u == z || _nodeOrder.Query(z, u)))
    {
        // SEARCH-STEP(vertex u, vertex z)
        var x = u.Value.OutEnum.Current;
        var y = z.Value.InEnum.Current;
        u.Value.OutEnum.MoveNext();
        z.Value.InEnum.MoveNext();
        // Drop a vertex from its frontier once all of its edges are scanned.
        if (u.Value.OutEnum.Current == null)
        {
            fl.DeleteMin();
        }
        if (z.Value.InEnum.Current == null)
        {
            bl.DeleteMax();
        }
        if (x.Value.InB)
        {
            // Forward search touched the backward set: the edge would close a cycle.
            f.ForEach(item => item.Value.InF = false);
            b.ForEach(item => item.Value.InB = false);
            return(false); // Pair(uz.from, x.Current);
        }
        else if (y.Value.InF)
        {
            // Backward search touched the forward set: cycle detected.
            f.ForEach(item => item.Value.InF = false);
            b.ForEach(item => item.Value.InB = false);
            return(false); // Pair(y.Current, uz.to);
        }
        // Grow the forward set with the newly reached vertex x.
        if (!x.Value.InF)
        {
            f.Add(x);
            x.Value.InF = true;
            x.Value.OutEnum = x.Value.Outgoing.GetEnumerator();
            x.Value.OutEnum.MoveNext();
            if (x.Value.OutEnum.Current != null)
            {
                fl.Add(x);
            }
        }
        // Grow the backward set with the newly reached vertex y.
        if (!y.Value.InB)
        {
            b.Add(y);
            y.Value.InB = true;
            y.Value.InEnum = y.Value.Incoming.GetEnumerator();
            y.Value.InEnum.MoveNext();
            if (y.Value.InEnum.Current != null)
            {
                bl.Add(y);
            }
        }
        // End of SEARCH-STEP(vertex u, vertex z)
        if (fl.Count > 0)
        {
            u = fl.FindMin();
        }
        if (bl.Count > 0)
        {
            z = bl.FindMax();
        }
    }
    // let t = min({v}∪{x ∈ F|out(x) = null} and reorder the vertices in F< and B> as discussed previously.
    var vAndf = f.FindAll(item => item.Value.OutEnum.Current != null);
    vAndf.Add(v);
    var t = vAndf.Min();
    // Let F< = { x ∈ F | x < t} and
    var fb = f.FindAll(item => item.Label < t.Label);
    // B > = { y ∈ B | y > t}.
    var bf = b.FindAll(item => item.Label > t.Label);
    if (t == v)
    {
        // move all vertices in fb just after t ... bf is empty
        foreach (var node in fb)
        {
            _nodeOrder.Remove(node);
        }
        if (fb.Count > 1)
        {
            fb = TopoSort(fb);
        }
        if (fb.Count > 0)
        {
            var prev = _nodeOrder.insertAfter(t, fb[0]);
            for (int i = 1; i < fb.Count; i++)
            {
                prev = _nodeOrder.insertAfter(prev, fb[i]);
            }
        }
    }
    if (t.Label < v.Label)
    {
        // move all vertices in fb just before t and all vertices in bf just before all vertices in fb
        // This is required as the articles states
        if (bf.Count > 1)
        {
            bf = TopoSort(bf);
        }
        if (fb.Count > 1)
        {
            fb = TopoSort(fb);
        }
        foreach (var node in bf)
        {
            _nodeOrder.Remove(node);
        }
        foreach (var node in fb)
        {
            _nodeOrder.Remove(node);
        }
        foreach (var item in fb)
        {
            bf.Add(item);
        }
        if (bf.Count > 0)
        {
            // Re-insert right-to-left so the final order matches bf's order.
            var prev = _nodeOrder.insertBefore(t, bf[bf.Count - 1]);
            if (bf.Count > 1)
            {
                for (int i = bf.Count - 2; i >= 0; i--)
                {
                    prev = _nodeOrder.insertBefore(prev, bf[i]);
                }
            }
        }
    }
    // reset bools
    f.ForEach(item => item.Value.InF = false);
    b.ForEach(item => item.Value.InB = false);
    // all done add to Outgoing and Incoming
    _nodes[iv].Value.Outgoing.Add(_nodes[iw]);
    _nodes[iw].Value.Incoming.Add(_nodes[iv]);
    return(true);
}
/// <summary>
/// Selects up to <paramref name="txsToTake"/> transactions to propose for the given era:
/// governance transactions are taken first, then a random subset of the most
/// profitable remaining transactions, grouped per sender so nonce order is preserved.
/// </summary>
/// <param name="txsToLook">How many of the top-gas-price transactions to consider.</param>
/// <param name="txsToTake">Maximum number of transactions to return.</param>
/// <param name="era">The consensus era the proposal is for.</param>
public IReadOnlyCollection<TransactionReceipt> Peek(int txsToLook, int txsToTake, ulong era)
{
    // Should we add lock (_transactions) here? Because old txes can be replaced by new ones
    Logger.LogTrace($"Proposing Transactions from pool");
    // try sanitizing mempool ...
    lock (_toDeleteRepo)
    {
        SanitizeMemPool(era - 1);
    }
    // it's possible that block for this era is already persisted,
    // so we should return an empty set of transactions in this case
    if (era <= _lastSanitized)
    {
        return(new List<TransactionReceipt>());
    }
    var rnd = new Random();
    HashSet<UInt256> takenTxHashes = new HashSet<UInt256>();
    lock (_transactions)
    {
        // take governance transaction from transaction queue
        foreach (var receipt in _transactionsQueue)
        {
            if (!IsGovernanceTx(receipt))
            {
                continue;
            }
            var hash = receipt.Hash;
            // Skip txs already taken, no longer in the pool, or already persisted on chain.
            if (takenTxHashes.Contains(hash) || !_transactions.ContainsKey(hash) || _transactionManager.GetByHash(hash) != null)
            {
                continue;
            }
            takenTxHashes.Add(hash);
        }
        // Governance txs alone already fill the quota.
        if (takenTxHashes.Count >= txsToTake)
        {
            return(Take(takenTxHashes, era));
        }
        // We first greedily take some most profitable transactions. Let's group by sender and
        // peek the best by gas price (so we do not break nonce order)
        var txsBySender = new Dictionary<UInt160, List<TransactionReceipt>>();
        var orderedTransactionsQueue = _transactionsQueue.OrderBy(x => x, new ReceiptComparer()).Reverse();
        foreach (var receipt in orderedTransactionsQueue)
        {
            if (IsGovernanceTx(receipt))
            {
                continue;
            }
            var hash = receipt.Hash;
            if (takenTxHashes.Contains(hash) || !_transactions.ContainsKey(hash) || _transactionManager.GetByHash(hash) != null)
            {
                continue;
            }
            if (txsBySender.ContainsKey(receipt.Transaction.From))
            {
                txsBySender[receipt.Transaction.From].Add(receipt);
            }
            else
            {
                txsBySender.Add(receipt.Transaction.From, new List<TransactionReceipt> { receipt });
            }
        }
        // We maintain heap of current transaction for each sender
        var heap = new C5.IntervalHeap<TransactionReceipt>(new GasPriceReceiptComparer());
        foreach (var txs in txsBySender.Values)
        {
            heap.Add(txs.Last());
        }
        var bestTxs = new List<TransactionReceipt>();
        for (var i = 0; i < txsToLook && !heap.IsEmpty; ++i)
        {
            var tx = heap.DeleteMax(); // peek best available tx
            bestTxs.Add(tx);
            var txsFrom = txsBySender[tx.Transaction.From];
            txsFrom.RemoveAt(txsFrom.Count - 1);
            if (txsFrom.Count != 0)
            {
                // If there are more txs from this sender, add them to heap
                heap.Add(txsFrom.Last());
            }
        }
        // Regroup transactions in order to take some random subset
        txsBySender = bestTxs
            .OrderBy(x => x, new ReceiptComparer())
            .GroupBy(receipt => receipt.Transaction.From)
            .ToDictionary(receipts => receipts.Key, receipts => receipts.Reverse().ToList());
        int alreadyTakenCount = takenTxHashes.Count;
        for (var i = 0; i < txsToTake - alreadyTakenCount && txsBySender.Count > 0; ++i)
        {
            // Pick a random sender and take its next-best transaction.
            var key = rnd.SelectRandom(txsBySender.Keys);
            var txsFrom = txsBySender[key];
            var tx = txsFrom.Last();
            takenTxHashes.Add(tx.Hash);
            txsFrom.RemoveAt(txsFrom.Count - 1);
            if (txsFrom.Count == 0)
            {
                txsBySender.Remove(key);
            }
        }
        return(Take(takenTxHashes, era));
    }
}