/// <summary>
/// Removes the head of the queue (node with minimum priority; ties are broken by order of insertion), and sets it to first.
/// Useful for multi-threading, where the queue may become empty between calls to Contains() and Dequeue()
/// Returns true if successful; false if queue was empty
/// O(log n)
/// </summary>
public bool TryDequeue(out TItem first)
{
    lock (_queue)
    {
        if (_queue.Count <= 0)
        {
            first = default(TItem);
            return false;
        }

        SimpleNode node = _queue.Dequeue();
        first = node.Data;
        RemoveFromNodeCache(node);
        return true;
    }
}
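// Example usage (illustrative sketch, not part of this class): a consumer loop that keeps
// draining until the queue is empty, safe even if other threads enqueue or dequeue
// concurrently. Assumes an instance of this queue type with string items and int
// priorities, named "queue".
//
//     string next;
//     while (queue.TryDequeue(out next))
//     {
//         Console.WriteLine(next);
//     }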
/// <summary>
/// Adds an item to the Node-cache to allow for many methods to be O(1) or O(log n)
/// </summary>
private void AddToNodeCache(SimpleNode node)
{
    if (node.Data == null)
    {
        _nullNodesCache.Add(node);
        return;
    }

    IList<SimpleNode> nodes;
    if (!_itemToNodesCache.TryGetValue(node.Data, out nodes))
    {
        nodes = new List<SimpleNode>();
        _itemToNodesCache[node.Data] = nodes;
    }
    nodes.Add(node);
}
/// <summary>
/// Enqueue a node to the priority queue. Lower values are placed in front. Ties are broken by first-in-first-out.
/// This queue automatically resizes itself, so there's no concern of the queue becoming 'full'.
/// Duplicates and null values are allowed.
/// O(log n)
/// </summary>
public void Enqueue(TItem item, TPriority priority)
{
    lock (_queue)
    {
        IList<SimpleNode> nodes;
        if (item == null)
        {
            nodes = _nullNodesCache;
        }
        else if (!_itemToNodesCache.TryGetValue(item, out nodes))
        {
            nodes = new List<SimpleNode>();
            _itemToNodesCache[item] = nodes;
        }
        SimpleNode node = EnqueueNoLockOrCache(item, priority);
        nodes.Add(node);
    }
}
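// Example usage (illustrative sketch): duplicates and nulls are both accepted, and items
// with equal priority dequeue in insertion order. Assumes an instance of this queue type
// with string items and int priorities, named "queue".
//
//     queue.Enqueue("low", 10);
//     queue.Enqueue("high", 1);
//     queue.Enqueue("high", 1);   // duplicate item is allowed
//     queue.Enqueue(null, 5);     // null is allowed
//     // Dequeue order: "high", "high", null, "low"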
/// <summary>
/// Removes an item from the Node-cache to allow for many methods to be O(1) or O(log n) (assuming no duplicates)
/// </summary>
private void RemoveFromNodeCache(SimpleNode node)
{
    if (node.Data == null)
    {
        _nullNodesCache.Remove(node);
        return;
    }

    IList<SimpleNode> nodes;
    if (!_itemToNodesCache.TryGetValue(node.Data, out nodes))
    {
        return;
    }
    nodes.Remove(node);
    if (nodes.Count == 0)
    {
        _itemToNodesCache.Remove(node.Data);
    }
}
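// Note on the cache (illustrative, restating the methods above): _itemToNodesCache maps each
// non-null item to the list of heap nodes currently holding it, and _nullNodesCache holds the
// nodes whose item is null. With duplicates, nodes.Remove(node) is a linear scan of that
// per-item list, which is why the stated bounds assume no duplicates.
//
//     // After the Enqueue example above, the caches would look roughly like:
//     //   _itemToNodesCache: { "low" -> [node], "high" -> [node, node] }
//     //   _nullNodesCache:   [node]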