Beispiel #1
0
        /// <summary>
        /// Adds the provided item to the <see cref="ConcurrentIsolatedQueue{T}"/>. This method is thread safe.
        /// </summary>
        /// <param name="item">the item to enqueue; cannot be null</param>
        /// <exception cref="ArgumentNullException">if <paramref name="item"/> is null</exception>
        public void Enqueue(T item)
        {
            if (item == null)
            {
                throw new ArgumentNullException(nameof(item));
            }

            //I must acquire the current head before I increment m_enqueueCount
            //This effectively takes a snapshot before the increment operation occurs
            //If I waited until after the increment, then it's possible that multiple
            //nodes have passed and I'd never be able to find the one needed to
            //assign this item.
            IsolatedNode currentNode = m_currentHead;

            //Full fence: prevents the read of m_currentHead above from being
            //reordered after the Interlocked.Increment below.
            Thread.MemoryBarrier();

            //Claim a globally unique sequence number, then split it into the
            //node it belongs to (positionIndex) and the slot within that node.
            long index         = Interlocked.Increment(ref m_enqueueCount) - 1;
            long positionIndex = index >> ShiftBits;
            int  arrayIndex    = (int)(index & BitMask);

            while (true)
            {
                // ReSharper disable once PossibleNullReferenceException
                if (currentNode.PositionIndex == positionIndex)
                {
                    //Found the node that owns our sequence number; publish the item.
                    currentNode.Slots[arrayIndex] = item;
                    return;
                }
                if (currentNode.NextNode == null)
                {
                    //The chain hasn't grown far enough yet; race other producers to extend it.
                    Grow(currentNode);
                }
                //Walk forward toward the node holding positionIndex.
                currentNode = currentNode.NextNode;
            }
        }
Beispiel #2
0
        /// <summary>
        /// Enqueues <paramref name="length"/> items from <paramref name="items"/> starting at
        /// <paramref name="offset"/>, spilling across multiple nodes as each head node fills.
        /// </summary>
        /// <param name="items">the source array containing the items to enqueue</param>
        /// <param name="offset">the index of the first item to enqueue</param>
        /// <param name="length">the number of items to enqueue</param>
        /// <remarks>
        /// NOTE(review): m_enqueueCount is incremented without Interlocked here, which
        /// looks like this method assumes a single writer — confirm against the class contract.
        /// </remarks>
        public void Enqueue(T[] items, int offset, int length)
        {
            items.ValidateParameters(offset, length);
            while (true)
            {
                //If the header is empty. Get a new one.
                if (m_currentHead == null || !m_currentHead.CanEnqueue)
                {
                    m_currentHead = GetNode();
                    m_currentHead.Reset();
                    //Full fence: ensure the reset node is fully initialized before
                    //it becomes reachable through m_blocks.
                    Thread.MemoryBarrier();
                    m_blocks.Enqueue(m_currentHead);
                    m_enqueueCount++;
                }

                //If the remainder will fit, queue it all.
                int enqueueLength = m_currentHead.AvailableEnqueueLength;
                if (length <= enqueueLength)
                {
                    m_currentHead.Enqueue(items, offset, length);
                    m_enqueueCount += length;
                    return;
                }

                //Some will not fit. Enqueue what will fit, then repeat.
                m_currentHead.Enqueue(items, offset, enqueueLength);
                m_enqueueCount += enqueueLength;
                offset         += enqueueLength;
                length         -= enqueueLength;
            }
        }
Beispiel #3
0
 /// <summary>
 /// Returns a node to the internal pool so it can be reused.
 /// </summary>
 /// <param name="resource">the node to return to the pool</param>
 void ReleaseNode(IsolatedNode resource)
 {
     //Cap the pool size; when it is already full, simply drop the node
     //and let the garbage collector reclaim it.
     const int MaxPooledNodes = 1000;
     if (m_pooledNodes.Count >= MaxPooledNodes)
     {
         return;
     }
     m_pooledNodes.Enqueue(resource);
 }
Beispiel #4
0
 /// <summary>
 /// Ensures a usable head node exists — allocating and publishing a new one when the
 /// current head is missing or full — then enqueues the item into it.
 /// </summary>
 /// <param name="item">the item to enqueue</param>
 void EnqueueSlower(T item)
 {
     if (m_currentHead == null || !m_currentHead.CanEnqueue)
     {
         m_currentHead = new IsolatedNode(m_unitCount);
         //Full fence: make sure the new node is fully constructed before
         //it becomes reachable through m_blocks.
         Thread.MemoryBarrier();
         m_blocks.Enqueue(m_currentHead);
         //NOTE(review): incremented only when a new node is created — presumably the
         //fast path accounts for items elsewhere; confirm against the caller.
         m_enqueueCount++;
     }
     m_currentHead.Enqueue(item);
 }
Beispiel #5
0
 /// <summary>
 /// Ensures a usable head node exists — taking one from the pool and publishing it when
 /// the current head is missing or full — then enqueues the item into it.
 /// </summary>
 /// <param name="item">the item to enqueue</param>
 void EnqueueSlower(T item)
 {
     if (m_currentHead == null || !m_currentHead.CanEnqueue)
     {
         m_currentHead = GetNode();
         m_currentHead.Reset();
         //Full fence: make sure the recycled node is fully reset before
         //it becomes reachable through m_blocks.
         Thread.MemoryBarrier();
         m_blocks.Enqueue(m_currentHead);
         //NOTE(review): incremented only when a new node is created — presumably the
         //fast path accounts for items elsewhere; confirm against the caller.
         m_enqueueCount++;
     }
     m_currentHead.Enqueue(item);
 }
Beispiel #6
0
        /// <summary>
        /// Attempts to dequeue the specified item from the <see cref="ConcurrentIsolatedQueue{T}"/>. This method is NOT thread safe.
        /// </summary>
        /// <param name="item">an output for the item</param>
        /// <returns>true if an item was dequeued; false if the queue appeared empty</returns>
        /// <remarks>
        /// During a race condition, the queue might not be completely empty when TryDequeue returns false. Instead this method returns false
        /// rather than blocking and waiting on the race condition to satisfy.
        /// </remarks>
        public bool TryDequeue(out T item)
        {
            //Split the dequeue sequence number into the node it belongs to
            //(positionIndex) and the slot within that node (arrayIndex).
            long positionIndex = m_dequeueCount >> ShiftBits;
            int  arrayIndex    = (int)(m_dequeueCount & BitMask);

TryAgain:
            if (m_currentTail.PositionIndex == positionIndex)
            {
                item = m_currentTail.Slots[arrayIndex];
                if (item == null)
                {
                    //A producer may have claimed this slot but not yet published
                    //the item (or the queue is simply empty); report empty rather
                    //than spin-waiting on the race to resolve.
                    return(false);
                }

                //Clear the slot so the node can be recycled and the item released.
                m_currentTail.Slots[arrayIndex] = null;

                if (Environment.Is64BitProcess)
                {
                    //64-bit writes are atomic in a 64-bit process, so a plain
                    //increment cannot be observed torn.
                    m_dequeueCount++;
                }
                else
                {
                    //On a 32-bit process a plain long increment can tear; use an
                    //interlocked increment so other threads never see a torn value.
                    Interlocked.Increment(ref m_dequeueCount);
                }

                return(true);
            }
            if (m_currentTail.NextNode == null)
            {
                //No successor node exists yet; the queue is (momentarily) empty.
                item = null;
                return(false);
            }

            //The tail node is exhausted; advance to its successor and recycle
            //the old node back into the pool.
            IsolatedNode node = m_currentTail;

            m_currentTail = m_currentTail.NextNode;
            node.NextNode = null;
            Pool.Enqueue(node);

            goto TryAgain;
        }
Beispiel #7
0
        /// <summary>
        /// Extends the linked chain of nodes by one when a producer runs past its end.
        /// Safe for multiple producer threads to call concurrently.
        /// </summary>
        /// <param name="currentNode">the node believed to be the last in the chain</param>
        private void Grow(IsolatedNode currentNode)
        {
            //Each thread is in a race to grow the current node. This is to ensure that
            //Enqueue will never block.
            //The person who wins the race will assign m_current with the node they placed.
            //while it's possible that a race condition could assign m_currentHead out of sequence
            //eventually it will catch back up and there will be little performance penalty in the mean time.

            //Note: There is a very small chance of ConcurrentQueue blocking here. But that's ok.
            IsolatedNode nextNode = Pool.Dequeue();

            nextNode.PositionIndex = currentNode.PositionIndex + 1;
            //Only the winner of the CAS links its node into the chain;
            //losers return their candidate node to the pool.
            if (Interlocked.CompareExchange(ref currentNode.NextNode, nextNode, null) == null)
            {
                m_currentHead = nextNode;
            }
            else
            {
                Pool.Enqueue(nextNode);
            }
        }
Beispiel #8
0
 /// <summary>
 /// Creates a new, empty <see cref="ConcurrentIsolatedQueue{T}"/> whose head and
 /// tail both start at a single node with position index 0.
 /// </summary>
 public ConcurrentIsolatedQueue()
 {
     IsolatedNode firstNode = new IsolatedNode();
     firstNode.PositionIndex = 0;
     m_currentHead = firstNode;
     m_currentTail = firstNode;
 }