Example #1
        /// <summary>
        /// Local helper method to add an item to the given thread-local list,
        /// synchronizing with stealing threads or a pending bag freeze when necessary.
        /// </summary>
        /// <param name="list">The thread-local list to add the item to</param>
        /// <param name="item">The item to add</param>
        private void AddInternal(ThreadLocalList list, T item)
        {
            bool lockTaken = false;

            try
            {
#pragma warning disable 0420
                Interlocked.Exchange(ref list.m_currentOp, (int)ListOperation.Add);
#pragma warning restore 0420
                // Synchronization cases:
                // - the list count is less than two, to avoid conflicting with any stealing thread
                // - m_needSync is set, meaning a thread needs to freeze the bag
                if (list.Count < 2 || m_needSync)
                {
                    // reset it back to zero to avoid deadlock with stealing thread
                    list.m_currentOp = (int)ListOperation.None;
                    Monitor2.Enter(list, ref lockTaken);
                }
                list.Add(item, lockTaken);
            }
            finally
            {
                list.m_currentOp = (int)ListOperation.None;
                if (lockTaken)
                {
                    Monitor.Exit(list);
                }
            }
        }
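For context, a minimal sketch of how a public Add method would typically drive AddInternal, reusing the GetThreadList helper that also appears in the TryTakeOrPeek example below (the exact wrapper is an assumption, not shown in this listing):

        public void Add(T item)
        {
            // Get (or lazily create) the calling thread's local list, then defer to
            // AddInternal above for the synchronization protocol.
            ThreadLocalList list = GetThreadList(true);
            AddInternal(list, item);
        }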
        /// <summary>"Freezes" the bag, such that no concurrent operations will be mutating the bag when it returns.</summary>
        /// <param name="lockTaken">true if the global lock was taken; otherwise, false.</param>
        private void FreezeBag(ref bool lockTaken)
        {
            // Take the global lock to start freezing the bag.  This helps, for example,
            // to prevent other threads from joining the bag (adding their local queues)
            // while a global operation is in progress.
            //Debug.Assert(!Monitor.IsEntered(GlobalQueuesLock));
            Monitor2.Enter(GlobalQueuesLock, ref lockTaken);
            WorkStealingQueue head = _workStealingQueues; // stable at least until GlobalQueuesLock is released in UnfreezeBag

            // Then acquire all local queue locks, noting on each that it's been taken.
            for (WorkStealingQueue queue = head; queue != null; queue = queue._nextQueue)
            {
                Monitor2.Enter(queue, ref queue._frozen);
            }
            Thread.MemoryBarrier(); // prevent reads of _currentOp from moving before writes to _frozen

            // Finally, wait for all unsynchronized operations on each queue to be done.
            for (WorkStealingQueue queue = head; queue != null; queue = queue._nextQueue)
            {
                if (queue._currentOp != (int)Operation.None)
                {
                    var spinner = new SpinWait();
                    do
                    {
                        spinner.SpinOnce();
                    } while (queue._currentOp != (int)Operation.None);
                }
            }
        }
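A matching unfreeze step would release the per-queue locks and then the global lock, in the reverse order of FreezeBag. A minimal sketch, assuming the same _workStealingQueues/_frozen/GlobalQueuesLock fields used above (not the verbatim library code):

        private void UnfreezeBag(bool lockTaken)
        {
            // Release every local queue lock that FreezeBag marked as taken.
            for (WorkStealingQueue queue = _workStealingQueues; queue != null; queue = queue._nextQueue)
            {
                if (queue._frozen)
                {
                    queue._frozen = false;
                    Monitor.Exit(queue);
                }
            }

            // Then release the global lock, if FreezeBag managed to take it.
            if (lockTaken)
            {
                Monitor.Exit(GlobalQueuesLock);
            }
        }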
Example #3
        /// <summary>
        /// Local helper method to freeze all bag operations. It:
        /// 1- Acquires the global lock to prevent any other thread from freezing the bag, and so that no new thread can be added
        /// to the dictionary
        /// 2- Then acquires all local list locks to prevent steal and synchronized operations
        /// 3- Waits for all unsynchronized operations to be done
        /// </summary>
        /// <param name="lockTaken">Receives the lock-taken result for the global lock, to be passed to the Unfreeze method</param>
        private void FreezeBag(ref bool lockTaken)
        {
            // Take the global lock to be safe against multiple threads freezing the bag concurrently and corrupting m_needSync
            Monitor2.Enter(m_globalListsLock, ref lockTaken);

            // This will force any future add/take operation to be synchronized
            m_needSync = true;

            // Acquire all local list locks
            AcquireAllLocks();

            // Wait for all unsynchronized operations to be done
            WaitAllOperations();
        }
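The WaitAllOperations call above spin-waits until no local list is in the middle of an unsynchronized Add/Take. A minimal sketch of such a helper, assuming the m_headList/m_nextList/m_currentOp fields used elsewhere in these examples:

        private void WaitAllOperations()
        {
            ThreadLocalList currentList = m_headList;
            while (currentList != null)
            {
                // Spin until any unsynchronized operation in flight on this list completes.
                SpinWait spinner = new SpinWait();
                while (currentList.m_currentOp != (int)ListOperation.None)
                {
                    spinner.SpinOnce();
                }
                currentList = currentList.m_nextList;
            }
        }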
Example #4
 private void AcquireLocks(int fromInclusive, int toExclusive, ref int locksAcquired)
 {
     for (int index = fromInclusive; index < toExclusive; ++index)
     {
         bool taken = false;
         try
         {
             Monitor2.Enter(this.m_locks[index], ref taken);
         }
         finally
         {
             if (taken)
             {
                 ++locksAcquired;
             }
         }
     }
 }
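AcquireLocks is typically paired with an acquire-all entry point and a release helper so a caller can unwind exactly the locks it holds, even if it was interrupted partway through. A minimal sketch of that pairing (the AcquireAllLocks/ReleaseLocks names here are assumptions for illustration):

 private void AcquireAllLocks(ref int locksAcquired)
 {
     // Take lock 0 first (it also guards table resizing), then the rest of the stripe.
     AcquireLocks(0, 1, ref locksAcquired);
     AcquireLocks(1, this.m_locks.Length, ref locksAcquired);
 }

 private void ReleaseLocks(int fromInclusive, int toExclusive)
 {
     // Release only the range of locks the caller actually acquired.
     for (int index = fromInclusive; index < toExclusive; ++index)
     {
         Monitor.Exit(this.m_locks[index]);
     }
 }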
Example #5
        /// <summary>
        /// Local helper method to acquire all local list locks
        /// </summary>
        private void AcquireAllLocks()
        {
            bool            lockTaken   = false;
            ThreadLocalList currentList = m_headList;

            while (currentList != null)
            {
                // Try/Finally block to avoid a thread abort between acquiring the lock and setting the taken flag
                try
                {
                    Monitor2.Enter(currentList, ref lockTaken);
                }
                finally
                {
                    if (lockTaken)
                    {
                        currentList.m_lockTaken = true;
                        lockTaken = false;
                    }
                }
                currentList = currentList.m_nextList;
            }
        }
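The matching release helper walks the same linked list of thread-local lists and exits only the locks that were marked as taken. A minimal sketch, assuming the same m_lockTaken flag set above:

        private void ReleaseAllLocks()
        {
            ThreadLocalList currentList = m_headList;
            while (currentList != null)
            {
                if (currentList.m_lockTaken)
                {
                    currentList.m_lockTaken = false;
                    Monitor.Exit(currentList);
                }
                currentList = currentList.m_nextList;
            }
        }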
Example #6
        /// <summary>
        /// Local helper function to Take or Peek an item from the bag
        /// </summary>
        /// <param name="result">To receive the item retrieved from the bag</param>
        /// <param name="take">True means Take operation, false means Peek operation</param>
        /// <returns>True if succeeded, false otherwise</returns>
        private bool TryTakeOrPeek(out T result, bool take)
        {
            // Get the local list for this thread; it is null if the thread doesn't exist
            // (this thread has never added an item before)
            ThreadLocalList list = GetThreadList(false);

            if (list == null || list.Count == 0)
            {
                return Steal(out result, take);
            }

            bool lockTaken = false;

            try
            {
                if (take) // Take operation
                {
#pragma warning disable 0420
                    Interlocked.Exchange(ref list.m_currentOp, (int)ListOperation.Take);
#pragma warning restore 0420
                    // Synchronization cases:
                    // - the list count is less than or equal to two, to avoid conflicting with any stealing thread
                    // - m_needSync is set, meaning a thread needs to freeze the bag
                    if (list.Count <= 2 || m_needSync)
                    {
                        // reset it back to zero to avoid deadlock with stealing thread
                        list.m_currentOp = (int)ListOperation.None;
                        Monitor2.Enter(list, ref lockTaken);

                        // Double check the count and steal if it became empty
                        if (list.Count == 0)
                        {
                            // Release the lock before stealing
                            if (lockTaken)
                            {
                                try { }
                                finally
                                {
                                    lockTaken = false; // reset lockTaken to avoid calling Monitor.Exit again in the finally block
                                    Monitor.Exit(list);
                                }
                            }
                            return Steal(out result, true);
                        }
                    }
                    list.Remove(out result);
                }
                else
                {
                    if (!list.Peek(out result))
                    {
                        return Steal(out result, false);
                    }
                }
            }
            finally
            {
                list.m_currentOp = (int)ListOperation.None;
                if (lockTaken)
                {
                    Monitor.Exit(list);
                }
            }
            return true;
        }
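The public Take/Peek surface would simply forward to this helper with the appropriate take flag; a minimal sketch of such wrappers:

        public bool TryTake(out T result)
        {
            return TryTakeOrPeek(out result, true);
        }

        public bool TryPeek(out T result)
        {
            return TryTakeOrPeek(out result, false);
        }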
Example #7
        private bool TryAddInternal(TKey key, TValue value, bool updateIfExists, bool acquireLock, out TValue resultingValue)
        {
            int hashCode = this.m_comparer.GetHashCode(key);

label_1:
            ConcurrentDictionary<TKey, TValue>.Node[] buckets = this.m_buckets;
            int bucketNo;
            int lockNo;

            this.GetBucketAndLockNo(hashCode, out bucketNo, out lockNo, buckets.Length);
            bool flag  = false; // whether the table should be grown after this add
            bool taken = false;

            try
            {
                if (acquireLock)
                {
                    Monitor2.Enter(this.m_locks[lockNo], ref taken);
                }
                if (buckets == this.m_buckets)
                {
                    ConcurrentDictionary<TKey, TValue>.Node node1 = null; // previous node in the bucket's chain
                    for (ConcurrentDictionary<TKey, TValue>.Node next = buckets[bucketNo]; next != null; next = next.m_next)
                    {
                        if (this.m_comparer.Equals(next.m_key, key))
                        {
                            if (updateIfExists)
                            {
                                // Create a replacement node carrying the updated value.
                                ConcurrentDictionary<TKey, TValue>.Node node2 = new ConcurrentDictionary<TKey, TValue>.Node(next.m_key, value, hashCode, next.m_next);

                                if (node1 == null)
                                {
                                    buckets[bucketNo] = node2;
                                }
                                else
                                {
                                    node1.m_next = node2;
                                }
                                resultingValue = value;
                            }
                            else
                            {
                                resultingValue = next.m_value;
                            }
                            return false;
                        }
                        node1 = next;
                    }
                    buckets[bucketNo] = new ConcurrentDictionary<TKey, TValue>.Node(key, value, hashCode, buckets[bucketNo]);

                    checked { ++this.m_countPerLock[lockNo]; }
                    if (this.m_countPerLock[lockNo] > buckets.Length / this.m_locks.Length)
                    {
                        flag = true;
                    }
                }
                else
                {
                    goto label_1;
                }
            }
            finally
            {
                if (taken)
                {
                    Monitor.Exit(this.m_locks[lockNo]);
                }
            }
            if (flag)
            {
                this.GrowTable(buckets);
            }
            resultingValue = value;
            return true;
        }
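The bucket/lock mapping used above comes down to two modulo operations; a minimal sketch of a GetBucketAndLockNo helper that is consistent with how it is called here (the exact body is an assumption, not taken from this listing):

        private void GetBucketAndLockNo(int hashcode, out int bucketNo, out int lockNo, int bucketCount)
        {
            // Strip the sign bit so the modulo result is non-negative, then map the
            // bucket index onto one of the lock stripes.
            bucketNo = (hashcode & 0x7fffffff) % bucketCount;
            lockNo   = bucketNo % this.m_locks.Length;
        }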
            /// <summary>Remove an item from the tail of the queue.</summary>
            /// <param name="result">The removed item.</param>
            /// <returns>true if an element was removed; otherwise, false.</returns>
            internal bool TryLocalPop(out T result)
            {
                Debug.Assert(Environment2.CurrentManagedThreadId == _ownerThreadId);

                int tail = _tailIndex;

                if (_headIndex >= tail)
                {
                    result = default(T);
                    return false;
                }

                bool lockTaken = false;

                try
                {
                    // Decrement the tail using a full fence to ensure subsequent reads don't reorder before this.
                    // If the read of _headIndex moved before this write to _tailIndex, we could erroneously end up
                    // popping an element that's concurrently being stolen, leading to the same element being
                    // dequeued from the bag twice.
                    _currentOp = (int)Operation.Take;
                    Interlocked.Exchange(ref _tailIndex, --tail);

                    // If there is no interaction with a steal, we can head down the fast path.
                    // Note that we use _headIndex < tail rather than _headIndex <= tail to account
                    // for stealing peeks, which don't increment _headIndex, and which could observe
                    // the written default(T) in a race condition to peek at the element.
                    if (!_frozen && _headIndex < tail)
                    {
                        int idx = tail & _mask;
                        result      = _array[idx];
                        _array[idx] = default(T);
                        _addTakeCount--;
                        return true;
                    }
                    else
                    {
                        // Interaction with steals: 0 or 1 elements left.
                        _currentOp = (int)Operation.None; // set back to None to avoid a deadlock
                        Monitor2.Enter(this, ref lockTaken);
                        if (_headIndex <= tail)
                        {
                            // Element still available. Take it.
                            int idx = tail & _mask;
                            result      = _array[idx];
                            _array[idx] = default(T);
                            _addTakeCount--;
                            return true;
                        }
                        else
                        {
                            // We encountered a race condition and the element was stolen, restore the tail.
                            _tailIndex = tail + 1;
                            result     = default(T);
                            return false;
                        }
                    }
                }
                finally
                {
                    _currentOp = (int)Operation.None;
                    if (lockTaken)
                    {
                        Monitor.Exit(this);
                    }
                }
            }
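A peek variant of the same queue operation can simply take the queue lock, since it never races with a steal to remove the element; a minimal sketch of such a TryLocalPeek (an assumption, not part of the listing above):

            internal bool TryLocalPeek(out T result)
            {
                Debug.Assert(Environment2.CurrentManagedThreadId == _ownerThreadId);

                int tail = _tailIndex;
                if (_headIndex < tail)
                {
                    // Lock so a concurrent steal can't clear the tail slot while we read it.
                    lock (this)
                    {
                        if (_headIndex < _tailIndex)
                        {
                            result = _array[(_tailIndex - 1) & _mask];
                            return true;
                        }
                    }
                }

                result = default(T);
                return false;
            }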
            /// <summary>
            /// Add new item to the tail of the queue.
            /// </summary>
            /// <param name="item">The item to add.</param>
            internal void LocalPush(T item)
            {
                Debug.Assert(Environment2.CurrentManagedThreadId == _ownerThreadId);
                bool lockTaken = false;

                try
                {
                    // Full fence to ensure subsequent reads don't get reordered before this
                    Interlocked.Exchange(ref _currentOp, (int)Operation.Add);
                    int tail = _tailIndex;

                    // Rare corner case (at most once every 2 billion pushes on this thread):
                    // We're going to increment the tail; if we'll overflow, then we need to reset our counts
                    if (tail == int.MaxValue)
                    {
                        _currentOp = (int)Operation.None; // set back to None temporarily to avoid a deadlock
                        lock (this)
                        {
                            Debug.Assert(_tailIndex == int.MaxValue, "No other thread should be changing _tailIndex");

                            // Rather than resetting to zero, we'll just mask off the bits we don't care about.
                            // This way we don't need to rearrange the items already in the queue; they'll be found
                            // correctly exactly where they are.  One subtlety here is that we need to make sure that
                            // if head is currently < tail, it remains that way.  This happens to just fall out from
                            // the bit-masking, because we only do this if tail == int.MaxValue, meaning that all
                            // bits are set, so all of the bits we're keeping will also be set.  Thus it's impossible
                            // for the head to end up > than the tail, since you can't set any more bits than all of them.
                            _headIndex = _headIndex & _mask;
                            _tailIndex = tail = _tailIndex & _mask;
                            Debug.Assert(_headIndex <= _tailIndex);

                            _currentOp = (int)Operation.Add;
                        }
                    }

                    // We'd like to take the fast path that doesn't require locking, if possible. It's not possible if another
                    // thread is currently requesting that the whole bag synchronize, e.g. a ToArray operation.  It's also
                    // not possible if there are fewer than two spaces available.  One space is necessary for obvious reasons:
                    // to store the element we're trying to push.  The other is necessary due to synchronization with steals.
                    // A stealing thread first increments _headIndex to reserve the slot at its old value, and then tries to
                    // read from that slot.  We could potentially have a race condition whereby _headIndex is incremented just
                    // before this check, in which case we could overwrite the element being stolen as that slot would appear
                    // to be empty.  Thus, we only allow the fast path if there are two empty slots.
                    if (!_frozen && tail < (_headIndex + _mask))
                    {
                        _array[tail & _mask] = item;
                        _tailIndex           = tail + 1;
                    }
                    else
                    {
                        // We need to contend with foreign operations (e.g. steals, enumeration, etc.), so we lock.
                        _currentOp = (int)Operation.None; // set back to None to avoid a deadlock
                        Monitor2.Enter(this, ref lockTaken);

                        int head  = _headIndex;
                        int count = _tailIndex - _headIndex;

                        // If we're full, expand the array.
                        if (count >= _mask)
                        {
                            // Expand the queue by doubling its size.
                            var newArray = new T[_array.Length << 1];
                            int headIdx  = head & _mask;
                            if (headIdx == 0)
                            {
                                Array.Copy(_array, 0, newArray, 0, _array.Length);
                            }
                            else
                            {
                                Array.Copy(_array, headIdx, newArray, 0, _array.Length - headIdx);
                                Array.Copy(_array, 0, newArray, _array.Length - headIdx, headIdx);
                            }

                            // Reset the field values
                            _array     = newArray;
                            _headIndex = 0;
                            _tailIndex = tail = count;
                            _mask      = (_mask << 1) | 1;
                        }

                        // Add the element
                        _array[tail & _mask] = item;
                        _tailIndex           = tail + 1;

                        // Update the count to avoid overflow.  We can trust _stealCount here,
                        // as we're inside the lock and it's only manipulated there.
                        _addTakeCount -= _stealCount;
                        _stealCount    = 0;
                    }

                    // Increment the count from the add/take perspective
                    checked { _addTakeCount++; }
                }
                finally
                {
                    _currentOp = (int)Operation.None;
                    if (lockTaken)
                    {
                        Monitor.Exit(this);
                    }
                }
            }
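As an aside on the indexing used throughout LocalPush and TryLocalPop: because the array length is kept a power of two, masking with _mask (length - 1) is equivalent to taking the index modulo the array length, even as the head/tail counters grow monotonically. A tiny standalone illustration with hypothetical values:

            // With an array length of 8 the mask is 7 (binary 111): successive tail values
            // 5,6,7,8,9,... map to slots 5,6,7,0,1,... without an explicit modulo.
            const int exampleMask = 8 - 1;
            for (int tail = 5; tail < 13; tail++)
            {
                int slot = tail & exampleMask;
                Console.WriteLine("tail=" + tail + " -> slot=" + slot);
            }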