Example #1
        public void EnqueueMany(ArraySegment<T> data, bool notifyChanged = true)
        {
#if DEBUG
            if (IsMainThreadWorkItem && !IsMainThreadQueue)
            {
                throw new InvalidOperationException("This work item must be queued on the main thread");
            }
#endif

            lock (ItemsLock) {
                AssertCanEnqueue();
                Interlocked.Add(ref ItemsQueued, data.Count);

                // A single work item struct is reused; only its Data payload changes per element
                var wi = new InternalWorkItem<T>();
                wi.Queue      = this;
                wi.OnComplete = null;
                for (var i = 0; i < data.Count; i++)
                {
                    wi.Data = data.Array[data.Offset + i];
                    AddInternal(ref wi);
                }
            }

            if (notifyChanged)
            {
                NotifyChanged();
            }
        }
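These snippets come from a generic work queue whose element type is executed in place: Example #3 below invokes item.Data.Execute() and then passes the payload back to OnComplete by reference. As a minimal sketch of the assumed contract (the interface name IWorkItem and the SleepBriefly struct are illustrative assumptions, not part of the code shown; the delegate shape is inferred from how OnComplete is invoked in Example #3):

        // Assumed contract for queue items: the queue calls Execute() on each item's Data.
        public interface IWorkItem
        {
            void Execute();
        }

        // Completion callback shape inferred from item.OnComplete(ref item.Data) in Example #3.
        public delegate void OnWorkItemComplete<T>(ref T data);

        // Trivial sample work item used by the usage sketches below.
        public struct SleepBriefly : IWorkItem
        {
            public int Milliseconds;

            public void Execute()
            {
                System.Threading.Thread.Sleep(Milliseconds);
            }
        }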
Example #2
        public void Enqueue(ref T data, OnWorkItemComplete<T> onComplete = null, WorkQueueNotifyMode notifyChanged = WorkQueueNotifyMode.Always)
        {
#if DEBUG
            if (IsMainThreadWorkItem && !IsMainThreadQueue)
            {
                throw new InvalidOperationException("This work item must be queued on the main thread");
            }
#endif

            var wi       = new InternalWorkItem<T>(this, ref data, onComplete);
            var newCount = AddInternal(ref wi);
            NotifyChanged(notifyChanged, newCount, wakeAll: false);
        }
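A hypothetical caller for the two enqueue overloads above, assuming the methods belong to a WorkQueue<T> class (its name and construction are not shown in these examples) and using the SleepBriefly item sketched after Example #1:

        static void EnqueueSamples(WorkQueue<SleepBriefly> queue)
        {
            // Single item with a completion callback that sees the payload by reference
            var one = new SleepBriefly { Milliseconds = 1 };
            queue.Enqueue(ref one, (ref SleepBriefly data) => System.Console.WriteLine($"Slept {data.Milliseconds}ms"));

            // Batch enqueue: the whole segment is added under one lock acquisition
            var batch = new SleepBriefly[16];
            for (var i = 0; i < batch.Length; i++)
                batch[i] = new SleepBriefly { Milliseconds = i };
            queue.EnqueueMany(new System.ArraySegment<SleepBriefly>(batch));
        }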
Example #3
        public static void Execute(ref InternalWorkItem<T> item)
        {
#if DEBUG
            if (!item.Valid)
            {
                throw new WorkQueueException(item.Queue, "Invalid work item");
            }
#endif
            item.Data.Execute();
            if (item.OnComplete != null)
            {
                item.OnComplete(ref item.Data);
            }
        }
Example #4
        public void EnqueueMany(ArraySegment<T> data)
        {
            lock (Queue) {
                ManageDrain();

                var wi = new InternalWorkItem<T>();
                wi.Queue      = this;
                wi.OnComplete = null;
                for (var i = 0; i < data.Count; i++)
                {
                    wi.Data = data.Array[data.Offset + i];
                    Queue.Enqueue(wi);
                }
            }
        }
Example #5
        private int AddInternal(ref InternalWorkItem<T> item)
        {
            AssertCanEnqueue();
            var result = Interlocked.Increment(ref ItemsQueued);

            lock (ItemsLock) {
                // Any existing read operations are irrelevant; we're inside the lock, so we own the semilock state
                Volatile.Write(ref _Semilock, Semilock_Adding);
                EnsureCapacityLocked(_Count + 1);
                _Items[_Tail] = item;
                AdvanceLocked(ref _Tail);
                _Count++;
                Interlocked.CompareExchange(ref _Semilock, Semilock_Open, Semilock_Adding);
            }
            return result;
        }
Example #6
        public int Step(out bool exhausted, int? maximumCount = null)
        {
            int result = 0, count = 0;
            int actualMaximumCount    = maximumCount.GetValueOrDefault(DefaultStepCount);
            InternalWorkItem<T> item = default(InternalWorkItem<T>);

            bool running = true;

            do
            {
                lock (Queue) {
                    running = ((count = Queue.Count) > 0) &&
                              (result < actualMaximumCount);

                    if (running)
                    {
                        InFlightTasks++;

                        item = Queue.Dequeue();
                    }
                }

                if (running)
                {
                    item.Data.Execute();
                    if (item.OnComplete != null)
                    {
                        item.OnComplete(ref item.Data);
                    }

                    result++;

                    lock (Queue) {
                        InFlightTasks--;
                        if ((Queue.Count == 0) && (InFlightTasks <= 0))
                        {
                            DrainComplete.Set();
                        }
                    }
                }
            } while (running);

            lock (Queue)
                exhausted = Queue.Count == 0;

            return result;
        }
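Step (Example #6) processes at most a bounded number of items per call and reports via the out parameter whether the queue was left empty. A hypothetical pump loop built on it; the batch size and the yield between passes are arbitrary choices for this sketch:

        static void DrainQueue(WorkQueue<SleepBriefly> queue)
        {
            bool exhausted;
            do
            {
                // Process at most 64 items per pass so a single caller cannot monopolize the queue
                queue.Step(out exhausted, 64);

                // Let other threads enqueue or process before the next pass
                if (!exhausted)
                    System.Threading.Thread.Yield();
            } while (!exhausted);
        }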
Example #7
        private bool TryDequeue(out InternalWorkItem<T> item, out bool empty)
        {
            // Attempt to transition the semilock into read mode; as long as it wasn't in add mode, try the empty-queue fast path below
            if (Interlocked.CompareExchange(ref _Semilock, Semilock_Reading, Semilock_Open) != 3)
            {
                // Determine whether we can early-out this dequeue operation because we successfully entered
                //  the semilock either during an enqueue or when it wasn't held at all
                // FIXME: Is it safe to do this check during a dequeue? I think so
                empty = Volatile.Read(ref _Count) <= 0;
                // We may have entered this block without acquiring the semilock in open mode, in which case
                //  whoever has it (in dequeue mode) will release it when they're done
                Interlocked.CompareExchange(ref _Semilock, Semilock_Open, Semilock_Reading);
                if (empty)
                {
                    item = default(InternalWorkItem<T>);
#if INSTRUMENT_FAST_PATH
                    Interlocked.Increment(ref EarlyOutCount);
#endif
                    return false;
                }
            }

            lock (ItemsLock) {
                // We've successfully acquired the state lock, so we own the semilock state now
                Volatile.Write(ref _Semilock, Semilock_Dequeuing);
                if (_Count <= 0)
                {
                    empty = true;
                    item  = default(InternalWorkItem<T>);
#if INSTRUMENT_FAST_PATH
                    Interlocked.Increment(ref SlowOutCount);
#endif
                    Volatile.Write(ref _Semilock, Semilock_Open);
                    return false;
                }

                item          = _Items[_Head];
                _Items[_Head] = default(InternalWorkItem<T>);
                AdvanceLocked(ref _Head);
                _Count--;
                empty = _Count <= 0;
                Volatile.Write(ref _Semilock, Semilock_Open);
                return true;
            }
        }
Example #8
        private void GrowLocked(int capacity)
        {
            var newSize = UnorderedList<T>.PickGrowthSize(_Items.Length, capacity);

            var newItems = new InternalWorkItem<T>[newSize];

            if (_Count > 0)
            {
                if (_Head < _Tail)
                {
                    // Contiguous region: copy it in a single pass
                    Array.Copy(_Items, _Head, newItems, 0, _Count);
                }
                else
                {
                    // The ring buffer wraps; copy the head-to-end and start-to-tail segments separately
                    Array.Copy(_Items, _Head, newItems, 0, _Items.Length - _Head);
                    Array.Copy(_Items, 0, newItems, _Items.Length - _Head, _Tail);
                }
            }
            _Items = newItems;
            _Head  = 0;
            _Tail  = (_Count == capacity) ? 0 : _Count;
        }
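UnorderedList<T>.PickGrowthSize, which chooses the new backing array size above, is not shown in these examples. A common policy is to grow geometrically while never returning less than the requested capacity, roughly like this stand-in (an assumption, not the actual implementation):

        static int PickGrowthSizeSketch(int currentLength, int requestedCapacity)
        {
            // Double the current length until the request fits, starting from a small floor
            var newSize = System.Math.Max(currentLength, 4);
            while (newSize < requestedCapacity)
                newSize *= 2;
            return newSize;
        }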
Example #9
        private void StepInternal(out int result, out bool exhausted, int actualMaximumCount)
        {
            // We eat an extra lock acquisition this way, but it skips a lot of extra work
            // FIXME: Optimize this out since in profiles it eats like 2% of CPU, probably not worth it anymore
            if (IsEmpty)
            {
                result    = 0;
                exhausted = true;
                return;
            }

            InternalWorkItem<T> item = default(InternalWorkItem<T>);
            int  numProcessed = 0;
            bool running = true, signalDrained = false;

            var padded = Owner.Count - (Configuration.ConcurrencyPadding ?? Owner.DefaultConcurrencyPadding);
            var lesser = Math.Min(Configuration.MaxConcurrency ?? 9999, padded);
            var maxConcurrency = Math.Max(lesser, 1);

            exhausted = false;
            result    = 0;
            // TODO: Move this into the loop so we do it at the start of processing the first item?
            if (Interlocked.Increment(ref _NumProcessing) > maxConcurrency)
            {
                Interlocked.Decrement(ref _NumProcessing);
                return;
            }

            int processedCounter = -1;

            do
            {
                try {
                    bool empty = false;
                    running = (actualMaximumCount > 0) &&
                              (numProcessed < actualMaximumCount) &&
                              TryDequeue(out item, out empty);

                    if (empty)
                    {
                        signalDrained = true;
                        exhausted     = true;
                    }

                    if (running)
                    {
                        try {
                            numProcessed++;
                            InternalWorkItem<T>.Execute(ref item);

                            result++;
                        } finally {
                            processedCounter = Interlocked.Increment(ref ItemsProcessed);
                        }
                    }
                } catch (Exception exc) {
                    UnhandledException = ExceptionDispatchInfo.Capture(exc);
                    signalDrained      = true;
                    break;
                }
            } while (running);

            actualMaximumCount -= numProcessed;
            var wasLast = Interlocked.Decrement(ref _NumProcessing) == 0;

            // The following would be ideal but I think it could produce a hang in some cases

            /*
             * // This is a race, but that's probably OK since anyone waiting for the queue to drain will verify and spin if
             * //  we lose the race
             * var signalDone = wasLast && (Volatile.Read(ref ItemsQueued) <= processedCounter);
             */

            if (signalDrained)
            {
                // FIXME: Should we do this first? Assumption is that in a very bad case, the future's
                //  complete handler might begin waiting
                DrainedSignal.Set();
            }

            if (wasLast)
            {
                FinishedProcessingSignal.Set();
            }
        }