/// <summary>
/// Blocks the calling thread until the queue has drained: i.e. until ItemsProcessed
/// catches up with the ItemsQueued watermark captured on entry and the internal count
/// reaches zero.
/// </summary>
/// <param name="timeoutMs">
/// Maximum time to wait in milliseconds, or -1 (the default) to wait indefinitely.
/// NOTE(review): values below -1 are passed straight to ManualResetEventSlim.Wait,
/// which throws ArgumentOutOfRangeException — confirm callers only pass -1 or >= 0.
/// </param>
/// <returns>
/// true if the queue drained to the captured watermark before the timeout elapsed;
/// false if the timeout elapsed first.
/// </returns>
/// <exception cref="Exception">
/// Rethrows (via ExceptionDispatchInfo.Throw, preserving the original stack trace)
/// any unhandled exception captured by a worker thread while we were waiting.
/// </exception>
public bool WaitUntilDrained (int timeoutMs = -1) {
    if (IsEmpty)
        return true;

    // Record that a thread is waiting for drain. The increment's side effect is what
    // matters; the returned count was previously bound to an unused local.
    Interlocked.Increment(ref NumWaitingForDrain);

    // Absolute deadline in ticks; long.MaxValue - 1 stands in for 'no deadline'.
    long endWhen = timeoutMs >= 0
        ? Time.Ticks + (Time.MillisecondInTicks * timeoutMs)
        : long.MaxValue - 1;

    bool doWait = false;
    // Snapshot of the queued count on entry; success means processing at least this many.
    var waterMark = ItemsQueued;

    try {
        do {
            lock (ItemsLock) {
                // Read these first and read the queued count last so that if we lose a race
                // we will lose it in the 'there's extra work' fashion instead of 'we're done
                // but not really' fashion
                var processed = Volatile.Read(ref ItemsProcessed);
                var queued = Volatile.Read(ref ItemsQueued);
                doWait = (processed < queued) || (_Count > 0);
            }

            if (doWait) {
                var now = Time.Ticks;
                if (now > endWhen)
                    break;

                // timeoutMs <= 0 is forwarded unchanged (-1 == infinite, 0 == poll);
                // otherwise wait only for the remaining slice of the deadline.
                var maxWait = (timeoutMs <= 0)
                    ? timeoutMs
                    : (int)(Math.Max(0, endWhen - now) / Time.MillisecondInTicks);

                // Wake workers so pending items get noticed before we sleep.
                NotifyChanged();

                if (DrainedSignal.Wait(maxWait)) {
                    // We successfully got the drain signal, now wait for the
                    // 'processing is probably done' signal
                    now = Time.Ticks;
                    maxWait = (timeoutMs <= 0)
                        ? timeoutMs
                        : (int)(Math.Max(0, endWhen - now) / Time.MillisecondInTicks);
                    if (now > endWhen)
                        break;

                    // Note that we may still spin after getting this signal because it will
                    // fire when all the workers stop running, but that doesn't necessarily
                    // mean all the work is done
                    if (FinishedProcessingSignal.Wait(maxWait)) {
                        FinishedProcessingSignal.Reset();
                        DrainedSignal.Reset();
                    }
                }
            } else {
#if DEBUG
                // Debug-only sanity checks: the queue should genuinely be empty and the
                // watermark reached before we report success.
                if (!IsEmpty)
                    throw new WorkQueueException(this, "Queue is not empty");
                Thread.Yield();
                if (!IsEmpty)
                    throw new WorkQueueException(this, "Queue is not empty");
                if (ItemsProcessed < waterMark)
                    throw new WorkQueueException(this, "AssertDrained returned before reaching watermark");
#endif
                return ItemsProcessed >= waterMark;
            }
        } while (true);
    } finally {
        Interlocked.Decrement(ref NumWaitingForDrain);

        // Surface any worker exception to the waiter rather than swallowing it.
        var uhe = Interlocked.Exchange(ref UnhandledException, null);
        if (uhe != null)
            uhe.Throw();
    }

    return false;
}
// Executes up to actualMaximumCount queued work items on the calling thread.
// result:   number of items actually executed by this call.
// exhausted: set true when the queue was observed empty (either on entry or
//            during dequeue), so the caller knows there is nothing left.
// Effective concurrency is capped at max(1, min(MaxConcurrency ?? 9999,
// Owner.Count - padding)); callers beyond that cap return immediately.
private void StepInternal (out int result, out bool exhausted, int actualMaximumCount) {
    // We eat an extra lock acquisition this way, but it skips a lot of extra work
    // FIXME: Optimize this out since in profiles it eats like 2% of CPU, probably not worth it anymore
    if (IsEmpty) {
        result = 0;
        exhausted = true;
        return;
    }

    InternalWorkItem<T> item = default(InternalWorkItem<T>);
    int numProcessed = 0;
    bool running = true, signalDrained = false;

    // Derive the concurrency cap from the owner's thread count minus the configured
    // padding, clamped to at least 1 so a single caller can always make progress.
    var padded = Owner.Count - (Configuration.ConcurrencyPadding ?? Owner.DefaultConcurrencyPadding);
    var lesser = Math.Min(Configuration.MaxConcurrency ?? 9999, padded);
    var maxConcurrency = Math.Max(lesser, 1);

    exhausted = false;
    result = 0;

    // TODO: Move this into the loop so we do it at the start of processing the first item?
    // Admission control: if incrementing pushes us past the cap, back out and bail.
    if (Interlocked.Increment(ref _NumProcessing) > maxConcurrency) {
        Interlocked.Decrement(ref _NumProcessing);
        return;
    }

    int processedCounter = -1;

    do {
        try {
            bool empty = false;
            // Keep dequeuing while under the per-call limit and items remain.
            running = (actualMaximumCount > 0) &&
                (numProcessed < actualMaximumCount) &&
                TryDequeue(out item, out empty);

            if (empty) {
                signalDrained = true;
                exhausted = true;
            }

            if (running) {
                try {
                    numProcessed++;
                    InternalWorkItem<T>.Execute(ref item);
                    result++;
                } finally {
                    // Count the item as processed even if Execute threw, so drain
                    // accounting stays consistent.
                    processedCounter = Interlocked.Increment(ref ItemsProcessed);
                }
            }
        } catch (Exception exc) {
            // Capture for rethrow on the waiting thread (see WaitUntilDrained),
            // then stop processing and signal drain so waiters wake up.
            UnhandledException = ExceptionDispatchInfo.Capture(exc);
            signalDrained = true;
            break;
        }
    } while (running);

    actualMaximumCount -= numProcessed;
    var wasLast = Interlocked.Decrement(ref _NumProcessing) == 0;

    // The following would be ideal but I think it could produce a hang in some cases
    /*
    // This is a race, but that's probably OK since anyone waiting for the queue to drain will verify and spin if
    // we lose the race
    var signalDone = wasLast && (Volatile.Read(ref ItemsQueued) <= processedCounter);
    */

    if (signalDrained) {
        // FIXME: Should we do this first? Assumption is that in a very bad case, the future's
        // complete handler might begin waiting
        DrainedSignal.Set();
    }

    if (wasLast) {
        FinishedProcessingSignal.Set();
    }
}