Example #1
        /// <summary>
        /// Starts each reactor using its selected threading strategy
        /// (current thread, dedicated thread, or the ThreadPool).
        /// </summary>
        public void GenerateThreads()
        {
            try
            {
                int i = 1;
                foreach (Reactor reactor in this.reactors)
                {
                    switch (reactor.selectedThreadingType)
                    {
                    case ThreadingType.SingleThreading:
                        reactor.ExecuteThread();
                        break;

                    case ThreadingType.MultiThreading:
                        Thread thread = new Thread(reactor.ExecuteThread)
                        {
                            Name = reactor.ToString() + i
                        };
                        thread.Start();
                        break;

                    case ThreadingType.ThreadPool:
                        ThreadPool.QueueUserWorkItem(reactor.ThreadProcess);
                        break;
                    }
                    Thread.Sleep(100);
                    i++;
                }
            }
            catch (Exception e)
            {
                Debug.WriteLine("\n\n\n\nException\nmessage: {0}\nStacktrace: {1}", e.Message, e.StackTrace);
            }
        }
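For context, a minimal sketch of the types the method above relies on. The Reactor class and ThreadingType enum are not shown on this page, so everything below is an assumption inferred from how they are used:

    // Hypothetical supporting types for GenerateThreads (not the project's actual code).
    public enum ThreadingType
    {
        SingleThreading,
        MultiThreading,
        ThreadPool
    }

    public class Reactor
    {
        public ThreadingType selectedThreadingType;

        // Runs the reactor's work; called directly, or on a dedicated Thread.
        public void ExecuteThread()
        {
            // ... reactor work ...
        }

        // WaitCallback-compatible entry point for ThreadPool.QueueUserWorkItem.
        public void ThreadProcess(object state)
        {
            ExecuteThread();
        }
    }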
public void Queued_And_Then_Left_Over_A_Period_Of_Time_With_Queue_Per_Worker()
{
    MessageConsumer consumer = new MessageConsumer();
    ThreadPool pool = new ThreadPool(ThreadPoolConfiguration.FiveAndTen, new QueuePerWorkerStrategy());
    pool.Start();
    foreach (Message message in MessageBuilder.MakeMessages(30))
    {
        pool.Queue(consumer, message);
    }
    Thread.Sleep(TimeSpan.FromSeconds(10.0));
    pool.Stop();
    Assert.AreEqual(30, consumer.NumberOfMessagesProcessed);
}
public void Queued_Randomly_For_Twenty_Seconds()
{
    ThreadPool pool = new ThreadPool(ThreadPoolConfiguration.FiveAndTen, new SingleQueueStrategy());
    pool.Start();
    DateTime startedAt = DateTime.Now;
    while (DateTime.Now - startedAt < TimeSpan.FromSeconds(10.0))
    {
        foreach (Message message in MessageBuilder.MakeMessages(Random.Next(10)))
        {
            pool.Queue(new MessageConsumer(), message);
        }
        Thread.Sleep(TimeSpan.FromSeconds(Random.NextDouble()));
    }
    pool.Stop();
}
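The two tests above depend on a MessageConsumer and a MessageBuilder that are not reproduced here. A plausible sketch, assuming the pool invokes a Consume method per queued message and that processed messages are counted with an interlocked increment (member names other than those used by the tests are guesses):

    using System.Collections.Generic;
    using System.Threading;

    public class Message { }

    public class MessageConsumer
    {
        private int _numberOfMessagesProcessed;

        public int NumberOfMessagesProcessed => _numberOfMessagesProcessed;

        // Assumed to be invoked by the pool for each queued message.
        public void Consume(Message message)
        {
            Interlocked.Increment(ref _numberOfMessagesProcessed);
        }
    }

    public static class MessageBuilder
    {
        public static IEnumerable<Message> MakeMessages(int count)
        {
            for (int i = 0; i < count; i++)
            {
                yield return new Message();
            }
        }
    }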
Example #4
private static ThreadPool GetThreadPool(ThreadPoolName uniqueId)
{
    uniqueId = TranslatePool(uniqueId);
    ThreadPool currentPool;
    lock (threadPool)
    {
        if (!threadPool.TryGetValue(uniqueId, out currentPool))
        {
            currentPool = new ThreadPool(uniqueId.ToString());
            threadPool[uniqueId] = currentPool;
            currentPool.SetMaxThreads(10);
        }
    }
    return currentPool;
}
Example #5
private static ThreadPool GetThreadPool(string uniqueId)
{
    ThreadPool currentPool;
    lock (threadPool)
    {
        if (!threadPool.TryGetValue(uniqueId, out currentPool))
        {
            currentPool = new ThreadPool(uniqueId);
            threadPool[uniqueId] = currentPool;
        }
    }
    return currentPool;
}
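Examples #4 and #5 both implement lazy, lock-guarded initialization of a shared dictionary of named pools. On runtimes with System.Collections.Concurrent available, the same get-or-create pattern can be sketched with ConcurrentDictionary (assuming the same custom ThreadPool type as above; note that the value factory may run more than once under contention, though only one instance is kept):

    using System.Collections.Concurrent;

    public static class ThreadPoolCache
    {
        private static readonly ConcurrentDictionary<string, ThreadPool> pools =
            new ConcurrentDictionary<string, ThreadPool>();

        // GetOrAdd gives the same "create once, then reuse" behavior without an explicit lock.
        public static ThreadPool GetThreadPool(string uniqueId)
        {
            return pools.GetOrAdd(uniqueId, id => new ThreadPool(id));
        }
    }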
        public BackgroundEffectRenderer(Effect effect,
            EffectConfigToken effectToken,
            RenderArgs dstArgs,
            RenderArgs srcArgs,
            PdnRegion renderRegion,
            int tileCount,
            int workerThreads)
        {
            this.effect = effect;
            this.effectToken = effectToken;
            this.dstArgs = dstArgs;
            this.srcArgs = srcArgs;
            this.renderRegion = renderRegion;
            this.renderRegion.Intersect(dstArgs.Bounds);
            this.tileRegions = SliceUpRegion(renderRegion, tileCount, dstArgs.Bounds);

            this.tilePdnRegions = new PdnRegion[this.tileRegions.Length];
            for (int i = 0; i < this.tileRegions.Length; ++i)
            {
                PdnRegion pdnRegion = Utility.RectanglesToRegion(this.tileRegions[i]);
                this.tilePdnRegions[i] = pdnRegion;
            }

            this.tileCount = tileCount;
            this.workerThreads = workerThreads;

            if ((effect.EffectDirectives & EffectDirectives.SingleThreaded) != 0)
            {
                this.workerThreads = 1;
            }

            this.threadPool = new Threading.ThreadPool(this.workerThreads, false);
        }
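SliceUpRegion is not shown above; its job is to split the render region into tileCount slices so that each worker thread can render its own share. A much simplified, hypothetical band-splitting helper conveys the idea (the real Paint.NET version produces a rectangle list per tile derived from the region's scan rectangles):

    using System.Drawing;

    internal static class TileHelper
    {
        // Splits a bounding rectangle into tileCount horizontal bands of roughly equal height.
        public static Rectangle[] SliceIntoBands(Rectangle bounds, int tileCount)
        {
            var bands = new Rectangle[tileCount];
            for (int i = 0; i < tileCount; i++)
            {
                int top    = bounds.Top + (int)((long)bounds.Height * i / tileCount);
                int bottom = bounds.Top + (int)((long)bounds.Height * (i + 1) / tileCount);
                bands[i] = Rectangle.FromLTRB(bounds.Left, top, bounds.Right, bottom);
            }
            return bands;
        }
    }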
Example #7
File: Watchdog.cs Project: abombss/machine
public Watchdog(IThreadManager threadManager, ThreadPool pool)
{
    _threadManager = threadManager;
    _pool = pool;
}
Example #8
private static void QueueTimerCompletion(TimerQueueTimer timer)
{
    // Can use "unsafe" variant because we take care of capturing and restoring the ExecutionContext.
    ThreadPool.UnsafeQueueCustomWorkItem(timer, forceGlobal: true);
}
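UnsafeQueueCustomWorkItem is internal to the runtime; it works here because TimerQueueTimer itself implements IThreadPoolWorkItem, so no delegate or state object has to be allocated per completion. The public analogue available since .NET Core 3.0 looks like this (MyWorkItem is a made-up type):

    using System.Threading;

    // A reusable work item that avoids the per-call delegate/state allocation of QueueUserWorkItem.
    public sealed class MyWorkItem : IThreadPoolWorkItem
    {
        public void Execute()
        {
            // ... do the work ...
        }
    }

    // Queueing it to the global queue, comparable to forceGlobal: true above:
    // ThreadPool.UnsafeQueueUserWorkItem(new MyWorkItem(), preferLocal: false);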
Example #9
public EngineShutdown(RoomieEngine engine)
{
    _engine = engine;
    _threadpool = _engine.CreateThreadPool("Shutdown Tasks");
}
Example #10
File: Timer.cs Project: xwyangjshb/mono
        // Fire any timers that have expired, and update the native timer to schedule the rest of them.
        // We're in a thread pool work item here, and if there are multiple timers to be fired, we want
        // to queue all but the first one.  The first may then be invoked synchronously or queued,
        // a task left up to our caller, which might be firing timers from multiple queues.
        private void FireNextTimers()
        {
            // We fire the first timer on this thread; any other timers that need to be fired
            // are queued to the ThreadPool.
            TimerQueueTimer timerToFireOnThisThread = null;

            lock (this)
            {
                // Since we got here, that means our previous timer has fired.
                _isTimerScheduled = false;
                bool haveTimerToSchedule = false;
                uint nextTimerDuration   = uint.MaxValue;

                int nowTicks = TickCount;

                // Sweep through the "short" timers.  If the current tick count is greater than
                // the current threshold, also sweep through the "long" timers.  Finally, as part
                // of sweeping the long timers, move anything that'll fire within the next threshold
                // to the short list.  It's functionally ok if more timers end up in the short list
                // than is truly necessary (but not the opposite).
                TimerQueueTimer timer = _shortTimers;
                for (int listNum = 0; listNum < 2; listNum++) // short == 0, long == 1
                {
                    while (timer != null)
                    {
                        Debug.Assert(timer._dueTime != Timeout.UnsignedInfinite, "A timer in the list must have a valid due time.");

                        // Save off the next timer to examine, in case our examination of this timer results
                        // in our deleting or moving it; we'll continue after with this saved next timer.
                        TimerQueueTimer next = timer._next;

                        uint elapsed   = (uint)(nowTicks - timer._startTicks);
                        int  remaining = (int)timer._dueTime - (int)elapsed;
                        if (remaining <= 0)
                        {
                            // Timer is ready to fire.

                            if (timer._period != Timeout.UnsignedInfinite)
                            {
                                // This is a repeating timer; schedule it to run again.

                                // Discount the extra amount of time that has elapsed since the previous firing time to
                                // prevent timer ticks from drifting.  If enough time has already elapsed for the timer to fire
                                // again, meaning the timer can't keep up with the short period, have it fire 1 ms from now to
                                // avoid spinning without a delay.
                                timer._startTicks = nowTicks;
                                uint elapsedForNextDueTime = elapsed - timer._dueTime;
                                timer._dueTime = (elapsedForNextDueTime < timer._period) ?
                                                 timer._period - elapsedForNextDueTime :
                                                 1;

                                // Update the timer if this becomes the next timer to fire.
                                if (timer._dueTime < nextTimerDuration)
                                {
                                    haveTimerToSchedule = true;
                                    nextTimerDuration   = timer._dueTime;
                                }

                                // Validate that the repeating timer is still on the right list.  It's likely that
                                // it started in the long list and was moved to the short list at some point, so
                                // we now want to move it back to the long list if that's where it belongs. Note that
                                // if we're currently processing the short list and move it to the long list, we may
                                // end up revisiting it again if we also enumerate the long list, but we will have already
                                // updated the due time appropriately so that we won't fire it again (it's also possible
                                // but rare that we could be moving a timer from the long list to the short list here,
                                // if the initial due time was set to be long but the timer then had a short period).
                                bool targetShortList = (nowTicks + timer._dueTime) - _currentAbsoluteThreshold <= 0;
                                if (timer._short != targetShortList)
                                {
                                    MoveTimerToCorrectList(timer, targetShortList);
                                }
                            }
                            else
                            {
                                // Not repeating; remove it from the queue
                                DeleteTimer(timer);
                            }

                            // If this is the first timer, we'll fire it on this thread (after processing
                            // all others). Otherwise, queue it to the ThreadPool.
                            if (timerToFireOnThisThread == null)
                            {
                                timerToFireOnThisThread = timer;
                            }
                            else
                            {
                                ThreadPool.UnsafeQueueUserWorkItemInternal(timer, preferLocal: false);
                            }
                        }
                        else
                        {
                            // This timer isn't ready to fire.  Update the next time the native timer fires if necessary,
                            // and move this timer to the short list if its remaining time is now at or under the threshold.

                            if (remaining < nextTimerDuration)
                            {
                                haveTimerToSchedule = true;
                                nextTimerDuration   = (uint)remaining;
                            }

                            if (!timer._short && remaining <= ShortTimersThresholdMilliseconds)
                            {
                                MoveTimerToCorrectList(timer, shortList: true);
                            }
                        }

                        timer = next;
                    }

                    // Switch to process the long list if necessary.
                    if (listNum == 0)
                    {
                        // Determine how much time remains between now and the current threshold.  If time remains,
                        // we can skip processing the long list.  We use > rather than >= because, although we
                        // know that if remaining == 0 no timers in the long list will need to be fired, we
                        // don't know without looking at them when we'll need to call FireNextTimers again.  We
                        // could in that case just set the next firing to 1, but we may as well just iterate the
                        // long list now; otherwise, most timers created in the interim would end up in the long
                        // list and we'd likely end up paying for another invocation of FireNextTimers that could
                        // have been delayed longer (to whatever is the current minimum in the long list).
                        int remaining = _currentAbsoluteThreshold - nowTicks;
                        if (remaining > 0)
                        {
                            if (_shortTimers == null && _longTimers != null)
                            {
                                // We don't have any short timers left and we haven't examined the long list,
                                // which means we likely don't have an accurate nextTimerDuration.
                                // But we do know that nothing in the long list will be firing before or at _currentAbsoluteThreshold,
                                // so we can just set nextTimerDuration to the difference between then and now.
                                nextTimerDuration   = (uint)remaining + 1;
                                haveTimerToSchedule = true;
                            }
                            break;
                        }

                        // Switch to processing the long list.
                        timer = _longTimers;

                        // Now that we're going to process the long list, update the current threshold.
                        _currentAbsoluteThreshold = nowTicks + ShortTimersThresholdMilliseconds;
                    }
                }

                // If we still have scheduled timers, update the timer to ensure it fires
                // in time for the next one in line.
                if (haveTimerToSchedule)
                {
                    EnsureTimerFiresBy(nextTimerDuration);
                }
            }

            // Fire the user timer outside of the lock!
            timerToFireOnThisThread?.Fire();
        }
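The drift-compensation arithmetic in the repeating-timer branch is easier to follow with concrete numbers (a hypothetical trace, not output from a real run):

    // Suppose _period = 100 ms, _dueTime = 100 ms, and FireNextTimers runs 7 ms late, so elapsed = 107.
    //   elapsedForNextDueTime = elapsed - _dueTime              = 107 - 100 = 7
    //   new _dueTime          = _period - elapsedForNextDueTime = 100 - 7   = 93
    // The next firing lands back on the original 100 ms grid instead of drifting 7 ms later each tick.
    // If the callback fell a full period or more behind (elapsedForNextDueTime >= _period),
    // _dueTime is set to 1 ms so the timer fires again soon without spinning in a zero-delay loop.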
Example #11
public virtual void Post(SendOrPostCallback d, Object state)
{
    ThreadPool.QueueUserWorkItem(new WaitCallback(d), state);
}
Example #12
        public bool SetMinThreads(int workerThreads, int ioCompletionThreads)
        {
            if (workerThreads < 0 || ioCompletionThreads < 0)
            {
                return false;
            }

            bool addWorker      = false;
            bool wakeGateThread = false;

            _threadAdjustmentLock.Acquire();
            try
            {
                if (workerThreads > _maxThreads)
                {
                    return false;
                }

                if (ThreadPool.UsePortableThreadPoolForIO
                        ? ioCompletionThreads > _legacy_maxIOCompletionThreads
                        : !ThreadPool.CanSetMinIOCompletionThreads(ioCompletionThreads))
                {
                    return false;
                }

                if (HasForcedMinThreads && workerThreads != ForcedMinWorkerThreads)
                {
                    return false;
                }

                if (ThreadPool.UsePortableThreadPoolForIO)
                {
                    _legacy_minIOCompletionThreads = (short)Math.Max(1, ioCompletionThreads);
                }
                else
                {
                    ThreadPool.SetMinIOCompletionThreads(ioCompletionThreads);
                }

                short newMinThreads = (short)Math.Max(1, workerThreads);
                if (newMinThreads == _minThreads)
                {
                    return true;
                }

                _minThreads = newMinThreads;
                if (_numBlockedThreads > 0)
                {
                    // Blocking adjustment will adjust the goal according to its heuristics
                    if (_pendingBlockingAdjustment != PendingBlockingAdjustment.Immediately)
                    {
                        _pendingBlockingAdjustment = PendingBlockingAdjustment.Immediately;
                        wakeGateThread             = true;
                    }
                }
                else if (_separated.counts.NumThreadsGoal < newMinThreads)
                {
                    _separated.counts.InterlockedSetNumThreadsGoal(newMinThreads);
                    if (_separated.numRequestedWorkers > 0)
                    {
                        addWorker = true;
                    }
                }
            }
            finally
            {
                _threadAdjustmentLock.Release();
            }

            if (addWorker)
            {
                WorkerThread.MaybeAddWorkingWorker(this);
            }
            else if (wakeGateThread)
            {
                GateThread.Wake(this);
            }
            return true;
        }
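From application code, the corresponding knobs are reached through the public static ThreadPool API; a minimal usage sketch:

    using System;
    using System.Threading;

    class MinThreadsDemo
    {
        static void Main()
        {
            ThreadPool.GetMinThreads(out int worker, out int io);
            Console.WriteLine($"current minimums: worker={worker}, io={io}");

            // Returns false when the request is rejected, e.g. if it exceeds the configured maximums.
            bool ok = ThreadPool.SetMinThreads(worker + 2, io);
            Console.WriteLine($"SetMinThreads succeeded: {ok}");
        }
    }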
        public void TestThreadPool()
        {
            threadPool = new ThreadPool(Environment.ProcessorCount, "testWorker");

            var ssw = new SplitStopwatch();
            ssw.Start("starting to enqueue...");

            var wir1 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            var wir2 = threadPool.EnqueueWorkItem(CalcAverage, new[] {1, 1, 1, 1});
            var wir3 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            var wir4 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            var wir5 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            var wir6 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            var wir7 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            var wir8 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            var wir9 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            var wir10 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});

            ssw.Split("all items are enqueued...");

            var average = wir1.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 1...");
            wir1.Dispose();

            average = wir2.Result;
            Assert.AreEqual(average, 1.0);
            ssw.Split("we waited for result 2...");
            wir2.Dispose();

            average = wir3.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 3...");
            wir3.Dispose();

            average = wir4.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 4...");
            wir4.Dispose();

            average = wir5.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 5...");
            wir5.Dispose();

            average = wir6.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 6...");
            wir6.Dispose();

            average = wir7.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 7...");
            wir7.Dispose();

            average = wir8.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 8...");
            wir8.Dispose();

            average = wir9.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 9...");
            wir9.Dispose();

            average = wir10.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 10...");
            wir10.Dispose();

            var wir11 = threadPool.EnqueueWorkItem(CalcAverage, new[] {2, 3, 2, 5});
            average = wir11.Result;
            Assert.AreEqual(average, 3.0);
            ssw.Split("we waited for result 11...");
            wir11.Dispose();

            threadPool.ShutDown();
        }
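CalcAverage is not shown on this page; given the asserted results (the average of {2, 3, 2, 5} is 3.0), it is presumably something like the following hypothetical helper:

    using System.Linq;

    // Assumed helper: enqueued as a Func<int[], double> work item by the tests above.
    private static double CalcAverage(int[] numbers)
    {
        return numbers.Average();
    }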
        public void TestThreadPoolManyThreadsActionTNumberOfCalls()
        {
            threadPool = new ThreadPool(Environment.ProcessorCount, "testWorker");

            var wirs = new List<IWorkItemState>();
            var objects = new List<ThreadingThreadpoolTestObject>();

            for (var j = 0; j < 5000; j++)
            {
                wirs.Clear();
                objects.Clear();
                for (var i = 0; i < 100; i++)
                {
                    var o = new ThreadingThreadpoolTestObject();
                    objects.Add(o);
                    var wir = threadPool.EnqueueWorkItem(o.Call);
                    wirs.Add(wir);
                }

                foreach (var workItemState in wirs)
                {
                    workItemState.Result();
                    workItemState.Dispose();
                }

                foreach (var threadingThreadpoolTestObject in objects)
                {
                    Assert.AreEqual(1, threadingThreadpoolTestObject.NumberOfCalls,
                        "One of the objects has been called " + threadingThreadpoolTestObject.NumberOfCalls +
                        " times. It was expected to be called once only and exactly once.");
                }
            }

            threadPool.ShutDown();
        }
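ThreadingThreadpoolTestObject is likewise not reproduced here; for the exactly-once assertion to be meaningful it only needs a Call method that counts its invocations, for example (hypothetical):

    using System.Threading;

    public class ThreadingThreadpoolTestObject
    {
        private int _numberOfCalls;

        public int NumberOfCalls => _numberOfCalls;

        // Enqueued as the work item; each enqueue should produce exactly one call.
        public void Call()
        {
            Interlocked.Increment(ref _numberOfCalls);
        }
    }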
        public void TestThreadPoolPerformance()
        {
            const int numberOfOperations = 100000;
            var ssw = new SplitStopwatch();

            // Test without any threadPool.
            ssw.Start("SINGLE THREADED WITHOUT POOL:");
            for (var i = 0; i < numberOfOperations; i++)
            {
                CalcAverage(GetNumbersForAverage());
            }
            ssw.Stop("Done.", 1);
            Console.Out.WriteLine(string.Empty);
            ssw.Reset();

            for (var numberOfWorkerThreads = 1;
                numberOfWorkerThreads < Environment.ProcessorCount*2;
                numberOfWorkerThreads++)
            {
                ssw.Start("THREADPOOL (" + numberOfWorkerThreads + " workerThreads):");
                threadPool = new ThreadPool(numberOfWorkerThreads, "testWorker");
                ssw.Split("Starting to enqueue.", 1);

                for (var i = 0; i < numberOfOperations; i++)
                {
                    threadPool.EnqueueWorkItem(CalcAverage, GetNumbersForAverage());
                }

                ssw.Split("All items are enqueued.", 1);
                threadPool.WaitForEveryWorkerIdle();
                threadPool.ShutDown();

                ssw.Stop("Done.", 1);
                Console.Out.WriteLine(string.Empty);
                ssw.Reset();
            }
        }
        public void TestThreadPoolPausedAndResumed()
        {
            threadPool = new ThreadPool(Environment.ProcessorCount, "testWorker");

            var wirs = new List<IWorkItemState>();

            for (var i = 0; i < 10; i++)
            {
                if (i == 5)
                {
                    threadPool.Sleep();
                    Thread.Sleep(2000);
                }
                var wir = threadPool.EnqueueWorkItem(WriteToConsole, "test " + (i + 1));
                wirs.Add(wir);
            }

            threadPool.Wakeup();

            foreach (var workItemState in wirs)
            {
                workItemState.Result();
                workItemState.Dispose();
            }

            threadPool.ShutDown();
        }
        public void TestThreadPoolManyWorkItemsSingleThread()
        {
            threadPool = new ThreadPool(1, "testWorker");

            var wirs = new List<IWorkItemState<bool>>();

            for (var i = 0; i < 50; i++)
            {
                var wir = threadPool.EnqueueWorkItem(Not, i%2 == 1);
                wirs.Add(wir);
            }

            var anticipatedResult = true;
            foreach (var workItemState in wirs)
            {
                var result = workItemState.Result;
                workItemState.Dispose();

                Assert.AreEqual(result, anticipatedResult);
                anticipatedResult = !anticipatedResult;
            }

            threadPool.ShutDown();
        }
        public void TestThreadPoolManyThreadsFuncT()
        {
            threadPool = new ThreadPool(Environment.ProcessorCount, "testWorker");

            var wirs = new List<IWorkItemState<bool>>();

            for (var i = 0; i < 50; i++)
            {
                var wir = threadPool.EnqueueWorkItem(Not, true);
                wirs.Add(wir);
            }

            foreach (var workItemState in wirs)
            {
                var result = workItemState.Result;
                workItemState.Dispose();

                Assert.AreEqual(result, false);
            }

            threadPool.ShutDown();
        }
        public void TestThreadPoolManyThreadsActionTWithCallback()
        {
            threadPool = new ThreadPool(Environment.ProcessorCount, "testWorker");

            var wirs = new List<IWorkItemState>();

            for (var i = 0; i < 50; i++)
            {
                var wir = threadPool.EnqueueWorkItem(WriteToConsole, "test", CallbackFunction);
                wirs.Add(wir);
            }

            foreach (var workItemState in wirs)
            {
                workItemState.Result();
                workItemState.Dispose();
            }

            threadPool.ShutDown();
        }
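The remaining helpers used by these tests (Not, WriteToConsole, CallbackFunction) are also not shown; plausible stand-ins consistent with how they are called would be (all hypothetical, and the CallbackFunction signature in particular is a guess):

    using System;

    // Func<bool, bool> work item: the tests assert that the result is the negation of the input.
    private static bool Not(bool value) => !value;

    // Action<string> work item used by the pause/resume and callback tests.
    private static void WriteToConsole(string text) => Console.WriteLine(text);

    // Completion callback passed alongside WriteToConsole in the callback test.
    private static void CallbackFunction() => Console.WriteLine("work item finished");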
Example #20
public virtual void Post(SendOrPostCallback d, Object state)
{
    ThreadPool.QueueUserWorkItem(s => s.d(s.state), (d, state), preferLocal: false);
}
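Compared with Example #11, this version uses the generic QueueUserWorkItem<TState> overload available on newer runtimes: the (d, state) tuple carries the data explicitly, so no closure or WaitCallback object has to be allocated per Post. The same pattern in isolation (a sketch):

    using System.Threading;

    SendOrPostCallback d = o => { /* handle the posted callback */ };
    object state = null;

    // The lambda captures nothing; everything it needs travels in the explicitly passed tuple.
    ThreadPool.QueueUserWorkItem(s => s.d(s.state), (d, state), preferLocal: false);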
Example #21
            private static void GateThreadStart()
            {
                bool disableStarvationDetection =
                    AppContextConfigHelper.GetBooleanConfig("System.Threading.ThreadPool.DisableStarvationDetection", false);
                bool debuggerBreakOnWorkStarvation =
                    AppContextConfigHelper.GetBooleanConfig("System.Threading.ThreadPool.DebugBreakOnWorkerStarvation", false);

                // The first reading is over a time range other than what we are focusing on, so we do not use the read other
                // than to send it to any runtime-specific implementation that may also use the CPU utilization.
                CpuUtilizationReader cpuUtilizationReader = default;

                _ = cpuUtilizationReader.CurrentUtilization;

                PortableThreadPool threadPoolInstance   = ThreadPoolInstance;
                LowLevelLock       threadAdjustmentLock = threadPoolInstance._threadAdjustmentLock;
                DelayHelper        delayHelper          = default;

                if (BlockingConfig.IsCooperativeBlockingEnabled)
                {
                    // Initialize memory usage and limits, and register to update them on gen 2 GCs
                    threadPoolInstance.OnGen2GCCallback();
                    Gen2GcCallback.Register(threadPoolInstance.OnGen2GCCallback);
                }

                while (true)
                {
                    RunGateThreadEvent.WaitOne();
                    int currentTimeMs = Environment.TickCount;
                    delayHelper.SetGateActivitiesTime(currentTimeMs);

                    while (true)
                    {
                        bool wasSignaledToWake = DelayEvent.WaitOne((int)delayHelper.GetNextDelay(currentTimeMs));
                        currentTimeMs = Environment.TickCount;

                        // Thread count adjustment for cooperative blocking
                        do
                        {
                            PendingBlockingAdjustment pendingBlockingAdjustment = threadPoolInstance._pendingBlockingAdjustment;
                            if (pendingBlockingAdjustment == PendingBlockingAdjustment.None)
                            {
                                delayHelper.ClearBlockingAdjustmentDelay();
                                break;
                            }

                            bool previousDelayElapsed = false;
                            if (delayHelper.HasBlockingAdjustmentDelay)
                            {
                                previousDelayElapsed =
                                    delayHelper.HasBlockingAdjustmentDelayElapsed(currentTimeMs, wasSignaledToWake);
                                if (pendingBlockingAdjustment == PendingBlockingAdjustment.WithDelayIfNecessary &&
                                    !previousDelayElapsed)
                                {
                                    break;
                                }
                            }

                            uint nextDelayMs = threadPoolInstance.PerformBlockingAdjustment(previousDelayElapsed);
                            if (nextDelayMs <= 0)
                            {
                                delayHelper.ClearBlockingAdjustmentDelay();
                            }
                            else
                            {
                                delayHelper.SetBlockingAdjustmentTimeAndDelay(currentTimeMs, nextDelayMs);
                            }
                        } while (false);

                        //
                        // Periodic gate activities
                        //

                        if (!delayHelper.ShouldPerformGateActivities(currentTimeMs, wasSignaledToWake))
                        {
                            continue;
                        }

                        if (ThreadPool.EnableWorkerTracking && NativeRuntimeEventSource.Log.IsEnabled())
                        {
                            NativeRuntimeEventSource.Log.ThreadPoolWorkingThreadCount(
                                (uint)threadPoolInstance.GetAndResetHighWatermarkCountOfThreadsProcessingUserCallbacks());
                        }

                        int cpuUtilization = cpuUtilizationReader.CurrentUtilization;
                        threadPoolInstance._cpuUtilization = cpuUtilization;

                        bool needGateThreadForRuntime = ThreadPool.PerformRuntimeSpecificGateActivities(cpuUtilization);

                        if (!disableStarvationDetection &&
                            threadPoolInstance._pendingBlockingAdjustment == PendingBlockingAdjustment.None &&
                            threadPoolInstance._separated.numRequestedWorkers > 0 &&
                            SufficientDelaySinceLastDequeue(threadPoolInstance))
                        {
                            bool addWorker = false;
                            threadAdjustmentLock.Acquire();
                            try
                            {
                                // Don't add a thread if we're at max or if we are already in the process of adding threads.
                                // This logic is slightly different from the native implementation in CoreCLR because there are
                                // no retired threads. In the native implementation, when hill climbing reduces the thread count
                                // goal, threads that are stopped from processing work are switched to "retired" state, and they
                                // don't count towards the equivalent existing thread count. In this implementation, the
                                // existing thread count includes any worker thread that has not yet exited, including those
                                // stopped from working by hill climbing, so here the number of threads processing work, instead
                                // of the number of existing threads, is compared with the goal. There may be alternative
                                // solutions, for now this is only to maintain consistency in behavior.
                                ThreadCounts counts = threadPoolInstance._separated.counts;
                                while (
                                    counts.NumProcessingWork < threadPoolInstance._maxThreads &&
                                    counts.NumProcessingWork >= counts.NumThreadsGoal)
                                {
                                    if (debuggerBreakOnWorkStarvation)
                                    {
                                        Debugger.Break();
                                    }

                                    ThreadCounts newCounts         = counts;
                                    short        newNumThreadsGoal = (short)(counts.NumProcessingWork + 1);
                                    newCounts.NumThreadsGoal = newNumThreadsGoal;

                                    ThreadCounts countsBeforeUpdate =
                                        threadPoolInstance._separated.counts.InterlockedCompareExchange(newCounts, counts);
                                    if (countsBeforeUpdate == counts)
                                    {
                                        HillClimbing.ThreadPoolHillClimber.ForceChange(
                                            newNumThreadsGoal,
                                            HillClimbing.StateOrTransition.Starvation);
                                        addWorker = true;
                                        break;
                                    }

                                    counts = countsBeforeUpdate;
                                }
                            }
                            finally
                            {
                                threadAdjustmentLock.Release();
                            }

                            if (addWorker)
                            {
                                WorkerThread.MaybeAddWorkingWorker(threadPoolInstance);
                            }
                        }

                        if (!needGateThreadForRuntime &&
                            threadPoolInstance._separated.numRequestedWorkers <= 0 &&
                            threadPoolInstance._pendingBlockingAdjustment == PendingBlockingAdjustment.None &&
                            Interlocked.Decrement(ref threadPoolInstance._separated.gateThreadRunningState) <= GetRunningStateForNumRuns(0))
                        {
                            break;
                        }
                    }
                }
            }
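The starvation path above is a classic interlocked compare-exchange loop: read the current counts, compute a new thread-count goal, and only publish it if nothing changed in between, otherwise retry with the freshly observed value. The same shape in a self-contained form (a sketch using a plain int rather than the runtime's ThreadCounts struct):

    using System.Threading;

    class GoalHolder
    {
        private int _goal;

        // Raises the goal to 'desired' without a lock, retrying if another thread
        // updates the value between our read and our compare-exchange.
        public bool TryRaiseGoal(int desired)
        {
            int observed = Volatile.Read(ref _goal);
            while (observed < desired)
            {
                int previous = Interlocked.CompareExchange(ref _goal, desired, observed);
                if (previous == observed)
                {
                    return true;     // we won the race and published the new goal
                }
                observed = previous; // someone else changed it; re-check and retry
            }
            return false;            // goal was already high enough
        }
    }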