Decrement() public static method

public static Decrement ( ref int location ) : int
location ref int The variable whose value is to be decremented.
return int
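A minimal usage sketch of the basic call pattern; the Worker class and _pending field below are illustrative and not taken from the examples that follow:

using System.Threading;

class Worker
{
    private int _pending = 3;

    // Interlocked.Decrement atomically subtracts 1 and returns the new value,
    // so the caller learns whether it just finished the last outstanding item.
    public bool CompleteOne()
    {
        return Interlocked.Decrement(ref _pending) == 0;
    }
}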
Example #1
        protected override void Dispose(bool disposing)
        {
            // Unadvise solution listeners.
            try
            {
                if (disposing)
                {
                    // only decrement the reference count once, regardless of the number of times Dispose is called.
                    // Ignore if Initialize was never called.
                    if (_initialized && !_disposed && Interlocked.Decrement(ref _singleFileGeneratorNodeExtenderReferenceCount) == 0)
                    {
                        ObjectExtenders objectExtenders = (ObjectExtenders)GetService(typeof(ObjectExtenders));
                        objectExtenders.UnregisterExtenderProvider(_singleFileGeneratorNodeExtenderCookie);
                    }

                    foreach (SolutionListener solutionListener in this.solutionListeners)
                    {
                        solutionListener.Dispose();
                    }
                }
            }
            finally
            {
                _disposed = true;
                base.Dispose(disposing);
            }
        }
Example #2
        /// <summary>
        /// Executes a foreach loop in which iterations may run in parallel
        /// </summary>
        /// <typeparam name="T">Object type that the collection wraps</typeparam>
        /// <param name="threadCount">The number of concurrent execution threads to run</param>
        /// <param name="enumerable">An enumerable collection to iterate over</param>
        /// <param name="body">Method body to run for each object in the collection</param>
        public static void ForEach<T>(int threadCount, IEnumerable<T> enumerable, Action<T> body)
        {
            int             counter           = threadCount;
            AutoResetEvent  threadFinishEvent = new AutoResetEvent(false);
            IEnumerator<T>  enumerator        = enumerable.GetEnumerator();
            Exception       exception         = null;

            for (int i = 0; i < threadCount; i++)
            {
                ThreadPool.QueueUserWorkItem(
                    delegate(object o)
                {
                    int threadIndex = (int)o;

                    while (exception == null)
                    {
                        T entry;

                        lock (enumerator)
                        {
                            if (!enumerator.MoveNext())
                            {
                                break;
                            }
                            entry = (T)enumerator.Current;     // Explicit typecast for Mono's sake
                        }

                        try { body(entry); }
                        catch (Exception ex) { exception = ex; break; }
                    }

                    if (Interlocked.Decrement(ref counter) == 0)
                    {
                        threadFinishEvent.Set();
                    }
                }, i);
            }

            threadFinishEvent.WaitOne();

            if (exception != null)
            {
                throw exception;
            }
        }
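A hedged usage sketch for the ForEach helper above, assuming it is declared in a static class named ParallelHelper (that class name is hypothetical):

using System;
using System.Collections.Generic;

class Demo
{
    static void Main()
    {
        var items = new List<string> { "a", "b", "c", "d" };

        // Up to three pool threads pull items from the shared enumerator;
        // ForEach blocks until the last worker decrements the counter to zero.
        ParallelHelper.ForEach(3, items, item => Console.WriteLine(item));
    }
}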
Example #3
 public override AbstractEdgeMap<T> Put(int key, T value)
 {
     if (key >= minIndex && key <= maxIndex)
     {
         T existing = Interlocked.Exchange(ref arrayData[key - minIndex], value);
         if (existing == null && value != null)
         {
             Interlocked.Increment(ref size);
         }
         else
         {
             if (existing != null && value == null)
             {
                 Interlocked.Decrement(ref size);
             }
         }
     }
     return this;
 }
Example #4
        /// <summary>
        /// Executes a for loop in which iterations may run in parallel
        /// </summary>
        /// <param name="threadCount">The number of concurrent execution threads to run</param>
        /// <param name="fromInclusive">The loop will be started at this index</param>
        /// <param name="toExclusive">The loop will be terminated before this index is reached</param>
        /// <param name="body">Method body to run for each iteration of the loop</param>
        public static void For(int threadCount, int fromInclusive, int toExclusive, Action <int> body)
        {
            int            counter           = threadCount;
            AutoResetEvent threadFinishEvent = new AutoResetEvent(false);
            Exception      exception         = null;

            --fromInclusive;

            for (int i = 0; i < threadCount; i++)
            {
                ThreadPool.QueueUserWorkItem(
                    delegate(object o)
                {
                    int threadIndex = (int)o;

                    while (exception == null)
                    {
                        int currentIndex = Interlocked.Increment(ref fromInclusive);

                        if (currentIndex >= toExclusive)
                        {
                            break;
                        }

                        try { body(currentIndex); }
                        catch (Exception ex) { exception = ex; break; }
                    }

                    if (Interlocked.Decrement(ref counter) == 0)
                    {
                        threadFinishEvent.Set();
                    }
                }, i);
            }

            threadFinishEvent.WaitOne();

            if (exception != null)
            {
                throw exception;
            }
        }
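A hedged usage sketch for the For helper above, again assuming a hypothetical static class named ParallelHelper:

using System;

class Demo
{
    static void Main()
    {
        int[] squares = new int[8];

        // Each worker claims the next index with Interlocked.Increment,
        // so every index in [0, 8) is handled exactly once.
        ParallelHelper.For(4, 0, squares.Length, i => squares[i] = i * i);

        Console.WriteLine(string.Join(", ", squares));
    }
}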
Example #5
        /// <summary>
        /// Executes a series of tasks in parallel
        /// </summary>
        /// <param name="threadCount">The number of concurrent execution threads to run</param>
        /// <param name="actions">A series of method bodies to execute</param>
        public static void Invoke(int threadCount, params Action[] actions)
        {
            int            counter           = threadCount;
            AutoResetEvent threadFinishEvent = new AutoResetEvent(false);
            int            index             = -1;
            Exception      exception         = null;

            for (int i = 0; i < threadCount; i++)
            {
                ThreadPool.QueueUserWorkItem(
                    delegate(object o)
                {
                    int threadIndex = (int)o;

                    while (exception == null)
                    {
                        int currentIndex = Interlocked.Increment(ref index);

                        if (currentIndex >= actions.Length)
                        {
                            break;
                        }

                        try { actions[currentIndex](); }
                        catch (Exception ex) { exception = ex; break; }
                    }

                    if (Interlocked.Decrement(ref counter) == 0)
                    {
                        threadFinishEvent.Set();
                    }
                }, i);
            }

            threadFinishEvent.WaitOne();

            if (exception != null)
            {
                throw exception;
            }
        }
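A hedged usage sketch for the Invoke helper above, with the same hypothetical ParallelHelper class name:

using System;

class Demo
{
    static void Main()
    {
        // Three independent actions are spread over two pool threads; the last
        // worker to finish decrements the counter and signals the event, and any
        // exception thrown by an action is rethrown on the calling thread.
        ParallelHelper.Invoke(2,
            () => Console.WriteLine("load configuration"),
            () => Console.WriteLine("warm cache"),
            () => Console.WriteLine("open log file"));
    }
}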
Example #6
            internal void Exit()
            {
                var op = this.Resource.Runtime.GetExecutingOperation<AsyncOperation>();

                this.Resource.Runtime.Assert(this.LockCountMap.ContainsKey(op), "Cannot invoke Dispose without acquiring the lock.");

                this.LockCountMap[op]--;
                if (this.LockCountMap[op] is 0)
                {
                    // Only release the lock if the invocation is not reentrant.
                    this.LockCountMap.Remove(op);
                    this.UnlockNextReady();
                    this.Resource.Runtime.ScheduleNextOperation(AsyncOperationType.Release);
                }

                int useCount = SystemInterlocked.Decrement(ref this.UseCount);

                if (useCount is 0 && Cache[this.SyncObject].Value == this)
                {
                    // It is safe to remove this instance from the cache.
                    Cache.TryRemove(this.SyncObject, out _);
                }
            }
Example #7
 public void Exit()
 {
     Interlocked.Decrement(ref m_owner);
 }
            private static void GateThreadStart()
            {
                bool disableStarvationDetection =
                    AppContextConfigHelper.GetBooleanConfig("System.Threading.ThreadPool.DisableStarvationDetection", false);
                bool debuggerBreakOnWorkStarvation =
                    AppContextConfigHelper.GetBooleanConfig("System.Threading.ThreadPool.DebugBreakOnWorkerStarvation", false);

                // The first reading is over a time range other than what we are focusing on, so we do not use the read other
                // than to send it to any runtime-specific implementation that may also use the CPU utilization.
                CpuUtilizationReader cpuUtilizationReader = default;

                _ = cpuUtilizationReader.CurrentUtilization;

                PortableThreadPool threadPoolInstance   = ThreadPoolInstance;
                LowLevelLock       threadAdjustmentLock = threadPoolInstance._threadAdjustmentLock;
                DelayHelper        delayHelper          = default;

                if (BlockingConfig.IsCooperativeBlockingEnabled)
                {
                    // Initialize memory usage and limits, and register to update them on gen 2 GCs
                    threadPoolInstance.OnGen2GCCallback();
                    Gen2GcCallback.Register(threadPoolInstance.OnGen2GCCallback);
                }

                while (true)
                {
                    RunGateThreadEvent.WaitOne();
                    int currentTimeMs = Environment.TickCount;
                    delayHelper.SetGateActivitiesTime(currentTimeMs);

                    while (true)
                    {
                        bool wasSignaledToWake = DelayEvent.WaitOne((int)delayHelper.GetNextDelay(currentTimeMs));
                        currentTimeMs = Environment.TickCount;

                        // Thread count adjustment for cooperative blocking
                        do
                        {
                            PendingBlockingAdjustment pendingBlockingAdjustment = threadPoolInstance._pendingBlockingAdjustment;
                            if (pendingBlockingAdjustment == PendingBlockingAdjustment.None)
                            {
                                delayHelper.ClearBlockingAdjustmentDelay();
                                break;
                            }

                            bool previousDelayElapsed = false;
                            if (delayHelper.HasBlockingAdjustmentDelay)
                            {
                                previousDelayElapsed =
                                    delayHelper.HasBlockingAdjustmentDelayElapsed(currentTimeMs, wasSignaledToWake);
                                if (pendingBlockingAdjustment == PendingBlockingAdjustment.WithDelayIfNecessary &&
                                    !previousDelayElapsed)
                                {
                                    break;
                                }
                            }

                            uint nextDelayMs = threadPoolInstance.PerformBlockingAdjustment(previousDelayElapsed);
                            if (nextDelayMs <= 0)
                            {
                                delayHelper.ClearBlockingAdjustmentDelay();
                            }
                            else
                            {
                                delayHelper.SetBlockingAdjustmentTimeAndDelay(currentTimeMs, nextDelayMs);
                            }
                        } while (false);

                        //
                        // Periodic gate activities
                        //

                        if (!delayHelper.ShouldPerformGateActivities(currentTimeMs, wasSignaledToWake))
                        {
                            continue;
                        }

                        if (ThreadPool.EnableWorkerTracking && NativeRuntimeEventSource.Log.IsEnabled())
                        {
                            NativeRuntimeEventSource.Log.ThreadPoolWorkingThreadCount(
                                (uint)threadPoolInstance.GetAndResetHighWatermarkCountOfThreadsProcessingUserCallbacks());
                        }

                        int cpuUtilization = cpuUtilizationReader.CurrentUtilization;
                        threadPoolInstance._cpuUtilization = cpuUtilization;

                        bool needGateThreadForRuntime = ThreadPool.PerformRuntimeSpecificGateActivities(cpuUtilization);

                        if (!disableStarvationDetection &&
                            threadPoolInstance._pendingBlockingAdjustment == PendingBlockingAdjustment.None &&
                            threadPoolInstance._separated.numRequestedWorkers > 0 &&
                            SufficientDelaySinceLastDequeue(threadPoolInstance))
                        {
                            bool addWorker = false;
                            threadAdjustmentLock.Acquire();
                            try
                            {
                                // Don't add a thread if we're at max or if we are already in the process of adding threads.
                                // This logic is slightly different from the native implementation in CoreCLR because there are
                                // no retired threads. In the native implementation, when hill climbing reduces the thread count
                                // goal, threads that are stopped from processing work are switched to "retired" state, and they
                                // don't count towards the equivalent existing thread count. In this implementation, the
                                // existing thread count includes any worker thread that has not yet exited, including those
                                // stopped from working by hill climbing, so here the number of threads processing work, instead
                                // of the number of existing threads, is compared with the goal. There may be alternative
                                // solutions, for now this is only to maintain consistency in behavior.
                                ThreadCounts counts = threadPoolInstance._separated.counts;
                                while (
                                    counts.NumProcessingWork < threadPoolInstance._maxThreads &&
                                    counts.NumProcessingWork >= counts.NumThreadsGoal)
                                {
                                    if (debuggerBreakOnWorkStarvation)
                                    {
                                        Debugger.Break();
                                    }

                                    ThreadCounts newCounts         = counts;
                                    short        newNumThreadsGoal = (short)(counts.NumProcessingWork + 1);
                                    newCounts.NumThreadsGoal = newNumThreadsGoal;

                                    ThreadCounts countsBeforeUpdate =
                                        threadPoolInstance._separated.counts.InterlockedCompareExchange(newCounts, counts);
                                    if (countsBeforeUpdate == counts)
                                    {
                                        HillClimbing.ThreadPoolHillClimber.ForceChange(
                                            newNumThreadsGoal,
                                            HillClimbing.StateOrTransition.Starvation);
                                        addWorker = true;
                                        break;
                                    }

                                    counts = countsBeforeUpdate;
                                }
                            }
                            finally
                            {
                                threadAdjustmentLock.Release();
                            }

                            if (addWorker)
                            {
                                WorkerThread.MaybeAddWorkingWorker(threadPoolInstance);
                            }
                        }

                        if (!needGateThreadForRuntime &&
                            threadPoolInstance._separated.numRequestedWorkers <= 0 &&
                            threadPoolInstance._pendingBlockingAdjustment == PendingBlockingAdjustment.None &&
                            Interlocked.Decrement(ref threadPoolInstance._separated.gateThreadRunningState) <= GetRunningStateForNumRuns(0))
                        {
                            break;
                        }
                    }
                }
            }