Example #1
        private static void Fork<TKey, TValue, TLocal>([NotNull] Dictionary<TKey, TValue> collection, int batchSize, int maxDegreeOfParallelism, [Pooled] Func<TLocal> initializeLocal, [Pooled] Action<KeyValuePair<TKey, TValue>, TLocal> action, [Pooled] Action<TLocal> finalizeLocal, [NotNull] BatchState state)
        {
            // Other threads already processed all work before this one started. ActiveWorkerCount is already 0
            if (state.StartInclusive >= collection.Count)
            {
                state.Release();
                return;
            }

            // This thread is now actively processing work items, meaning there might be work in progress
            Interlocked.Increment(ref state.ActiveWorkerCount);

            // Kick off more workers if there's any work left
            if (maxDegreeOfParallelism > 1 && state.StartInclusive + batchSize < collection.Count)
            {
                int workToSchedule = maxDegreeOfParallelism - 1;
                for (int i = 0; i < workToSchedule; i++)
                {
                    state.AddReference();
                }
                ThreadPool.Instance.QueueWorkItem(() => Fork(collection, batchSize, 0, initializeLocal, action, finalizeLocal, state), workToSchedule);
            }

            try
            {
                // Process batches synchronously as long as there are any
                int newStart;
                while ((newStart = Interlocked.Add(ref state.StartInclusive, batchSize)) - batchSize < collection.Count)
                {
                    try
                    {
                        // TODO: Reuse enumerator when processing multiple batches synchronously
                        var start = newStart - batchSize;
                        ExecuteBatch(collection, start, Math.Min(collection.Count, newStart) - start, initializeLocal, action, finalizeLocal);
                    }
                    finally
                    {
                        if (Interlocked.Add(ref state.WorkDone, batchSize) >= collection.Count)
                        {
                            // Don't wait for other threads to wake up and signal the BatchState, release as soon as work is finished
                            state.Finished.Set();
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Interlocked.Exchange(ref state.ExceptionThrown, e);
                throw;
            }
            finally
            {
                state.Release();
            }
        }
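
All four examples coordinate through a shared BatchState whose definition is not shown. Below is a minimal sketch inferred from the call sites: the member names (StartInclusive, WorkDone, ActiveWorkerCount, ExceptionThrown, Finished, AddReference, Release) come from the examples, while the reference-counting details are assumptions (the real type is presumably pooled, which is omitted here). It uses System and System.Threading.

        // Minimal sketch of the shared state; member names are taken from the
        // call sites above, the reference-counting behavior is an assumption.
        private class BatchState
        {
            public int StartInclusive;    // Next index to claim, advanced via Interlocked.Add
            public int WorkDone;          // Items completed so far (used by Examples #1 and #2)
            public int ActiveWorkerCount; // Workers currently processing batches
            public Exception ExceptionThrown;

            // Signaled once all work items have been processed
            public readonly ManualResetEventSlim Finished = new ManualResetEventSlim(false);

            private int referenceCount = 1; // Held by the thread that created the state

            public void AddReference() => Interlocked.Increment(ref referenceCount);

            public void Release()
            {
                // A pooled implementation would recycle the state once the last
                // reference is released; this sketch only tracks the count.
                Interlocked.Decrement(ref referenceCount);
            }
        }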
Example #2
        private static void Fork(int endExclusive, int batchSize, int maxDegreeOfParallelism, [Pooled] Action<int> action, [NotNull] BatchState state)
        {
            // Other threads already processed all work before this one started. ActiveWorkerCount is already 0
            if (state.StartInclusive >= endExclusive)
            {
                state.Release();
                return;
            }

            // This thread is now actively processing work items, meaning there might be work in progress
            Interlocked.Increment(ref state.ActiveWorkerCount);

            // Kick off more workers if there's any work left
            if (maxDegreeOfParallelism > 1 && state.StartInclusive + batchSize < endExclusive)
            {
                int workToSchedule = maxDegreeOfParallelism - 1;
                for (int i = 0; i < workToSchedule; i++)
                {
                    state.AddReference();
                }
                ThreadPool.Instance.QueueWorkItem(() => Fork(endExclusive, batchSize, 0, action, state), workToSchedule);
            }

            try
            {
                // Process batches synchronously as long as there are any
                int newStart;
                while ((newStart = Interlocked.Add(ref state.StartInclusive, batchSize)) - batchSize < endExclusive)
                {
                    try
                    {
                        ExecuteBatch(newStart - batchSize, Math.Min(endExclusive, newStart), action);
                    }
                    finally
                    {
                        if (Interlocked.Add(ref state.WorkDone, batchSize) >= endExclusive)
                        {
                            // Don't wait for other threads to wake up and signal the BatchState, release as soon as work is finished
                            state.Finished.Set();
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Interlocked.Exchange(ref state.ExceptionThrown, e);
                throw;
            }
            finally
            {
                state.Release();
            }
        }
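
ExecuteBatch itself is not shown in these examples either. For the index-range overload called in Examples #2 and #4, a plausible minimal sketch is a plain loop over the claimed half-open range; the real method may additionally handle delegate pooling or profiling. The parameter names are assumptions based on the call sites.

        // Sketch of the index-range overload of ExecuteBatch, assuming the
        // arguments are (fromInclusive, toExclusive, action) as the call sites suggest.
        private static void ExecuteBatch(int fromInclusive, int toExclusive, Action<int> action)
        {
            for (int i = fromInclusive; i < toExclusive; i++)
            {
                action(i);
            }
        }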
Example #3
        private static void Fork<TKey, TValue, TLocal>([NotNull] Dictionary<TKey, TValue> collection, int batchSize, int maxDegreeOfParallelism, [Pooled] Func<TLocal> initializeLocal, [Pooled] Action<KeyValuePair<TKey, TValue>, TLocal> action, [Pooled] Action<TLocal> finalizeLocal, [NotNull] BatchState state)
        {
            // Other threads already processed all work before this one started. ActiveWorkerCount is already 0
            if (state.StartInclusive >= collection.Count)
            {
                state.Release();
                return;
            }

            // This thread is now actively processing work items, meaning there might be work in progress
            Interlocked.Increment(ref state.ActiveWorkerCount);

            // Kick off another worker if there's any work left
            if (maxDegreeOfParallelism > 1 && state.StartInclusive + batchSize < collection.Count)
            {
                state.AddReference();
                ThreadPool.Instance.QueueWorkItem(() => Fork(collection, batchSize, maxDegreeOfParallelism - 1, initializeLocal, action, finalizeLocal, state));
            }

            try
            {
                // Process batches synchronously as long as there are any
                int newStart;
                while ((newStart = Interlocked.Add(ref state.StartInclusive, batchSize)) - batchSize < collection.Count)
                {
                    // TODO: Reuse enumerator when processing multiple batches synchronously
                    var start = newStart - batchSize;
                    ExecuteBatch(collection, start, Math.Min(collection.Count, newStart) - start, initializeLocal, action, finalizeLocal);
                }
            }
            finally
            {
                state.Release();

                // If this was the last active worker, signal completion
                if (Interlocked.Decrement(ref state.ActiveWorkerCount) == 0)
                {
                    state.Finished.Set();
                }
            }
        }
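
The dictionary overload of ExecuteBatch called in Examples #1 and #3 takes an offset and a count rather than a range. A sketch consistent with those call sites follows; note how it must enumerate from the beginning of the dictionary to reach the batch offset, which is what the "Reuse enumerator" TODO in Examples #1 and #3 is about. Parameter names and the null handling are assumptions.

        // Sketch of the dictionary overload of ExecuteBatch: skip to `offset`,
        // then run `count` items with a per-batch local value. Assumed, not shown above.
        private static void ExecuteBatch<TKey, TValue, TLocal>(Dictionary<TKey, TValue> collection, int offset, int count, Func<TLocal> initializeLocal, Action<KeyValuePair<TKey, TValue>, TLocal> action, Action<TLocal> finalizeLocal)
        {
            TLocal local = initializeLocal != null ? initializeLocal() : default;
            try
            {
                int index = 0;
                foreach (var item in collection)
                {
                    if (index >= offset + count)
                        break;
                    if (index >= offset)
                        action(item, local);
                    index++;
                }
            }
            finally
            {
                finalizeLocal?.Invoke(local);
            }
        }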
Example #4
        private static void Fork(int endExclusive, int batchSize, int maxDegreeOfParallelism, [Pooled] Action<int> action, [NotNull] BatchState state)
        {
            // Other threads already processed all work before this one started. ActiveWorkerCount is already 0
            if (state.StartInclusive >= endExclusive)
            {
                state.Release();
                return;
            }

            // This thread is now actively processing work items, meaning there might be work in progress
            Interlocked.Increment(ref state.ActiveWorkerCount);

            // Kick off another worker if there's any work left
            if (maxDegreeOfParallelism > 1 && state.StartInclusive + batchSize < endExclusive)
            {
                state.AddReference();
                ThreadPool.Instance.QueueWorkItem(() => Fork(endExclusive, batchSize, maxDegreeOfParallelism - 1, action, state));
            }

            try
            {
                // Process batches synchronously as long as there are any
                int newStart;
                while ((newStart = Interlocked.Add(ref state.StartInclusive, batchSize)) - batchSize < endExclusive)
                {
                    ExecuteBatch(newStart - batchSize, Math.Min(endExclusive, newStart), action);
                }
            }
            finally
            {
                state.Release();

                // If this was the last active worker, signal completion
                if (Interlocked.Decrement(ref state.ActiveWorkerCount) == 0)
                {
                    state.Finished.Set();
                }
            }
        }
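
The two variants differ in how completion is signaled: Examples #1 and #2 set Finished from the WorkDone counter as soon as the last item completes, while Examples #3 and #4 set it only when the last active worker winds down, which can release a waiting caller slightly later. In both cases the calling thread participates as a worker and then blocks on Finished. A hypothetical entry point for the Example #4 variant might look like the sketch below; the For name, the batch sizing, and the exception surfacing are all assumptions, not shown in the examples.

        // Hypothetical caller for the Fork variant in Example #4; only Fork and
        // the BatchState members are taken from the examples, the rest is assumed.
        public static void For(int fromInclusive, int toExclusive, Action<int> action)
        {
            int count = toExclusive - fromInclusive;
            if (count <= 0)
                return;

            int maxDegreeOfParallelism = Environment.ProcessorCount;
            int batchSize = Math.Max(1, count / (maxDegreeOfParallelism * 4));

            var state = new BatchState { StartInclusive = fromInclusive };
            Fork(toExclusive, batchSize, maxDegreeOfParallelism, action, state);

            // The calling thread has run out of batches, but queued workers may
            // still be processing theirs; wait for the completion signal.
            state.Finished.Wait();

            // Surface an exception captured by a worker (set in Examples #1 and #2)
            if (state.ExceptionThrown != null)
                throw new AggregateException(state.ExceptionThrown);
        }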