public Task SendAsync(Action work, bool checkLock, int timeOut)
        {
            ActionWrapper at = new ActionWrapper(work);

            at.TimeOut = timeOut;
            lock (nodeLock)
            {
                int thId    = TaskScheduler.Current.Id;
                var newNode = new ActorNode();
                if (threadMap.ContainsKey(thId))
                {
                    newNode.Parent = threadMap[thId].activeNode;
                }
                newNode.Actor = cacheActor;
                if (work.Target == null || work.Method.DeclaringType.Name.Contains("<>c"))
                {
                    newNode.Trace = work.Method.DeclaringType.FullName + ":" + work.Method.Name + "(" + work.ToString() + ")";
                }
                else
                {
                    newNode.Trace = work.Target.GetType().FullName + ":" + work.Method.Name + "(" + work.ToString() + ")";
                }
                if (checkLock)
                {
                    checkNodeIlegal(newNode);
                }
                else
                {
                    newNode.Parent = null;
                }
                at.Node = newNode;
            }
            actionBlock.SendAsync(at);
            return(at.Tcs.Task);
        }
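
ActionWrapper, ActorNode, threadMap and checkNodeIlegal come from the surrounding actor framework and are not shown here. Purely as a hedged sketch, assuming ActionWrapper simply pairs the delegate with a TaskCompletionSource that backs the task returned from SendAsync, it might look roughly like this (all members below are assumptions, not the original type):

        public class ActionWrapper
        {
            public ActionWrapper(Action work) => Work = work;

            public Action Work { get; }
            public int TimeOut { get; set; }
            public object Owner { get; set; }       // hypothetical owner reference (set in Example No. 9 below)
            public ActorNode Node { get; set; }     // hypothetical call-chain node used for lock checking
            public TaskCompletionSource<bool> Tcs { get; } = new TaskCompletionSource<bool>();

            // The consuming ActionBlock would invoke the work and complete Tcs,
            // which is the task handed back to callers of SendAsync.
            public void Invoke()
            {
                try
                {
                    Work();
                    Tcs.TrySetResult(true);
                }
                catch (Exception ex)
                {
                    Tcs.TrySetException(ex);
                }
            }
        }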
Example No. 2
        internal static void Run()
        {
            // Create an ActionBlock<int> object that prints its input
            // and throws ArgumentOutOfRangeException if the input
            // is less than zero.
            var throwIfNegative = new ActionBlock <int>(n =>
            {
                Console.WriteLine("n = {0}", n);
                if (n < 0)
                {
                    throw new ArgumentOutOfRangeException();
                }
            });

            // Create a continuation task that prints the overall
            // task status to the console when the block finishes.
            throwIfNegative.Completion.ContinueWith(task =>
            {
                Console.WriteLine("The status of the completion task is '{0}'.", task.Status);
            });

            // Post values to the block.
            throwIfNegative.SendAsync(0);
            throwIfNegative.SendAsync(-1);
            throwIfNegative.SendAsync(1);
            throwIfNegative.SendAsync(-2);
            throwIfNegative.Complete();

            WaitForCompletion(throwIfNegative);
        }
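
WaitForCompletion is a helper defined elsewhere in that sample. A minimal sketch of what such a helper could look like, assuming it only waits on the block's Completion task and observes the expected ArgumentOutOfRangeException (this is an assumption, not the sample's actual helper):

        static void WaitForCompletion(ActionBlock<int> block)
        {
            try
            {
                block.Completion.Wait();
            }
            catch (AggregateException ae)
            {
                // The block faults on the first negative value; any remaining
                // buffered messages are dropped and Completion ends up Faulted.
                ae.Handle(e => e is ArgumentOutOfRangeException);
            }
        }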
Example No. 3
        public async Task <IEnumerable <string> > Process()
        {
            await _actionFetchBlock.SendAsync(0);

            await _actionFetchBlock.Completion;

            return(_generatedFiles.ToList());
        }
Example No. 4
        public async Task ConsumeAsync(IObservable <RawCapture> frames)
        {
            await frames.ForEachAsync(async frame =>
            {
                await m_inputBlock.SendAsync(frame);
            });

            m_inputBlock.Complete();
        }
        /// <inheritdoc />
        public async Task ScheduleEventAsync(ProtocolEvent ev, bool processNow)
        {
            var sent = await _enqueueBlock
                       .SendAsync(new Tuple <TaskCompletionSource <bool>, ProtocolEvent, bool>(null, ev, processNow))
                       .ConfigureAwait(false);

            if (!sent)
            {
                throw new DriverInternalError("Could not schedule event in the ProtocolEventDebouncer.");
            }
        }
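
SendAsync returns a Task&lt;bool&gt; whose result is false when the target declines the message, for instance because the block has already completed, which is exactly the condition the guard above converts into a DriverInternalError. A small self-contained illustration of that behaviour (independent of the debouncer types above):

        static async Task SendAfterCompleteReturnsFalse()
        {
            var block = new ActionBlock<int>(i => Console.WriteLine(i));
            block.Complete();

            // Complete() stops the block from accepting new messages, so the
            // send task completes with result false instead of throwing.
            bool accepted = await block.SendAsync(42);
            Console.WriteLine(accepted); // False
        }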
Example No. 6
        //[Fact(Skip = "Outerloop")]
        public void TestReleasingOfPostponedMessages()
        {
            const int excess = 5;

            for (int dop = 1; dop <= Parallelism.ActualDegreeOfParallelism; dop++)
            {
                var localPassed            = true;
                var nextOfferEvent         = new AutoResetEvent(true);
                var releaseProcessingEvent = new ManualResetEventSlim();
                var options = new ExecutionDataflowBlockOptions {
                    MaxDegreeOfParallelism = dop, BoundedCapacity = dop
                };
                var action          = new ActionBlock <int>(x => { nextOfferEvent.Set(); releaseProcessingEvent.Wait(); }, options);
                var sendAsyncDop    = new Task <bool> [dop];
                var sendAsyncExcess = new Task <bool> [excess];

                // Send DOP messages
                for (int i = 0; i < dop; i++)
                {
                    // Throttle sending to make sure we saturate DOP exactly
                    nextOfferEvent.WaitOne();
                    sendAsyncDop[i] = action.SendAsync(i);
                }

                // Send EXCESS more messages. All of these will surely be postponed
                for (int i = 0; i < excess; i++)
                {
                    sendAsyncExcess[i] = action.SendAsync(dop + i);
                }

                // Wait until the tasks for the first DOP messages get completed
                Task.WaitAll(sendAsyncDop, 5000);

                // Complete the block. This will cause the EXCESS messages to be declined.
                action.Complete();
                releaseProcessingEvent.Set();

                // Verify all DOP messages have been accepted
                for (int i = 0; i < dop; i++)
                {
                    localPassed &= sendAsyncDop[i].Result;
                }
                Assert.True(localPassed, string.Format("DOP={0} : Consumed up to DOP - {1}", dop, localPassed ? "Passed" : "FAILED"));


                // Verify all EXCESS messages have been declined
                localPassed = true;
                for (int i = 0; i < excess; i++)
                {
                    localPassed &= !sendAsyncExcess[i].Result;
                }
                Assert.True(localPassed, string.Format("DOP={0} : Declined excess - {1}", dop, localPassed ? "Passed" : "FAILED"));
            }
        }
        static async Task BlockAsyncQueue()
        {
            // Asynchronous producer code
            ActionBlock <int> queue = new ActionBlock <int>(item => Console.WriteLine(item));
            await queue.SendAsync(7);

            await queue.SendAsync(13);

            // Synchronous producer code
            queue.Post(7);
            queue.Post(13);
            queue.Complete();
        }
Example No. 8
        // ActionBlock<T> defines a producer/consumer queue with a specific action.
        async void Example16()
        {
            ActionBlock <int> queue = new ActionBlock <int>(item => Trace.WriteLine(item)); // The consumer code is passed to the queue's constructor
            // Asynchronous producer code
            await queue.SendAsync(7);

            await queue.SendAsync(13);

            // Synchronous producer code
            queue.Post(7);
            queue.Post(13);
            queue.Complete();
        }
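
The difference between the synchronous Post and the awaitable SendAsync in the two examples above only becomes visible once the block has a BoundedCapacity: Post returns false immediately when the block is full, while SendAsync returns a task that completes once the item is eventually accepted (or declined). A short sketch of that behaviour (the method name is made up for illustration):

        static async Task BoundedQueueExample()
        {
            var queue = new ActionBlock<int>(
                async item => await Task.Delay(100),   // slow consumer
                new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });

            queue.Post(1);                 // accepted, starts processing
            bool posted = queue.Post(2);   // false: the item being processed still counts against BoundedCapacity
            Console.WriteLine(posted);

            // SendAsync waits asynchronously until the block has room again.
            bool sent = await queue.SendAsync(2);
            Console.WriteLine(sent);       // True once capacity frees up

            queue.Complete();
            await queue.Completion;
        }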
Example No. 9
        public Task SendAsync(Action work, bool checkLock = true, int timeOut = TIME_OUT)
        {
            if (Settings.Ins.IsDebug)
            {
                return(checkActor.SendAsync(work, checkLock, timeOut));
            }

            ActionWrapper at = new ActionWrapper(work);

            at.Owner   = this;
            at.TimeOut = timeOut;
            actionBlock.SendAsync(at);
            return(at.Tcs.Task);
        }
Example No. 10
 public void Enqueue(Action action)
 {
     if (!queue.Post(action))
     {
         queue.SendAsync(action).Wait();
     }
 }
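
In the Enqueue method above, falling back to SendAsync(...).Wait() blocks the calling thread until the bounded block has room again. If the caller can await, a fully asynchronous variant of the same fast-path idea might look like this (a sketch, not part of the original code):

 public Task EnqueueAsync(Action action)
 {
     // Post is the cheap synchronous fast path; fall back to the awaitable
     // SendAsync only when the block is currently full. Note that SendAsync's
     // bool result (declined vs. accepted) is not surfaced in this sketch.
     return queue.Post(action) ? Task.CompletedTask : queue.SendAsync(action);
 }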
        public async Task Start()
        {
            // Options for the blocks that run on many threads
            // Limit the bounded capacity to keep memory consumption down
            var parallelBlockOptions = new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = Environment.ProcessorCount, BoundedCapacity = 1000
            };

            // Block that processes the event data
            var eventBlock = new ActionBlock <string>((text) => callback(text), parallelBlockOptions);

            // Block that reads the tech log files
            var readBlock = new ActionBlock <string>((filePath) => TechLogHelper.ReadLogFile(filePath, eventBlock), parallelBlockOptions);

            foreach (var file in logFiles)
            {
                await readBlock.SendAsync(file);
            }

            // Mark block as completed
            readBlock.Complete();

            await readBlock.Completion.ContinueWith(c => eventBlock.Complete());

            await eventBlock.Completion;
        }
Example No. 12
        static async Task MainAsync()
        {
            // Create service collection
            ServiceCollection serviceCollection = new ServiceCollection();

            ConfigureServices(serviceCollection);

            // Create service provider
            IServiceProvider serviceProvider = serviceCollection.BuildServiceProvider();

            // Get backup sources for client
            List <String> sources = configuration.GetSection("Backup:Sources").GetChildren().Select(x => x.Value).ToList();

            // Run all tasks
            //await Task.WhenAll(sources.Select(i => serviceProvider.GetService<App>().Run(i)).ToArray());

            // Create a block with an asynchronous action
            var block = new ActionBlock <string>(
                async x => await serviceProvider.GetService <App>().Run(x),
                new ExecutionDataflowBlockOptions
            {
                BoundedCapacity        = int.Parse(configuration["Backup:BoundedCapacity"]), // Cap the item count
                MaxDegreeOfParallelism = int.Parse(configuration["Backup:MaxDegreeOfParallelism"])
                                                                                             //MaxDegreeOfParallelism = Environment.ProcessorCount, // Parallelize on all cores
            });

            // Add items to the block and asynchronously wait if BoundedCapacity is reached
            foreach (string source in sources)
            {
                await block.SendAsync(source);
            }

            block.Complete();
            await block.Completion;
        }
Example No. 13
 private void GetLastEventProcessedForHandlers()
 {
     // TODO: calculate the lowest-numbered LEP across all the handlers and use that as the start position.
     // Ask each registered handler to get their last processed event and hold on to it.
     var actionBlock = new ActionBlock<IHandler>(x => x.GetLastPositionProcessed(), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 8 });
     _eventHandlers.ForEach(async x=> await actionBlock.SendAsync(x));
 }
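
Note that the async lambda passed to List&lt;T&gt;.ForEach above compiles to an async void delegate, so the awaits are never observed and the method returns before the sends have completed. A common alternative, shown here only as a hedged sketch rather than the original code, is to collect the send tasks and await them:

 private async Task GetLastEventProcessedForHandlersAsync()
 {
     var actionBlock = new ActionBlock<IHandler>(
         x => x.GetLastPositionProcessed(),
         new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 8 });

     // Await every SendAsync so failures (and back-pressure, if a
     // BoundedCapacity were set) are actually observed by the caller.
     await Task.WhenAll(_eventHandlers.Select(h => actionBlock.SendAsync(h)));

     actionBlock.Complete();
     await actionBlock.Completion;
 }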
Example No. 14
        public void ProduceLogs(int count, int buffSize)
        {
            var bufferOptions = new DataflowBlockOptions() { BoundedCapacity = buffSize };
            var writerOptions = new ExecutionDataflowBlockOptions() { BoundedCapacity = 10, MaxDegreeOfParallelism = 1, MaxMessagesPerTask = 10, SingleProducerConstrained = true };

            LogGenerator g = new LogGenerator();

            var file = new StreamWriter("basic.async.buff.log", false);

            BufferBlock<string> buffer = new BufferBlock<string>(bufferOptions);
            ActionBlock<string> writer = new ActionBlock<string>(s => file.WriteLine(s), writerOptions);

            buffer.LinkTo(writer, new DataflowLinkOptions() { PropagateCompletion = true });

            for (int i = 0; i < count; i++)
            {
                g.Next();

                var line = string.Format(g.FormatStr, g.Param1, g.Param2, g.Param3, g.Param4, g.Param5, g.Param6);
                writer.SendAsync(line).Wait();
            }

            buffer.Complete();

            Completed = writer.Completion.ContinueWith(t => file.Close());
        }
Example No. 15
        //private ConcurrentBag<bool> results;

        //private int _consoleRow { get; set; } = 2;

        public TplDataFlowQueue(BlockingCollection <WorkItem> jobQueue)
        {
            var executionDataFlowBlockOptions = new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = 3,
                BoundedCapacity        = 1
            };

            _actionBlockOfJobs = new ActionBlock <WorkItem>(job =>
            {
                //Thread.Sleep(3000);
                job.Execute(Thread.CurrentThread.ManagedThreadId);
                //_consoleRow += 1;
            }, executionDataFlowBlockOptions);

            //_jobQueue = new BlockingCollection<WorkItem>(GetPriorityQueue());

            Task.Run(async() =>
            {
                foreach (var job in jobQueue.GetConsumingEnumerable())
                {
                    await _actionBlockOfJobs.SendAsync(job);
                }
            });
        }
        public async Task TestReleasingOfPostponedMessages()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                Barrier      barrier1 = new Barrier(2), barrier2 = new Barrier(2);
                Action <int> body    = i => { barrier1.SignalAndWait(); barrier2.SignalAndWait(); };
                var          options = new ExecutionDataflowBlockOptions {
                    BoundedCapacity = 1
                };
                ActionBlock <int> ab = sync ?
                                       new ActionBlock <int>(body, options) :
                                       new ActionBlock <int>(i => TaskShim.Run(() => body(i)), options);

                ab.Post(0);
                barrier1.SignalAndWait();

                Task <bool>[] sends = Enumerable.Range(0, 10).Select(i => ab.SendAsync(i)).ToArray();
                Assert.All(sends, s => Assert.False(s.IsCompleted));

                ab.Complete();
                barrier2.SignalAndWait();

                await ab.Completion;

                Assert.All(sends, s => Assert.False(s.Result));
            }
        }
Example No. 17
        internal override async Task GetSeriesImageImpAsync(GetSeriesImageRequest request, CancellationToken ct)
        {
            var createImage = new ActionBlock <Series>(async ser =>
            {
                var fsSeries = ser as FileSysSeriesImp;
                if (fsSeries == null)
                {
                    _logger.Warning("{service} got unexpected type of series object {@Series}", GetType(), ser);
                    return;
                }

                var image = await CreateFromSeriesDirAsync(fsSeries.SeriesDir, (dicomFile) => { return(dicomFile.CreateImage()); });
                if (image != null)
                {
                    if (ser.Orientation == FDCSeriesOrientation.eFDC_NO_ORIENTATION &&
                        image.Orientation != FDCSeriesOrientation.eFDC_NO_ORIENTATION)
                    {
                        ser.Orientation = image.Orientation;
                    }

                    request.RaiseImageGot(image);
                }
            }, new ExecutionDataflowBlockOptions {
                CancellationToken = ct, MaxDegreeOfParallelism = 1
            });


            foreach (var ser in request.Series)
            {
                await createImage.SendAsync(ser, ct);
            }

            createImage.Complete();
            await createImage.Completion;
        }
Example No. 18
 public async Task UploadAsync(IEnumerable <AssetModel> assets)
 {
     foreach (var asset in assets)
     {
         await pipeline.SendAsync(asset);
     }
 }
Example No. 19
        static async Task MainAsync(string[] args)
        {
            // Create service collection
            ServiceCollection serviceCollection = new ServiceCollection();

            ConfigureServices(serviceCollection);

            // Create service provider
            Log.Information("Building service provider");
            IServiceProvider serviceProvider = serviceCollection.BuildServiceProvider();

            // Get list of objects for backup
            List <Backup> backups = new List <Backup>();

            configuration.GetSection("Backups").Bind(backups);

            // Create a block with an asynchronous action
            Log.Information("Backing up databases");
            var block = new ActionBlock <Backup>(
                async x => await serviceProvider.GetService <App>().Run(x),
                new ExecutionDataflowBlockOptions
            {
                BoundedCapacity        = int.Parse(configuration["Threading:BoundedCapacity"]), // Cap the item count
                MaxDegreeOfParallelism = int.Parse(configuration["Threading:MaxDegreeOfParallelism"])
            });

            // Add items to the block and asynchronously wait if BoundedCapacity is reached
            foreach (Backup backup in backups)
            {
                await block.SendAsync(backup);
            }

            block.Complete();
            await block.Completion;
        }
Example No. 20
        /****************************************************************************/
        public static async Task ParallelForEach <T>(this IEnumerable <T> list, Func <long, T, Task> fnEach, int maxParallelism = 128, CancellationToken cancelToken = default)
        {
            maxParallelism = Math.Min(1024, Math.Max(2, maxParallelism));

            var block = new ActionBlock <Block <T> >(async(payload) =>
            {
                await fnEach(payload.Index, payload.Data);
            },
                                                     new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = maxParallelism, CancellationToken = cancelToken
            });

            long i = 0L;

            foreach (var item in list)
            {
                if (cancelToken.IsCancellationRequested)
                {
                    break;
                }

                await block.SendAsync(new Block <T> {
                    Index = i++, Data = item
                });
            }

            block.Complete();
            await block.Completion;
        }
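
The Block&lt;T&gt; payload type used by ParallelForEach is not shown; based on how Index and Data are used above, it is presumably something like the following minimal sketch:

        private sealed class Block<T>
        {
            public long Index { get; set; }   // position of the item within the source sequence
            public T Data { get; set; }       // the item itself
        }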
        public async Task Send(TextProcessingContext context, IPipe <TextProcessingContext> next)
        {
            var lineProcessingContexts = new List <LineProcessingContext>();

            for (int lineIndex = 0; lineIndex < context.RawLines.Length; lineIndex++)
            {
                lineProcessingContexts.Add(
                    new LineProcessingContext
                {
                    RawLine   = context.RawLines[lineIndex],
                    LineIndex = lineIndex
                });
            }

            var tasks = new List <Task>();

            foreach (var lineProcessingContext in lineProcessingContexts)
            {
                tasks.Add(_processLine.SendAsync(lineProcessingContext));
            }

            await Task.WhenAll(tasks);

            foreach (var lineProcessingContext in lineProcessingContexts)
            {
                context.ProcessedLines.Add(lineProcessingContext.ProcessedLine);
            }

            await next.Send(context);
        }
Example No. 22
        public async Task HandleTransactionsReceivedAsync(TransactionsReceivedEvent eventData)
        {
            if (_bestChainHash == Hash.Empty)
            {
                return;
            }

            foreach (var transaction in eventData.Transactions)
            {
                if (_processTransactionJobs.InputCount > _transactionOptions.PoolLimit)
                {
                    Logger.LogWarning("Already too many transaction processing job enqueued.");
                    break;
                }

                var queuedTransaction = new QueuedTransaction
                {
                    TransactionId = transaction.GetHash(),
                    Transaction   = transaction,
                    EnqueueTime   = TimestampHelper.GetUtcNow()
                };
                var sendResult = await _processTransactionJobs.SendAsync(queuedTransaction);

                if (!sendResult)
                {
                    Logger.LogWarning($"Process transaction:{queuedTransaction.TransactionId} failed.");
                }
            }
        }
        public async Task ActionBlockWillProcessAllAcceptedMessagesBeforeCompletion()
        {
            int      processedMessageCount  = 0;
            TimeSpan ProcessingTime         = TimeSpan.FromMilliseconds(50);
            TimeSpan ProcessingTimeTimesTen = TimeSpan.FromMilliseconds(500);

            ActionBlock <int> ab = new ActionBlock <int>(
                (msgId) =>
            {
                Task.Delay(ProcessingTime);
                Interlocked.Increment(ref processedMessageCount);
            },
                new ExecutionDataflowBlockOptions
            {
                BoundedCapacity           = 3,
                MaxDegreeOfParallelism    = 2,
                SingleProducerConstrained = false
            });

            // Send 10 messages as fast as possible
            Task.WaitAll(Enumerable.Range(0, 10).Select((i) => ab.SendAsync(i)).ToArray(), BurstTimeout);

            // Wait for completion and ensure that it does not time out and that all messages were processed before completion.
            ab.Complete();
            await Task.WhenAny(ab.Completion, Task.Delay(ProcessingTimeTimesTen + MessageArrivalTimeout));

            Assert.True(ab.Completion.IsCompleted);
            Assert.Equal(10, processedMessageCount);
        }
        public async Task TestNonGreedy()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                var          barrier1 = new Barrier(2);
                Action <int> body     = _ => barrier1.SignalAndWait();
                var          options  = new ExecutionDataflowBlockOptions {
                    BoundedCapacity = 1
                };

                ActionBlock <int> ab = sync ?
                                       new ActionBlock <int>(body, options) :
                                       new ActionBlock <int>(i => TaskShim.Run(() => body(i)), options);

                Task <bool>[] sends = Enumerable.Range(0, 10).Select(i => ab.SendAsync(i)).ToArray();
                for (int i = 0; i < sends.Length; i++)
                {
                    Assert.True(sends[i].Result);                     // The next send should have completed, with the value successfully accepted
                    for (int j = i + 1; j < sends.Length; j++)        // No further sends should have completed yet
                    {
                        Assert.False(sends[j].IsCompleted);
                    }
                    barrier1.SignalAndWait();
                }

                ab.Complete();
                await ab.Completion;
            }
        }
        private void Process()
        {
            if (_queue == null)
            {
                return;
            }

            // this is running with the exclusive scheduler so this is fine
            var queue = _queue;

            _queue = null;

            // not necessary to enqueue within the exclusive scheduler
            Task.Run(async() =>
            {
                var sent = false;
                try
                {
                    sent = await _processQueueBlock.SendAsync(queue).ConfigureAwait(false);
                }
                catch (Exception ex)
                {
                    ProtocolEventDebouncer.Logger.Error("EventDebouncer timer callback threw an exception.", ex);
                }

                if (!sent)
                {
                    foreach (var cb in queue.Callbacks)
                    {
                        cb?.TrySetException(new DriverInternalError("Could not process events in the ProtocolEventDebouncer."));
                    }
                }
            }).Forget();
        }
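
Forget() is an extension method from that codebase that deliberately discards the task. A hedged sketch of such an extension (hypothetical, not the driver's actual implementation) could be:

        internal static class TaskExtensions
        {
            // Marks a fire-and-forget task as intentionally unobserved.
            public static void Forget(this Task task)
            {
                // Touch the exception on faulted tasks so it does not surface
                // later as an unobserved task exception.
                _ = task.ContinueWith(
                    t => { _ = t.Exception; },
                    TaskContinuationOptions.OnlyOnFaulted);
            }
        }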
Example No. 26
        /// <summary>
        /// Normalizes the technology log and writes it to the supplied stream for subsequent joining with the SQL trace
        /// </summary>
        /// <param name="outputStream">Stream to which the output is written</param>
        public static async Task NormalizeEventsForJoin(string logDirectoryPath, StreamWriter outputStream)
        {
            var logFiles = GetLogFilesPaths(logDirectoryPath);

            var blockOptions = new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = Environment.ProcessorCount
            };

            var writeToOutputStream = new ActionBlock <string>(text => Common.WriteToOutputStream(text, outputStream));

            var normalizeEventForJoin = new TransformBlock <string, string>(NormalizeEventForJoin, blockOptions);

            var readFile = new ActionBlock <string>(async text => await ReadFile(text, "DBMSSQL", normalizeEventForJoin), blockOptions);

            normalizeEventForJoin.LinkTo(writeToOutputStream, new DataflowLinkOptions()
            {
                PropagateCompletion = true
            });

            foreach (var file in logFiles)
            {
                await readFile.SendAsync(file);
            }

            readFile.Complete();

            await readFile.Completion.ContinueWith(c => normalizeEventForJoin.Complete());

            await writeToOutputStream.Completion;
        }
        private async Task BloatDbAsync(Context context, IMemoizationSession session)
        {
            uint dummyFingerprintsToAdd = 40; // generates a ~52KB DB file
            var  addBlock = new ActionBlock <int>(
                async _ =>
            {
                var strongFingerprint = StrongFingerprint.Random();
                var contentHashListWithDeterminism = new ContentHashListWithDeterminism(
                    ContentHashList.Random(), Determinism[DeterminismNone]);

                var result = await session.AddOrGetContentHashListAsync(
                    context, strongFingerprint, contentHashListWithDeterminism, Token);
                Assert.True(result.Succeeded);
            },
                new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = System.Environment.ProcessorCount
            });

            while (--dummyFingerprintsToAdd > 0)
            {
                await addBlock.SendAsync(0);
            }

            addBlock.Complete();
            await addBlock.Completion;
        }
        /// <summary>
        /// Start a MJPEG on a http stream
        /// </summary>
        /// <param name="action">Delegate to run at each frame</param>
        /// <param name="url">url of the http stream (only basic auth is implemented)</param>
        /// <param name="login">optional login</param>
        /// <param name="password">optional password (only basic auth is implemented)</param>
        /// <param name="token">cancellation token used to cancel the stream parsing</param>
        /// <param name="chunkMaxSize">Max chunk byte size when reading stream</param>
        /// <param name="frameBufferSize">Maximum frame byte size</param>
        /// <returns></returns>
        ///
        public async static Task StartAsync(Action <Image> action, string url, string login = null, string password = null, CancellationToken? token = null, int chunkMaxSize = 1024, int frameBufferSize = 1024 * 1024)
        {
            var instance = new SimpleMJPEGDecoderDataFlow {
                frameBuffer = new byte[frameBufferSize],
                action      = action
            };

            var tok = token ?? CancellationToken.None;

            using (var cli = new HttpClient()) {
                if (!string.IsNullOrEmpty(login) && !string.IsNullOrEmpty(password))
                {
                    cli.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes($"{login}:{password}")));
                }

                using (var stream = await cli.GetStreamAsync(url).ConfigureAwait(false)) {
                    // Continuously pump the stream. The cancellation token is used to get out of the loop
                    while (true)
                    {
                        var streamBuffer = bm.TakeBuffer(chunkMaxSize);
                        var streamLength = await stream.ReadAsync(streamBuffer, 0, chunkMaxSize, tok).ConfigureAwait(false);

                        await block.SendAsync(Tuple.Create(instance, streamLength, streamBuffer));
                    }
                    ;
                }
            }
        }
Example No. 29
        public async Task QueryAsync(CancellationToken ct)
        {
            if (currentHandler == null)
            {
                return;
            }

            while (!ct.IsCancellationRequested)
            {
                try
                {
                    var time = clock.GetCurrentInstant();

                    var document = await schedulerStore.DequeueAsync(time);

                    if (document == null)
                    {
                        var oldTime = time.PlusTicks(-schedulerOptions.FailedTimeout.Ticks);

                        await schedulerStore.ResetDeadAsync(oldTime, time);

                        break;
                    }

                    await actionBlock.SendAsync(document, ct);
                }
                catch (Exception ex)
                {
                    log.LogError(ex, w => w
                                 .WriteProperty("action", "DequeueJobs")
                                 .WriteProperty("status", "Failed"));
                }
            }
        }
        public async Task ActionBlockTest()
        {
            var        sw    = new Stopwatch();
            const long ITERS = 100000000;

            Console.WriteLine("test count {0:N0}", ITERS);
            var are = new AutoResetEvent(false);

            var ab = new ActionBlock <int>(i => { if (i == ITERS)
                                                  {
                                                      are.Set();
                                                  }
                                           });

            for (var c = 0; c < 20; c++)
            {
                sw.Restart();
                for (int i = 1; i <= ITERS; i++)
                {
                    await ab.SendAsync(i);
                }
                are.WaitOne();
                sw.Stop();
                Console.WriteLine("test {0},Messages / sec: {1:N0} ops,run {2} ms",
                                  c, (ITERS * 1000 / sw.ElapsedMilliseconds), sw.ElapsedMilliseconds);
            }
        }
        public async Task ActionBlockBuffersAndPostponesMessagesWhenActionSlow()
        {
            int      processedMessageCount  = 0;
            TimeSpan ProcessingTime         = TimeSpan.FromMilliseconds(50);
            TimeSpan ProcessingTimeTimesTen = TimeSpan.FromMilliseconds(500);

            ActionBlock <int> ab = new ActionBlock <int>(
                (msgId) =>
            {
                Task.Delay(ProcessingTime);
                Interlocked.Increment(ref processedMessageCount);
            },
                new ExecutionDataflowBlockOptions
            {
                BoundedCapacity           = 3,
                MaxDegreeOfParallelism    = 2,
                SingleProducerConstrained = false
            });

            // Send 10 messages as fast as possible
            Task.WaitAll(Enumerable.Range(0, 10).Select((i) => ab.SendAsync(i)).ToArray(), BurstTimeout);

            // Assumption: all will be eventually processed.
            bool allProcessed = await TaskUtils.PollWaitAsync(() => processedMessageCount == 10, ProcessingTimeTimesTen + MessageArrivalTimeout);

            Assert.True(allProcessed);
        }
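
TaskUtils.PollWaitAsync, BurstTimeout and MessageArrivalTimeout are test helpers defined elsewhere. A plausible sketch of the polling helper, assuming it simply re-checks the condition until it holds or the timeout expires (an assumption, not the original helper):

        internal static async Task<bool> PollWaitAsync(Func<bool> condition, TimeSpan timeout)
        {
            var deadline = DateTime.UtcNow + timeout;
            while (DateTime.UtcNow < deadline)
            {
                if (condition())
                {
                    return true;
                }
                await Task.Delay(10);   // brief pause between polls
            }
            return condition();
        }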
Example No. 32
        public static async Task <ActionBlock <T> > ParallelEnumerateAsync <T>(IEnumerable <T> items, CancellationToken cancellationToken, int maxDegreeOfParallelism, Func <T, CancellationToken, Task> func)
        {
            var dataflowBlockOptions = new System.Threading.Tasks.Dataflow.ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = maxDegreeOfParallelism,
                CancellationToken      = cancellationToken
            };

            var workerBlock = new ActionBlock <T>(item => func(item, cancellationToken), dataflowBlockOptions);

            foreach (var item in items)
            {
                await Policy
                .Handle <Exception>()
                .WaitAndRetryAsync(new[] { TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(60) })
                .ExecuteAsync(async(ct) =>
                {
                    var sent = await workerBlock.SendAsync(item, cancellationToken);

                    if (!sent)
                    {
                        throw new Exception($"Could not process item: {item}");
                    }
                },
                              cancellationToken
                              );
            }

            workerBlock.Complete();

            await workerBlock.Completion;

            return(workerBlock);
        }
        public void ControlledTaskScheduler_WaitActionBlock_ThrowException() {
            Func<object, Task> f = o => Task.Factory.StartNew(SleepAndThrow);
            ActionBlock<object> actionBlock = new ActionBlock<object>(f, new ExecutionDataflowBlockOptions { TaskScheduler = _scheduler });

            Action a = () => _scheduler.Wait(actionBlock);
            actionBlock.SendAsync(null);
            a.ShouldThrow<CustomException>();
        }
        public async Task ActionBlockTest()
        {
            var sw = new Stopwatch();
            const long ITERS = 100000000;
            Console.WriteLine("test count {0:N0}",ITERS);
            var are = new AutoResetEvent(false);

            var ab = new ActionBlock<int>(i => { if (i == ITERS) are.Set(); });
            for(var c=0;c<20;c++)
            {
                sw.Restart();
                for (int i = 1; i <= ITERS; i++)
                   await ab.SendAsync(i);
                are.WaitOne();
                sw.Stop();
                Console.WriteLine("test {0},Messages / sec: {1:N0} ops,run {2} ms",
                    c,(ITERS * 1000 / sw.ElapsedMilliseconds), sw.ElapsedMilliseconds);
            }
        }
Example No. 35
        //[Fact(Skip = "Outerloop")]
        public void RunActionBlockConformanceTests()
        {
            // SYNC
            // Do everything twice - once through OfferMessage and once through Post
            for (FeedMethod feedMethod = FeedMethod._First; feedMethod < FeedMethod._Count; feedMethod++)
            {
                Func<DataflowBlockOptions, TargetProperties<int>> actionBlockFactory =
                    options =>
                    {
                        ITargetBlock<int> target = new ActionBlock<int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options);
                        return new TargetProperties<int> { Target = target, Capturer = target, ErrorVerifyable = true };
                    };

                CancellationTokenSource cancellationSource = new CancellationTokenSource();
                var defaultOptions = new ExecutionDataflowBlockOptions();
                var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
                var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 1 };
                var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 1, CancellationToken = cancellationSource.Token };
                var spscOptions = new ExecutionDataflowBlockOptions { SingleProducerConstrained = true };
                var spscMptOptions = new ExecutionDataflowBlockOptions { SingleProducerConstrained = true, MaxMessagesPerTask = 10 };

                Assert.True(FeedTarget(actionBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, dopOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, mptOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, mptOptions, 1, Intervention.Complete, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, cancellationOptions, 1, Intervention.Cancel, cancellationSource, feedMethod, true));

                Assert.True(FeedTarget(actionBlockFactory, spscOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, spscOptions, 1, Intervention.Complete, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, spscMptOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, spscMptOptions, 1, Intervention.Complete, null, feedMethod, true));
            }

            // Test scheduler usage
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    var sts = new SimpleTaskScheduler();

                    var options = new ExecutionDataflowBlockOptions { TaskScheduler = sts, MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded, MaxMessagesPerTask = 1 };
                    if (trial == 0) options.SingleProducerConstrained = true;

                    var ab = new ActionBlock<int>(i => localPassed &= TaskScheduler.Current.Id == sts.Id, options);
                    for (int i = 0; i < 2; i++) ab.Post(i);
                    ab.Complete();
                    ab.Completion.Wait();
                }

                Assert.True(localPassed, string.Format("{0}: Correct scheduler usage", localPassed ? "Success" : "Failure"));
            }

            // Test count
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    var barrier1 = new Barrier(2);
                    var barrier2 = new Barrier(2);
                    var ab = new ActionBlock<int>(i =>
                    {
                        barrier1.SignalAndWait();
                        barrier2.SignalAndWait();
                    }, new ExecutionDataflowBlockOptions { SingleProducerConstrained = (trial == 0) });
                    for (int iter = 0; iter < 2; iter++)
                    {
                        for (int i = 1; i <= 2; i++) ab.Post(i);
                        for (int i = 1; i >= 0; i--)
                        {
                            barrier1.SignalAndWait();
                            localPassed &= i == ab.InputCount;
                            barrier2.SignalAndWait();
                        }
                    }
                }

                Assert.True(localPassed, string.Format("{0}: InputCount", localPassed ? "Success" : "Failure"));
            }

            // Test ordering
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    int prev = -1;
                    var ab = new ActionBlock<int>(i =>
                    {
                        if (prev + 1 != i) localPassed &= false;
                        prev = i;
                    }, new ExecutionDataflowBlockOptions { SingleProducerConstrained = (trial == 0) });
                    for (int i = 0; i < 2; i++) ab.Post(i);
                    ab.Complete();
                    ab.Completion.Wait();
                }

                Assert.True(localPassed, string.Format("{0}: Correct ordering", localPassed ? "Success" : "Failure"));
            }

            // Test non-greedy
            {
                bool localPassed = true;
                var barrier = new Barrier(2);
                var ab = new ActionBlock<int>(i =>
                {
                    barrier.SignalAndWait();
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
                ab.SendAsync(1);
                Task.Delay(200).Wait();
                var sa2 = ab.SendAsync(2);
                localPassed &= !sa2.IsCompleted;
                barrier.SignalAndWait(); // for SendAsync(1)
                barrier.SignalAndWait(); // for SendAsync(2)
                localPassed &= sa2.Wait(100);
                int total = 0;
                ab = new ActionBlock<int>(i =>
                {
                    Interlocked.Add(ref total, i);
                    Task.Delay(1).Wait();
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
                for (int i = 1; i <= 100; i++) ab.SendAsync(i);
                SpinWait.SpinUntil(() => total == ((100 * 101) / 2), 30000);
                localPassed &= total == ((100 * 101) / 2);
                Assert.True(localPassed, string.Format("total={0} (must be {1})", total, (100 * 101) / 2));
                Assert.True(localPassed, string.Format("{0}: Non-greedy support", localPassed ? "Success" : "Failure"));
            }

            // Test that OperationCanceledExceptions are ignored
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    int sumOfOdds = 0;
                    var ab = new ActionBlock<int>(i =>
                    {
                        if ((i % 2) == 0) throw new OperationCanceledException();
                        sumOfOdds += i;
                    }, new ExecutionDataflowBlockOptions { SingleProducerConstrained = (trial == 0) });
                    for (int i = 0; i < 4; i++) ab.Post(i);
                    ab.Complete();
                    ab.Completion.Wait();
                    localPassed = sumOfOdds == (1 + 3);
                }

                Assert.True(localPassed, string.Format("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure"));
            }

            // Test using a precanceled token
            {
                bool localPassed = true;
                try
                {
                    var cts = new CancellationTokenSource();
                    cts.Cancel();
                    var dbo = new ExecutionDataflowBlockOptions { CancellationToken = cts.Token };
                    var ab = new ActionBlock<int>(i => { }, dbo);

                    localPassed &= ab.Post(42) == false;
                    localPassed &= ab.InputCount == 0;
                    localPassed &= ab.Completion != null;
                    ab.Complete();
                }
                catch (Exception)
                {
                    localPassed = false;
                }

                Assert.True(localPassed, string.Format("{0}: Precanceled tokens work correctly", localPassed ? "Success" : "Failure"));
            }

            // Test faulting
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    var ab = new ActionBlock<int>(i => { throw new InvalidOperationException(); },
                        new ExecutionDataflowBlockOptions { SingleProducerConstrained = (trial == 0) });
                    ab.Post(42);
                    ab.Post(1);
                    ab.Post(2);
                    ab.Post(3);
                    try { localPassed &= ab.Completion.Wait(5000); }
                    catch { }
                    localPassed &= ab.Completion.IsFaulted;
                    localPassed &= SpinWait.SpinUntil(() => ab.InputCount == 0, 500);
                    localPassed &= ab.Post(4) == false;
                }

                Assert.True(localPassed, string.Format("{0}: Faulted handled correctly", localPassed ? "Success" : "Failure"));
            }

            // ASYNC (a copy of the sync tests, but with the delegates returning Task instead of void)

            // Do everything twice - once through OfferMessage and once through Post
            for (FeedMethod feedMethod = FeedMethod._First; feedMethod < FeedMethod._Count; feedMethod++)
            {
                Func<DataflowBlockOptions, TargetProperties<int>> actionBlockFactory =
                    options =>
                    {
                        ITargetBlock<int> target = new ActionBlock<int>(i => TrackCapturesAsync(i), (ExecutionDataflowBlockOptions)options);
                        return new TargetProperties<int> { Target = target, Capturer = target, ErrorVerifyable = true };
                    };
                CancellationTokenSource cancellationSource = new CancellationTokenSource();
                var defaultOptions = new ExecutionDataflowBlockOptions();
                var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
                var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 10 };
                var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 100, CancellationToken = cancellationSource.Token };

                Assert.True(FeedTarget(actionBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, defaultOptions, 10, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, dopOptions, 1000, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, mptOptions, 10000, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, mptOptions, 10000, Intervention.Complete, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, cancellationOptions, 10000, Intervention.Cancel, cancellationSource, feedMethod, true));
            }

            // Test scheduler usage
            {
                bool localPassed = true;
                var sts = new SimpleTaskScheduler();
                var ab = new ActionBlock<int>(i =>
                    {
                        localPassed &= TaskScheduler.Current.Id == sts.Id;
                        return Task.Factory.StartNew(() => { });
                    }, new ExecutionDataflowBlockOptions { TaskScheduler = sts, MaxDegreeOfParallelism = -1, MaxMessagesPerTask = 10 });
                for (int i = 0; i < 2; i++) ab.Post(i);
                ab.Complete();
                ab.Completion.Wait();
                Assert.True(localPassed, string.Format("{0}: Correct scheduler usage", localPassed ? "Success" : "Failure"));
            }

            // Test count
            {
                bool localPassed = true;
                var barrier1 = new Barrier(2);
                var barrier2 = new Barrier(2);
                var ab = new ActionBlock<int>(i => Task.Factory.StartNew(() =>
                {
                    barrier1.SignalAndWait();
                    barrier2.SignalAndWait();
                }));
                for (int iter = 0; iter < 2; iter++)
                {
                    for (int i = 1; i <= 2; i++) ab.Post(i);
                    for (int i = 1; i >= 0; i--)
                    {
                        barrier1.SignalAndWait();
                        localPassed &= i == ab.InputCount;
                        barrier2.SignalAndWait();
                    }
                }
                Assert.True(localPassed, string.Format("{0}: InputCount", localPassed ? "Success" : "Failure"));
            }

            // Test ordering
            {
                bool localPassed = true;
                int prev = -1;
                var ab = new ActionBlock<int>(i =>
                {
                    return Task.Factory.StartNew(() =>
                    {
                        if (prev + 1 != i) localPassed &= false;
                        prev = i;
                    });
                });
                for (int i = 0; i < 2; i++) ab.Post(i);
                ab.Complete();
                ab.Completion.Wait();
                Assert.True(localPassed, string.Format("{0}: Correct ordering", localPassed ? "Success" : "Failure"));
            }

            // Test non-greedy
            {
                bool localPassed = true;
                var barrier = new Barrier(2);
                var ab = new ActionBlock<int>(i =>
                {
                    return Task.Factory.StartNew(() =>
                    {
                        barrier.SignalAndWait();
                    });
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
                ab.SendAsync(1);
                Task.Delay(200).Wait();
                var sa2 = ab.SendAsync(2);
                localPassed &= !sa2.IsCompleted;
                barrier.SignalAndWait(); // for SendAsync(1)
                barrier.SignalAndWait(); // for SendAsync(2)
                localPassed &= sa2.Wait(100);
                int total = 0;
                ab = new ActionBlock<int>(i =>
                {
                    return Task.Factory.StartNew(() =>
                    {
                        Interlocked.Add(ref total, i);
                        Task.Delay(1).Wait();
                    });
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
                for (int i = 1; i <= 100; i++) ab.SendAsync(i);
                SpinWait.SpinUntil(() => total == ((100 * 101) / 2), 30000);
                localPassed &= total == ((100 * 101) / 2);
                Assert.True(localPassed, string.Format("total={0} (must be {1})", total, (100 * 101) / 2));
                Assert.True(localPassed, string.Format("{0}: Non-greedy support", localPassed ? "Success" : "Failure"));
            }

            // Test that OperationCanceledExceptions are ignored
            {
                bool localPassed = true;
                int sumOfOdds = 0;
                var ab = new ActionBlock<int>(i =>
                {
                    if ((i % 2) == 0) throw new OperationCanceledException();
                    return Task.Factory.StartNew(() => { sumOfOdds += i; });
                });
                for (int i = 0; i < 4; i++) ab.Post(i);
                ab.Complete();
                ab.Completion.Wait();
                localPassed = sumOfOdds == (1 + 3);
                Assert.True(localPassed, string.Format("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure"));
            }

            // Test that null task is ignored
            {
                bool localPassed = true;
                int sumOfOdds = 0;
                var ab = new ActionBlock<int>(i =>
                {
                    if ((i % 2) == 0) return null;
                    return Task.Factory.StartNew(() => { sumOfOdds += i; });
                });
                for (int i = 0; i < 4; i++) ab.Post(i);
                ab.Complete();
                ab.Completion.Wait();
                localPassed = sumOfOdds == (1 + 3);
                Assert.True(localPassed, string.Format("{0}: null tasks are ignored", localPassed ? "Success" : "Failure"));
            }

            // Test faulting from the delegate
            {
                bool localPassed = true;
                var ab = new ActionBlock<int>(new Func<int, Task>(i => { throw new InvalidOperationException(); }));
                ab.Post(42);
                ab.Post(1);
                ab.Post(2);
                ab.Post(3);
                try { localPassed &= ab.Completion.Wait(100); }
                catch { }
                localPassed &= ab.Completion.IsFaulted;
                localPassed &= SpinWait.SpinUntil(() => ab.InputCount == 0, 500);
                localPassed &= ab.Post(4) == false;
                Assert.True(localPassed, string.Format("{0}: Faulted from delegate handled correctly", localPassed ? "Success" : "Failure"));
            }

            // Test faulting from the task
            {
                bool localPassed = true;
                var ab = new ActionBlock<int>(i => Task.Factory.StartNew(() => { throw new InvalidOperationException(); }));
                ab.Post(42);
                ab.Post(1);
                ab.Post(2);
                ab.Post(3);
                try { localPassed &= ab.Completion.Wait(100); }
                catch { }
                localPassed &= ab.Completion.IsFaulted;
                localPassed &= SpinWait.SpinUntil(() => ab.InputCount == 0, 500);
                localPassed &= ab.Post(4) == false;
                Assert.True(localPassed, string.Format("{0}: Faulted from task handled correctly", localPassed ? "Success" : "Failure"));
            }
        }
Example No. 36
        //[Fact(Skip = "Outerloop")]
        public void TestReleasingOfPostponedMessages()
        {
            const int excess = 5;
            for (int dop = 1; dop <= Parallelism.ActualDegreeOfParallelism; dop++)
            {
                var localPassed = true;
                var nextOfferEvent = new AutoResetEvent(true);
                var releaseProcessingEvent = new ManualResetEventSlim();
                var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, BoundedCapacity = dop };
                var action = new ActionBlock<int>(x => { nextOfferEvent.Set(); releaseProcessingEvent.Wait(); }, options);
                var sendAsyncDop = new Task<bool>[dop];
                var sendAsyncExcess = new Task<bool>[excess];

                // Send DOP messages
                for (int i = 0; i < dop; i++)
                {
                    // Throttle sending to make sure we saturate DOP exactly
                    nextOfferEvent.WaitOne();
                    sendAsyncDop[i] = action.SendAsync(i);
                }

                // Send EXCESS more messages. All of these will surely be postponed
                for (int i = 0; i < excess; i++)
                    sendAsyncExcess[i] = action.SendAsync(dop + i);

                // Wait until the tasks for the first DOP messages get completed
                Task.WaitAll(sendAsyncDop, 5000);

                // Complete the block. This will cause the EXCESS messages to be declined.
                action.Complete();
                releaseProcessingEvent.Set();

                // Verify all DOP messages have been accepted
                for (int i = 0; i < dop; i++) localPassed &= sendAsyncDop[i].Result;
                Assert.True(localPassed, string.Format("DOP={0} : Consumed up to DOP - {1}", dop, localPassed ? "Passed" : "FAILED"));


                // Verify all EXCESS messages have been declined
                localPassed = true;
                for (int i = 0; i < excess; i++) localPassed &= !sendAsyncExcess[i].Result;
                Assert.True(localPassed, string.Format("DOP={0} : Declined excess - {1}", dop, localPassed ? "Passed" : "FAILED"));
            }
        }