public void Run()
        {
            this.output.WriteLine("TPL start");
            this.CheckFileExists();

            var f = new FileInfo(this.sampleLogFileName);
            this.fileSizeInBytes = f.Length;
            this.lineSizeInBytesSoFar = 0;
            this.lineCount = 0;

            var sw = new Stopwatch();
            sw.Start();

            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 8 };

            var ab = new ActionBlock<string>(s => this.ProcessLine(s), options);

            using (TextReader reader = File.OpenText(this.sampleLogFileName))
            {
                string line;
                while ((line = reader.ReadLine()) != null)
                {
                    ab.Post(line);
                }
            }

            ab.Complete();
            ab.Completion.Wait();

            sw.Stop();
            this.ShowResults();

            this.output.WriteLine();
            this.output.WriteLine("TPL done in {0}", sw.Elapsed);
        }
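ProcessLine is not shown in this example. Since the block runs with MaxDegreeOfParallelism = 8, a minimal thread-safe sketch (assuming the counters set up above are the only shared state, and using the line's character count as a rough stand-in for its byte size) could look like this:

        // Hedged sketch only: Interlocked keeps the shared counters consistent when the
        // ActionBlock invokes ProcessLine from several threads at once.
        private void ProcessLine(string line)
        {
            Interlocked.Increment(ref this.lineCount);
            // line.Length counts characters; treat it as an approximation of bytes here.
            Interlocked.Add(ref this.lineSizeInBytesSoFar, line.Length);
        }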
Example #2
        public Engine(IWorkItemRepository repository, IActivityRunner activityRunner,
            IStateMachineProvider stateMachineProvider)
        {
            if (repository == null) throw new ArgumentNullException("repository");
            if (activityRunner == null) throw new ArgumentNullException("activityRunner");
            if (stateMachineProvider == null) throw new ArgumentNullException("stateMachineProvider");

            _repository = repository;
            _activityRunner = activityRunner;
            _stateMachineProvider = stateMachineProvider;

            _stateQueue = new ActionBlock<int>(id => UpdateState(id),
                new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = 1
                });

            _workerQueue = new ActionBlock<int>(id => RunActivity(id),
                new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = int.MaxValue
                });

            _stateQueue.Completion.ContinueWith(t => { _workerQueue.Complete(); }, TaskContinuationOptions.OnlyOnFaulted);

            _workerQueue.Completion.ContinueWith(t => { ((IDataflowBlock) _stateQueue).Fault(t.Exception); },
                TaskContinuationOptions.OnlyOnFaulted);
        }
Example #3
    public void Configure(string collectorName, XElement configElement, ISystemMetricsService systemMetrics)
    {
      _log = SuperCheapIOC.Resolve<ILog>();
      _systemMetrics = systemMetrics;

      var config = new SqlServerConfiguration(configElement.Attribute("connectionString").Value, configElement.ToInt("writeBatchSize"));

      _connectionString = config.ConnectionString;
      _collectorName = collectorName;
      _retries = config.Retries;

      InitialiseRetryHandling();

      _batchBlock = new BatchBlock<GraphiteLine>(config.WriteBatchSize);
      _actionBlock = new ActionBlock<GraphiteLine[]>(p => SendToDB(p), new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = 1 });
      _batchBlock.LinkTo(_actionBlock);

      _batchBlock.Completion.ContinueWith(p => _actionBlock.Complete());
      _actionBlock.Completion.ContinueWith(p => { _isActive = false; });

      _completionTask = new Task(() =>
      {
        _log.Info("SqlServerBackend - Completion has been signaled. Waiting for action block to complete.");
        _batchBlock.Complete();
        _actionBlock.Completion.Wait();
      });

    }
Example #4
        public void Complete()
        {
            //_bufferBlock?.Complete();
            _actionBlock?.Complete();

            //_bufferBlock = null;
            _actionBlock = null;
        }
Example #5
        /// <summary>
        /// Subscribe.
        /// </summary>
        /// <param name="uriPath"></param>
        /// <param name="callback"></param>
        /// <param name="token"></param>
        /// <returns></returns>
        protected async Task SubscribeToAsync(string uriPath, Symbol symbol, Action <TEventArgs> callback, CancellationToken token)
        {
            Logger?.LogInformation($"{GetType().Name}.{nameof(SubscribeToAsync)}: \"{BaseUri}{uriPath}\"");

            IsSubscribed = true;

            var bufferBlock = new BufferBlock <string>(new DataflowBlockOptions
            {
                EnsureOrdered      = true,
                CancellationToken  = token,
                BoundedCapacity    = DataflowBlockOptions.Unbounded,
                MaxMessagesPerTask = DataflowBlockOptions.Unbounded
            });

            var actionBlock = new ActionBlock <string>(json =>
            {
                try { DeserializeJsonAndRaiseEvent(json, symbol, token, callback); }
                catch (OperationCanceledException) { }
                catch (Exception e)
                {
                    if (!token.IsCancellationRequested)
                    {
                        Logger?.LogError(e, $"{GetType().Name}: Unhandled {nameof(DeserializeJsonAndRaiseEvent)} exception.");
                    }
                }
            },
            new ExecutionDataflowBlockOptions
            {
                BoundedCapacity           = 1,
                EnsureOrdered             = true,
                MaxDegreeOfParallelism    = 1,
                CancellationToken         = token,
                SingleProducerConstrained = true
            });

            try
            {
                bufferBlock.LinkTo(actionBlock);
                var uri = new Uri($"{BaseUri}{uriPath}");
                await WebSocket.RunAsync(uri, (sender, args) => OnClientMessage(args.Message, bufferBlock), token : token).ConfigureAwait(false);
            }
            catch (OperationCanceledException) { }
            catch (Exception e)
            {
                if (!token.IsCancellationRequested)
                {
                    Logger?.LogError(e, $"{GetType().Name}.{nameof(SubscribeToAsync)}");
                    throw;
                }
            }
            finally
            {
                bufferBlock?.Complete();
                actionBlock?.Complete();

                IsSubscribed = false;
            }
        }
Example #6
        public virtual void UnLink()
        {
            _callback = null;

            _isLinked = false;

            _bufferBlock?.Complete();
            _actionBlock?.Complete();
        }
Example #7
 public void TestPost()
 {
     foreach (bool bounded in DataflowTestHelpers.BooleanValues)
     {
         ActionBlock<int> ab = new ActionBlock<int>(i => { },
             new ExecutionDataflowBlockOptions { BoundedCapacity = bounded ? 1 : -1 }); // test greedy and then non-greedy
         Assert.True(ab.Post(0), "Expected non-completed ActionBlock to accept Post'd message");
         ab.Complete();
         Assert.False(ab.Post(0), "Expected Complete'd ActionBlock to decline messages");
     }
 }
Example #8
        public override void Run(bool runChildren)
        {
            DataRowBlock = new ActionBlock<DataRowObject>((n) => NextDataRow(n));

            SetEndAction(() => {
                DataRowBlock.Complete();
                DataRowBlock.Completion.Wait();
            });

            base.Run(runChildren);
        }
Example #9
        static public void ProcessingByTPL_StraightForwardImplementation()
        {
            const string pathToFiles = @"..\..\..\..\DataFiles";
            string[] files = Directory.GetFiles(pathToFiles, "*.txt", SearchOption.AllDirectories);

            var loadDataFromFileBlock = new TransformBlock<string[], List<CustomerTextData>>(fileItems =>
            {
                var factory = new CustomerTextDataFactory();
                return new List<CustomerTextData>(Array.ConvertAll(fileItems, factory.LoadFromFile));
            });
            var filterBlock = new TransformBlock<List<CustomerTextData>, List<CustomerTextData>>(textDataList =>
            {
                var filter = new FilterTextData(5);
                return textDataList.Where(filter.Run).ToList();
            });
            var toListBlock = new TransformManyBlock<List<CustomerTextData>, CustomerTextData>(textDataList =>
            {
                var queue = new ConcurrentQueue<CustomerTextData>();
                textDataList.ForEach(queue.Enqueue);
                return queue;
            });
            var action = new ActionBlock<CustomerTextData>(textData =>
            {
                var weight = new WeightTextData();
                int result = weight.Run(textData);
                Trace.WriteLine(result);
                Console.WriteLine(result);
            });

            loadDataFromFileBlock.LinkTo(filterBlock);
            filterBlock.LinkTo(toListBlock);
            toListBlock.LinkTo(action);

            loadDataFromFileBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)filterBlock).Fault(t.Exception);
                else filterBlock.Complete();
            });
            filterBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)toListBlock).Fault(t.Exception);
                else toListBlock.Complete();
            });
            toListBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)action).Fault(t.Exception);
                else action.Complete();
            });

            loadDataFromFileBlock.Post(files);
            loadDataFromFileBlock.Complete();
            action.Completion.Wait();
        }
Example #10
		public void CompleteTest ()
		{
			var block = new ActionBlock<int> (i => Thread.Sleep (100));

			for (int i = 0; i < 10; i++)
				Assert.IsTrue (block.Post (i), "Not Accepted");

			block.Complete ();
			// Items are still being processed, so Completion should not yet be complete
			Assert.IsFalse (block.Completion.IsCompleted);
			block.Completion.Wait ();
			Assert.IsTrue (block.Completion.IsCompleted);
		}
Example #11
		public void AsyncNullTest()
		{
			var scheduler = new TestScheduler ();
			var block = new ActionBlock<int> (
				i => null,
				new ExecutionDataflowBlockOptions { TaskScheduler = scheduler });

			Assert.IsTrue (block.Post (1));

			scheduler.ExecuteAll ();

			Assert.IsFalse (block.Completion.Wait (100));

			block.Complete ();

			Assert.IsTrue (block.Completion.Wait (100));
		}
Example #12
        internal static bool TransformThroughFilterToAction()
        {
            const int ITERS = 2;
            int completedCount = 0;

            var t = new TransformBlock<int, int>(i => i);
            var c = new ActionBlock<int>(i => completedCount++);

            t.LinkTo(c, i => true);
            t.Completion.ContinueWith(_ => c.Complete());

            for (int i = 0; i < ITERS; i++) t.Post(i);
            t.Complete();
            c.Completion.Wait();

            return completedCount == ITERS;
        }
Example #13
 private void Stop(Exception exception = null)
 {
     EnsureIsNotDisposed();
     if (_state != BotState.Started)
     {
         return;
     }
     _processor?.Post(new BotStopped());
     _processor?.Complete();
     _exception = _exception ?? exception;
     if (_exception == null)
     {
         _subject.OnCompleted();
     }
     else
     {
         _subject.OnError(_exception);
     }
     _state = BotState.Stopped;
 }
        public void ProduceLogs(int count, int buffSize)
        {
            var options = new ExecutionDataflowBlockOptions() { BoundedCapacity = buffSize, MaxDegreeOfParallelism = 1, SingleProducerConstrained = true };

            LogGenerator g = new LogGenerator();

            var file = new StreamWriter("basic.async.log", false);

            ActionBlock<string> writer = new ActionBlock<string>(s => file.WriteLine(s), options);

            for (int i = 0; i < count; i++)
            {
                g.Next();

                var line = string.Format(g.FormatStr, g.Param1, g.Param2, g.Param3, g.Param4, g.Param5, g.Param6);
                writer.SendAsync(line).Wait();
            }

            writer.Complete();

            Completed = writer.Completion.ContinueWith(t => file.Close());
        }
Example #15
        private static void EvaluateForests(Args arguments)
        {
            Contract.Requires(arguments.InputDirectory != null, "You must specify an input directory");
            Contract.Requires(Directory.Exists(arguments.InputDirectory), "The input directory must exist");
            // We have a bunch of forests and we will compare them by classifying against
            // unknown samples. The forests are in Weka format (.wtree).
            s_logger.Info("Evaluating random forests...");
            // create the tree from the sample csvs...
            var treeEvaluationBlock = new ActionBlock <string>(i =>
            {
                var classifier = RandomForest.FromWekaFile(i);
                s_logger.Info($"Evaluating forest from [{i}] against universe");
                // and now evaluate
                foreach (var evaluationFile in Directory.EnumerateFiles(arguments.InputDirectory, "*.csv"))
                {
                    if (!evaluationFile.Contains("-sample"))
                    {
                        classifier.EvaluateOnTrainingSet(evaluationFile, true, false, 0);
                    }
                }
            },
            new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = 1
            });

            // post each action
            foreach (var treeFile in Directory.EnumerateFiles(arguments.InputDirectory, "*.wtree"))
            {
                treeEvaluationBlock.Post(treeFile);
            }
            // complete
            treeEvaluationBlock.Complete();
            // wait
            treeEvaluationBlock.Completion.Wait();
            // done...
        }
Example #16
        public async Task <IList <JsonFile> > ExecuteAsync(string vam, IList <FreeFile> files, IFilter filter, ErrorReportingOptions warnings)
        {
            var filesIndex = new ConcurrentDictionary <string, FreeFile>(files.ToDictionary(f => f.Path, f => f));

            using (var reporter = new ProgressReporter <ProgressInfo>(StartProgress, ReportProgress, CompleteProgress))
            {
                var potentialScenes = files
                                      .Where(f => f.Extension == ".json")
                                      .Where(f => !filter.IsFiltered(f.LocalPath))
                                      .ToList();

                var scanSceneBlock = new ActionBlock <FreeFile>(
                    s => ScanSceneAsync(vam, s, potentialScenes.Count, filesIndex, reporter),
                    new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = 4
                });

                foreach (var potentialScene in potentialScenes)
                {
                    scanSceneBlock.Post(potentialScene);
                }

                scanSceneBlock.Complete();
                await scanSceneBlock.Completion;
            }

            Output.WriteLine($"Scanned {_scanned} scenes for references.");

            var scenes = _scenes.OrderBy(s => s.File.LocalPath).ToList();

            if (warnings == ErrorReportingOptions.ShowWarnings)
            {
                PrintWarnings(scenes);
            }

            return(scenes);
        }
Example #17
        public async Task WriteSequentialStream(int writes)
        {
            var stream   = _streams.Open("visits");
            var progress = new ProgressBar(40);

            progress.Report(0);

            int written = 0;

            async Task Write(int c)
            {
                await stream.AppendAsync(c, c.ToString()).ConfigureAwait(false);

                Interlocked.Increment(ref written);
                // ReSharper disable once AccessToDisposedClosure
                progress.Report((double)written / writes);
            }

            var worker = new ActionBlock <int>(Write, _unboundedOptions);

            var sw = new Stopwatch();

            sw.Start();

            for (int c = 1; c <= writes; c++)
            {
                await worker.SendAsync(c);
            }

            worker.Complete();
            await worker.Completion;

            sw.Stop();

            progress.Dispose();

            _reporter.Report($"Written {writes} in {sw.ElapsedMilliseconds}ms (batch time)");
        }
        private async Task <ConcurrentDictionary <string, List <Document> > > ReadDocuments(CancellationToken cancellation = default)
        {
            using (var operation = this.StartOperation(_telemetry))
            {
                _logger.LogInformation($"Started reading documents from {_source.Db}/{_source.Collection}");

                var docsByType = new ConcurrentDictionary <string, List <Document> >();
                var block      = new ActionBlock <string>(
                    async(docType) =>
                {
                    var query = new SqlQuerySpec(
                        "select * from c where c.documentType = @documentType",
                        new SqlParameterCollection(new[]
                    {
                        new SqlParameter("@documentType", docType)
                    }));
                    var documents = await _sourceClient.Query <Document>(query);
                    var docs      = documents.ToList();
                    docsByType.AddOrUpdate(docType, docs, (k, v) => docs);
                    _logger.LogInformation($"Read {docs.Count} documents for type {docType}.");
                },
                    new ExecutionDataflowBlockOptions()
                {
                    MaxDegreeOfParallelism = _syncSettings.MaxDegreeOfParallelism,
                    CancellationToken      = cancellation
                });
                foreach (var docType in _syncSettings.DocumentTypes)
                {
                    block.Post(docType);
                }
                block.Complete();
                await block.Completion;

                _logger.LogInformation($"Total of {docsByType.Sum(kvp => kvp.Value.Count)} documents found.");

                return(docsByType);
            }
        }
Example #19
        /// <summary>
        /// Reads all pages from the API.
        /// </summary>
        public void Read()
        {
            // Request the first page
            var urlPage1  = GetApiPerPage(1);
            var xmlParser = RequestXmlResponse(urlPage1);

            // Read header
            xmlParser.Select(config.RootXPath);
            TotalPages = Convert.ToInt32(xmlParser.GetAttribute("pages"));

            var ApiRequestList = new List <string>();

            ForLoop.Run(2, TotalPages, (i) => ApiRequestList.Add(GetApiPerPage(i)));

            // Tasks
            xmlParser.Select(config.XPathDataNodes);
            taskQueue.SendAsync(new WBWebArgs()
            {
                urlPage   = urlPage1,
                XmlParser = xmlParser
            });

            foreach (var api in ApiRequestList)
            {
                taskQueue.SendAsync(new WBWebArgs()
                {
                    urlPage = api
                });
            }

            taskQueue.Complete();
            taskQueue.Completion.Wait();

            batchResultBlock.Complete();
            persistenceAction.Completion.Wait();

            OnCompleted();
        }
        public async Task SimpleException()
        {
            var block = new ActionBlock <int>(n =>
            {
                if (n == 5)
                {
                    throw new Exception();
                }

                Console.WriteLine(n);
            });

            for (int i = 0; i < 10; i++)
            {
                block.Post(i);
            }

            block.Complete();

            Console.WriteLine("Done!");

            await block.Completion;
        }
        private async Task ForwardData()
        {
            while (true)
            {
                await Task.WhenAny(highPriorityTarget.OutputAvailableAsync(),
                                   lowPriorityTarget.OutputAvailableAsync());

                T item;
                if (highPriorityTarget.TryReceive(out item))
                {
                    await actBlock.SendAsync(item);
                }
                else if (lowPriorityTarget.TryReceive(out item))
                {
                    await actBlock.SendAsync(item);
                }
                else
                {
                    actBlock.Complete();
                    return;
                }
            }
        }
Example #22
        private static async void AsyncThrottling()
        {
            var consumer = new ActionBlock <int>(async x =>
            {
                await Task.Delay(200);
                Console.WriteLine(x);
            }, new ExecutionDataflowBlockOptions {
                BoundedCapacity = 1
            });

            var producer = new ActionBlock <int>(async x =>
            {
                foreach (var i in Enumerable.Range(0, 100000000))
                {
                    await consumer.SendAsync(i);
                }
            });

            producer.Post(1);
            producer.Complete();
            await producer.Completion;
        }
Example #23
        public async Task <List <JobEntity> > GetDescendantEntitiesAsync(long id)
        {
            await _client.GetGrain <IJobGrain>(id).GetJobEntityAsync();

            var childrenIds = await _client.GetGrain <IDescendantsRefGrain>(id).GetChildrenAsync();

            var jobs = new ConcurrentBag <JobEntityState>();
            var getJobInfoProcessor = new ActionBlock <long>(async jobId =>
            {
                var jobGrain = _client.GetGrain <IJobGrain>(jobId);
                jobs.Add(await jobGrain.GetJobEntityAsync());
            }, Helper.GetOutOfGrainExecutionOptions());

            foreach (var childJobId in childrenIds)
            {
                await getJobInfoProcessor.PostToBlockUntilSuccessAsync(childJobId);
            }

            getJobInfoProcessor.Complete();
            await getJobInfoProcessor.Completion;

            return(_mapper.Map <List <JobEntity> >(jobs.OrderBy(s => s.JobId).ToList()));
        }
Example #24
        /// <summary>
        /// Executes the given lambda in parallel on the given data set, with a configurable maximum degree of parallelism
        /// </summary>
        /// <typeparam name="T">The item type</typeparam>
        /// <param name="source">Data to process</param>
        /// <param name="body">Lambda to execute on all items</param>
        /// <param name="maxDegreeOfParallelism">Max degree of parallelism (-1 for unbounded execution)</param>
        /// <param name="cts">Cancellation token, stops all remaining operations</param>
        /// <param name="scheduler">Task scheduler on which to execute `body`</param>
        /// <returns></returns>
        public static async Task ParallelForEach <T>(this IEnumerable <T> source, Func <T, Task> body, int maxDegreeOfParallelism = DataflowBlockOptions.Unbounded, CancellationToken cts = default, TaskScheduler scheduler = null)
        {
            var options = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = maxDegreeOfParallelism,
                CancellationToken      = cts
            };

            if (scheduler != null)
            {
                options.TaskScheduler = scheduler;
            }

            var block = new ActionBlock <T>(body, options);

            foreach (var item in source)
            {
                block.Post(item);
            }

            block.Complete();
            await block.Completion;
        }
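A minimal usage sketch of the ParallelForEach extension above (the URL list and the per-item work are illustrative only):

var urls = new[] { "https://example.com/a", "https://example.com/b", "https://example.com/c" };

await urls.ParallelForEach(async url =>
{
    await Task.Delay(100);      // stand-in for real asynchronous work per item
    Console.WriteLine(url);
}, maxDegreeOfParallelism: 4);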
        /// <summary>
        /// Submits the messages asynchronously.
        /// </summary>
        /// <param name="messages">The messages.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>
        /// The <see cref="Task" />
        /// </returns>
        public async Task SubmitMessagesAsync(
            IEnumerable <CloudQueueMessage> messages,
            CancellationToken cancellationToken = default(CancellationToken))
        {
            var executionDatafileBlockOptions = new ExecutionDataflowBlockOptions
            {
                CancellationToken      = cancellationToken,
                MaxDegreeOfParallelism = 100
            };

            var actionBlock = new ActionBlock <CloudQueueMessage>(
                async entity => { await this.SubmitMessageAsync(entity, cancellationToken).ConfigureAwait(false); },
                executionDatafileBlockOptions);

            foreach (var message in messages)
            {
                actionBlock.Post(message);
            }

            actionBlock.Complete();

            await actionBlock.Completion.ConfigureAwait(false);
        }
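A hypothetical call site for SubmitMessagesAsync (the submitter instance name and the message contents are illustrative only):

// Sketch: enqueue a few CloudQueueMessage instances through the parallel submitter.
var messages = new[]
{
    new CloudQueueMessage("first payload"),
    new CloudQueueMessage("second payload")
};

await submitter.SubmitMessagesAsync(messages, CancellationToken.None);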
Example #26
        static TimeSpan ComputeTime(int maxDegreeOfParallelism, int messageCount)
        {
            var actionBlock = new ActionBlock <int>(
                millisecondsTimeout => Thread.Sleep(millisecondsTimeout),
                new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = maxDegreeOfParallelism
            });

            Stopwatch sw = new Stopwatch();

            sw.Start();

            for (int i = 0; i < messageCount; i++)
            {
                actionBlock.Post(1000);
            }
            actionBlock.Complete();
            actionBlock.Completion.Wait();
            sw.Stop();

            return(sw.Elapsed);
        }
Example #27
        static void AddPersonBatched()
        {
            // Hook up monitoring
            timer.Elapsed += Timer_Elapsed;

            // Define the action to execute on received batches
            var insertpersons = new ActionBlock <Person[]>(p => InsertPersons(p));

            // Link the data source
            batchpersons.LinkTo(insertpersons);

            // Entry point 1 for posting data
            PostData(batchpersons);

            // Entry point 2 for posting data
            PostDataByAnotherChanel(batchpersons);

            batchpersons.Completion.ContinueWith(delegate { insertpersons.Complete(); });

            batchpersons.Complete();

            insertpersons.Completion.Wait();
        }
        public async Task TestNullReturnedTasks()
        {
            int sumOfOdds = 0;

            var ab = new ActionBlock <int>(i =>
            {
                if ((i % 2) == 0)
                {
                    return(null);
                }
                return(TaskShim.Run(() => { sumOfOdds += i; }));
            });

            const int MaxValue = 10;

            ab.PostRange(0, MaxValue);
            ab.Complete();
            await ab.Completion;

            Assert.Equal(
                expected: Enumerable.Range(0, MaxValue).Where(i => i % 2 != 0).Sum(),
                actual: sumOfOdds);
        }
        public async Task TestParallelExecution()
        {
            int dop = 2;

            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                foreach (bool singleProducerConstrained in DataflowTestHelpers.BooleanValues)
                {
                    Barrier barrier = new Barrier(dop);
                    var     options = new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = dop, SingleProducerConstrained = singleProducerConstrained
                    };
                    ActionBlock <int> ab = sync ?
                                           new ActionBlock <int>(_ => barrier.SignalAndWait(), options) :
                                           new ActionBlock <int>(_ => TaskShim.Run(() => barrier.SignalAndWait()), options);

                    int iters = dop * 4;
                    ab.PostRange(0, iters);
                    ab.Complete();
                    await ab.Completion;
                }
            }
        }
Example #30
        private static async Task CheckForNewSeasonsAsync(IEnumerable <string> tvShows)
        {
            var worker = new ActionBlock <string>(async tvShow =>
            {
                var(newSeasonAired, newSeason) = await _seasonChecker.TryCheckForNewSeasonAsync(tvShow);
                if (newSeasonAired)
                {
                    _notificationService.NotifyNewSeason(newSeason);
                }
            },
            new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 50,
            });

            foreach (var tvShow in tvShows)
            {
                await worker.SendAsync(tvShow);
            }

            worker.Complete();
            await worker.Completion;
        }
        public static async Task DoItAsync(CancellationToken cancellationToken)
        {
            // The ForEachAsync method is not meant to handle async callbacks.

            //using (MonitorDirectory monitor = new MonitorDirectory("monitor"))
            //{
            //  await monitor.ChangeObservable.ForEachAsync(
            //    async (MonitorChangeState state) => await processChangeStateAsync(state), cancellationToken);
            //}

            using (MonitorDirectory monitor = new MonitorDirectory("monitor"))
            {
                try
                {
                    await monitor.ChangeObservable.ForEachAsync(x => _actionBlock.Post(x), cancellationToken);
                }
                catch (OperationCanceledException) { }
            }

            _actionBlock.Complete();

            await _actionBlock.Completion;
        }
Example #32
        internal static void Run()
        {
            // Create an ActionBlock<int> object that prints its input.
            var printMe = new ActionBlock <int>(n =>
            {
                Console.WriteLine("n = {0}", n);
            });

            // Create a continuation task that prints the overall
            // task status to the console when the block finishes.
            var lastTask = printMe.Completion.ContinueWith(task =>
            {
                Console.WriteLine("The status of the completion task is '{0}'.", task.Status);
            });

            // Post only good values.
            printMe.Post(0);
            printMe.Post(1);
            printMe.Post(2);
            printMe.Post(3);
            printMe.Complete();
            lastTask.Wait();
        }
    public static async Task <IReadOnlyCollection <T> > WhenAllParallelAsync <T>(IAsyncEnumerable <T> source,
                                                                                 Func <T, CancellationToken, Task <T> > action,
                                                                                 CancellationToken token,
                                                                                 int?maxDegreeOfParallelism = default,
                                                                                 TaskScheduler?scheduler    = null)
    {
        var results = new ConcurrentBag <T>();

        var options = new ExecutionDataflowBlockOptions
        {
            MaxDegreeOfParallelism = maxDegreeOfParallelism ?? DataflowBlockOptions.Unbounded
        };

        if (scheduler is not null)
        {
            options.TaskScheduler = scheduler;
        }

        async Task AwaitItem(T item)
        {
            T result = await action(item, token);

            results.Add(result);
        }

        var block = new ActionBlock <T>(AwaitItem, options);

        await foreach (T item in source.WithCancellation(token))
        {
            block.Post(item);
        }

        block.Complete();
        await block.Completion;

        return(results);
    }
Example #34
        public virtual async Task Purge(IEnumerable <Guid> ids)
        {
            if (ids.Any())
            {
                using (var connection = await Connect())
                {
                    // First, capture the invocation history into blobs
                    var rows = await connection.QueryAsync <InvocationState.InvocationRow>(
                        "work.GetInvocationHistory",
                        new { Ids = new IdListParameter(ids) },
                        commandType : CommandType.StoredProcedure);

                    // Group by Id
                    var invocationHistories = rows.GroupBy(r => r.Id);

                    // Record invocation histories using a dataflow so we can constrain the parallelism
                    var block = new ActionBlock <IOrderedEnumerable <InvocationState.InvocationRow> >(
                        i => ArchiveInvocation(i),
                        new ExecutionDataflowBlockOptions()
                    {
                        MaxDegreeOfParallelism = 16
                    });
                    foreach (var invocationHistory in invocationHistories)
                    {
                        block.Post(invocationHistory.OrderBy(h => h.UpdatedAt));
                    }
                    block.Complete();
                    await block.Completion;

                    // Now purge the invocations
                    await connection.QueryAsync <int>(
                        "work.PurgeInvocations",
                        new { Ids = new IdListParameter(ids) },
                        commandType : CommandType.StoredProcedure);
                }
            }
        }
Example #35
        private (ITargetBlock <string>, IDataflowBlock) BuildPipeline()
        {
            var dataFlowOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = configuration.MaxDegreeOfParallelism
            };
            TransformBlock <string, LogEvent> parseJson = new TransformBlock <string, LogEvent>(input =>
            {
                return(ParseLogEvent(input));
            }, dataFlowOptions);

            var matchEvents = new TransformBlock <LogEvent, LogEventDetails>(input =>
            {
                return(ProcessLogEvent(input));
            }, dataFlowOptions);
            var batchEventDetails  = new BatchBlock <LogEventDetails>(configuration.InsertBatchSize);
            var insertEventDetails = new ActionBlock <LogEventDetails[]>(async chunk =>
            {
                await eventPersistence.Persist(chunk);
            }, new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = configuration.MaxDegreeOfParallelism * 4
            });
            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            parseJson.LinkTo(matchEvents, linkOptions, input => input != null);
            // drain null values
            parseJson.LinkTo(DataflowBlock.NullTarget <LogEvent>(), linkOptions);
            matchEvents.LinkTo(batchEventDetails, linkOptions, input => input != null);
            // drain null values
            matchEvents.LinkTo(DataflowBlock.NullTarget <LogEventDetails>(), linkOptions);
            batchEventDetails.LinkTo(insertEventDetails, linkOptions);
            // When the batch block completes, set the action block also to complete.
            batchEventDetails.Completion.ContinueWith(delegate { insertEventDetails.Complete(); });
            return(parseJson, insertEventDetails);
        }
        public LoopDataflow2()
        {
            var options = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = 100
            };

            InputMessageBlock   = new TransformBlock <Message, Message>(async msg => await InputMessage(msg));
            HandleMessageBlock  = new TransformManyBlock <Message, Message>(async msg => await HandleMessage(msg), options);
            HandleMessageBlock2 = new TransformManyBlock <Message, Message>(async msg => await HandleMessage2(msg), options);
            OutputMessageBlock  = new ActionBlock <Message>(msg => OutputMessage(msg), options);

            var linkOptions = new DataflowLinkOptions()
            {
                PropagateCompletion = false
            };

            InputMessageBlock.LinkTo(HandleMessageBlock, linkOptions);
            HandleMessageBlock.LinkTo(OutputMessageBlock, linkOptions, msg => msg.WasProcessed == true);
            HandleMessageBlock.LinkTo(HandleMessageBlock2, linkOptions, msg => msg.WasProcessed == false);
            HandleMessageBlock2.LinkTo(OutputMessageBlock, linkOptions, msg => msg.WasProcessed == true);
            HandleMessageBlock2.LinkTo(HandleMessageBlock, linkOptions, msg => msg.WasProcessed == false);

            InputMessageBlock.Completion.ContinueWith(async tsk => {
                await BothMessageHandlersAreComplete();
                HandleMessageBlock.Complete();
            });

            HandleMessageBlock.Completion.ContinueWith(tsk => {
                HandleMessageBlock2.Complete();
            });

            HandleMessageBlock2.Completion.ContinueWith(tsk => {
                OutputMessageBlock.Complete();
            });
            DebuggingLoop();
        }
Example #37
        public static void DeCompress(Stream inputStream, Stream outputStream)
        {
            var buffer       = new BufferBlock <byte[]>();
            var decompressor = new TransformBlock <byte[], byte[]>(bytes => DeCompress(bytes));
            var writer       = new ActionBlock <byte[]>(bytes => outputStream.Write(bytes, 0, bytes.Length));

            buffer.LinkTo(decompressor);
            buffer.Completion.ContinueWith(task => decompressor.Complete());

            decompressor.LinkTo(writer);
            decompressor.Completion.ContinueWith(task => writer.Complete());

            var readBuffer = new byte[BufferSize];

            while (true)
            {
                int readCount = inputStream.Read(readBuffer, 0, BufferSize);
                if (readCount > 0)
                {
                    var bytes = new byte[readCount];
                    Buffer.BlockCopy(readBuffer, 0, bytes, 0, readCount);
                    while (!buffer.Post(bytes))
                    {
                    }
                }
                if (readCount != BufferSize)
                {
                    buffer.Complete();
                    break;
                }
            }
            writer.Completion.Wait();

            outputStream.Flush();
            inputStream.Dispose();
            outputStream.Dispose();
        }
Example #38
        static void Main(string[] args)
        {
            var producer1 = new TransformBlock <string, string>(n =>
            {
                Task.Delay(150).Wait();
                return(n);
            });

            var producer2 = new TransformBlock <string, string>(n =>
            {
                Task.Delay(500).Wait();
                return(n);
            });

            var printBlock = new ActionBlock <string>(n => Console.WriteLine(n));

            producer1.LinkTo(printBlock);
            producer2.LinkTo(printBlock);

            for (int i = 0; i < 10; i++)
            {
                producer1.Post($"Producer 1 message {i}");
                producer1.Post($"Producer 2 message {i}");
            }

            producer1.Complete();
            producer2.Complete();

            Task.WhenAll(
                producer1.Completion,
                producer2.Completion
                ).ContinueWith(a => printBlock.Complete());

            printBlock.Completion.Wait();

            Console.WriteLine("Finished!");
        }
        public async Task <HttpResponseMessage> PrepareUpload(string path, string filename, int length, string distinctName = null, int domainID = 0)
        {
            try
            {
                Logger.Get().AppendFormat(" PrepareUpload domainID:{0}", domainID);
                Logger.Get().AppendFormat("PrepareUpload url:{0}", Request.RequestUri.ToString());
                this.OnlyAllowInternalAccess();

                List <FileManagerBase> clients = FileManagerBase.GetAllManagers(distinctName, domainID);

                var action = new ActionBlock <FileManagerBase>(async client =>
                {
                    await client.PrepareUpload(path, filename, length);
                }, new ExecutionDataflowBlockOptions()
                {
                    MaxDegreeOfParallelism = clients.Count,
                });
                clients.ForEach((c) => action.Post(c));

                action.Complete();
                await action.Completion;

                return(new HttpResponseMessage()
                {
                    Content = new StringContent("{ \"success\" : true }", Encoding.UTF8, "text/javascript")
                });
            }
            catch (Exception ex)
            {
                Logger.Get().Append(ex);
                string json = string.Format("{{ \"success\" : false, \"error\" : \"{0}\" }}", HttpUtility.JavaScriptStringEncode(ex.Message));
                return(new HttpResponseMessage()
                {
                    Content = new StringContent(json, Encoding.UTF8, "text/javascript")
                });
            }
        }
        public async Task TestInputCount()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                foreach (bool singleProducerConstrained in DataflowTestHelpers.BooleanValues)
                {
                    Barrier barrier1 = new Barrier(2), barrier2 = new Barrier(2);
                    var     options = new ExecutionDataflowBlockOptions {
                        SingleProducerConstrained = singleProducerConstrained
                    };
                    Action <int> body = _ =>
                    {
                        barrier1.SignalAndWait();
                        // will test InputCount here
                        barrier2.SignalAndWait();
                    };

                    ActionBlock <int> ab = sync ?
                                           new ActionBlock <int>(body, options) :
                                           new ActionBlock <int>(i => TaskShim.Run(() => body(i)), options);

                    for (int iter = 0; iter < 2; iter++)
                    {
                        ab.PostItems(1, 2);
                        for (int i = 1; i >= 0; i--)
                        {
                            barrier1.SignalAndWait();
                            Assert.Equal(expected: i, actual: ab.InputCount);
                            barrier2.SignalAndWait();
                        }
                    }

                    ab.Complete();
                    await ab.Completion;
                }
            }
        }
Example #41
		public void DefaultSchedulerIsDefaultTest ()
		{
			var scheduler = new TestScheduler ();
			var factory = new TaskFactory (scheduler);

			ActionBlock<int> action = null;

			var task = factory.StartNew (() =>
			{
				Assert.AreEqual (scheduler, TaskScheduler.Current);

				action = new ActionBlock<int> (
					i => Assert.AreNotEqual (scheduler, TaskScheduler.Current));
				Assert.IsTrue (action.Post (1));
				action.Complete ();
			});

			Assert.AreEqual (1, scheduler.ExecuteAll ());

			Assert.IsNotNull (action);

			Assert.IsTrue (action.Completion.Wait (1000));
			Assert.IsTrue (task.Wait (0));
		}
        internal Task SendBatchAsync(IEnumerable<BrokeredMessage> messages)
        {
            var postBlock = new ActionBlock<IGrouping<string, BrokeredMessage>>((group) =>
            {

                var r = R.Next(_scaleCount);
                TopicClient client = GetClient(group.Key, r);

                Logger.TraceFormat("Posting Messages onto Topic {1} '{0}'", client.Path, r);

                return client.SendBatchAsync(group);
            });


            foreach (var group in messages.GroupBy(m => m.CorrelationId ?? DEFAULT_COORELATION_ID))
                postBlock.Post(group);

            postBlock.Complete();
            return postBlock.Completion;

        }
Example #43
        //[Fact(Skip = "Outerloop")]
        public void TestDynamicParallelism()
        {
            bool passed = false, executingFirst = false;
            const int firstItem = 1;
            const int secondItem = 2;

            int maxDOP = Parallelism.ActualDegreeOfParallelism > 1 ? Parallelism.ActualDegreeOfParallelism : 2; // Must be >= 2
            int maxMPT = Int32.MaxValue;
            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxDOP, MaxMessagesPerTask = maxMPT };

            ActionBlock<int> action = new ActionBlock<int>((item) =>
            {
                if (item == firstItem)
                {
                    executingFirst = true;
                    Task.Delay(100).Wait();
                    executingFirst = false;
                }

                if (item == secondItem)
                {
                    passed = executingFirst;
                }
            }, options);

            BufferBlock<int> buffer = new BufferBlock<int>();
            buffer.LinkTo(action);

            buffer.Post(firstItem);
            Task.Delay(1).Wait(); // Make sure item 2 propagates after item 1 has started executing
            buffer.Post(secondItem);

            Task.Delay(1).Wait(); // Let item 2 get propagated to the ActionBlock
            action.Complete();
            action.Completion.Wait();

            Assert.True(passed, "Test failed: executingFirst is false.");
        }
Example #44
        public async Task TestSchedulerUsage()
        {
            foreach (bool singleProducerConstrained in DataflowTestHelpers.BooleanValues)
            {
                var scheduler = new ConcurrentExclusiveSchedulerPair().ExclusiveScheduler;

                var sync = new ActionBlock<int>(_ => Assert.Equal(scheduler.Id, TaskScheduler.Current.Id),
                    new ExecutionDataflowBlockOptions 
                    { 
                        TaskScheduler = scheduler,
                        SingleProducerConstrained = singleProducerConstrained
                    });
                sync.PostRange(0, 10);
                sync.Complete();
                await sync.Completion;

                var async = new ActionBlock<int>(_ => {
                    Assert.Equal(scheduler.Id, TaskScheduler.Current.Id);
                    return Task.FromResult(0);
                }, new ExecutionDataflowBlockOptions
                    {
                        TaskScheduler = scheduler,
                        SingleProducerConstrained = singleProducerConstrained
                    });
                async.PostRange(0, 10);
                async.Complete();
                await async.Completion;
            }
        }
Example #45
        //[Fact(Skip = "Outerloop")]
        public void TestReleasingOfPostponedMessages()
        {
            const int excess = 5;
            for (int dop = 1; dop <= Parallelism.ActualDegreeOfParallelism; dop++)
            {
                var localPassed = true;
                var nextOfferEvent = new AutoResetEvent(true);
                var releaseProcessingEvent = new ManualResetEventSlim();
                var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, BoundedCapacity = dop };
                var action = new ActionBlock<int>(x => { nextOfferEvent.Set(); releaseProcessingEvent.Wait(); }, options);
                var sendAsyncDop = new Task<bool>[dop];
                var sendAsyncExcess = new Task<bool>[excess];

                // Send DOP messages
                for (int i = 0; i < dop; i++)
                {
                    // Throttle sending to make sure we saturate DOP exactly
                    nextOfferEvent.WaitOne();
                    sendAsyncDop[i] = action.SendAsync(i);
                }

                // Send EXCESS more messages. All of these will surely be postponed
                for (int i = 0; i < excess; i++)
                    sendAsyncExcess[i] = action.SendAsync(dop + i);

                // Wait until the tasks for the first DOP messages get completed
                Task.WaitAll(sendAsyncDop, 5000);

                // Complete the block. This will cause the EXCESS messages to be declined.
                action.Complete();
                releaseProcessingEvent.Set();

                // Verify all DOP messages have been accepted
                for (int i = 0; i < dop; i++) localPassed &= sendAsyncDop[i].Result;
                Assert.True(localPassed, string.Format("DOP={0} : Consumed up to DOP - {1}", dop, localPassed ? "Passed" : "FAILED"));


                // Verify all EXCESS messages have been declined
                localPassed = true;
                for (int i = 0; i < excess; i++) localPassed &= !sendAsyncExcess[i].Result;
                Assert.True(localPassed, string.Format("DOP={0} : Declined excess - {1}", dop, localPassed ? "Passed" : "FAILED"));
            }
        }
 public async Task TestLinkTo_Predicate()
 {
     int counter = 0;
     var source = new BufferBlock<int>();
     var target = new ActionBlock<int>(i => counter++);
     using (source.LinkTo(target, i => i % 2 == 0))
     using (source.LinkTo(DataflowBlock.NullTarget<int>()))
     {
         source.PostRange(0, 6);
         source.Complete();
         await source.Completion.ContinueWith(delegate { target.Complete(); }, TaskScheduler.Default);
         await target.Completion;
     }
     Assert.Equal(expected: 3, actual: counter);
 }
Example #47
        /// <summary>
        /// THIS CODE IS JUST AN EXAMPLE AND NOT INTENDED FOR PRODUCTION USE
        /// </summary>
        /// <param name="args"></param>
        static void Main(string[] args)
        {
            // Create the cancellation source.
            var cancellationSource = new CancellationTokenSource();

            var inputWorkBufferBlock = new BufferBlock<Uri>();

            // Input - Uri - seed address
            // Output - string - downloaded content
            var downloaderBlock = new TransformBlock<Uri, string>(address =>
                {
                    var httpClient = new HttpClient();
                    // Downloads the requested resource as a string.
                    Console.WriteLine("Downloading '{0}'... Thread id {1}", address.OriginalString, Thread.CurrentThread.ManagedThreadId);

                    var contentType = string.Empty;
                    var content = httpClient.GetAsync(address).ContinueWith(task =>
                        {
                            HttpResponseMessage response = task.Result;
                            if (task.Result.IsSuccessStatusCode)
                            {
                                return task.Result.Content.ReadAsStringAsync();
                            }

                            return Task.FromResult<string>(null);
                        }).Unwrap();

                    return content.Result;
                }, new ExecutionDataflowBlockOptions
                {
                    CancellationToken = cancellationSource.Token,
                    MaxDegreeOfParallelism = 5
                });

            var outputBufferBlock = new BufferBlock<string>();
            var saverBlock = new ActionBlock<string>(content =>
                {
                    if (content != null)
                    {
                        const string targetPath = "c:\\work\\tmp";
                        const string extension = ".html";
                        var fileName = Path.ChangeExtension(Path.Combine(targetPath, Path.GetRandomFileName()), extension);
                        Console.WriteLine("Saving {0} ...Thread: {1}", fileName, Thread.CurrentThread.ManagedThreadId);
                        using (var stream = new StreamWriter(fileName))
                        {
                            stream.Write(content);
                        }
                    }
                }, new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = 1
                });

            // Blocks linking
            inputWorkBufferBlock.LinkTo(downloaderBlock);
            // Filtering, skips empty response
            downloaderBlock.LinkTo(outputBufferBlock, s => !string.IsNullOrWhiteSpace(s));
            outputBufferBlock.LinkTo(saverBlock);

            // Propagating completion
            inputWorkBufferBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)downloaderBlock).Fault(t.Exception);
                else downloaderBlock.Complete();
            });
            downloaderBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)outputBufferBlock).Fault(t.Exception);
                else outputBufferBlock.Complete();
            });
            outputBufferBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)saverBlock).Fault(t.Exception);
                else saverBlock.Complete();
            });

            // Message passing
            inputWorkBufferBlock.Post(new Uri("http://svnbook.red-bean.com/nightly/ru/svn-book.html"));
            inputWorkBufferBlock.Post(new Uri("http://bash.im"));
            inputWorkBufferBlock.Post(new Uri("http://habrahabr.ru"));
            inputWorkBufferBlock.Post(new Uri("http://lb.ua"));
            inputWorkBufferBlock.Post(new Uri("http://blogs.msdn.com/b/pfxteam/"));
            inputWorkBufferBlock.Post(new Uri("http://hgbook.red-bean.com/read/a-tour-of-mercurial-merging-work.html"));
            inputWorkBufferBlock.Complete();

            saverBlock.Completion.Wait();
            Console.WriteLine("Job is DONE...");
            Console.WriteLine("Hit ANY KEY to exit...");
            Console.ReadKey();
        }
Example #48
        static void Main(string[] args)
        {
            //
            // Create the members of the pipeline.
            //

            // Downloads the requested resource as a string.
            var downloadString = new TransformBlock<string, string>(uri =>
            {
                Console.WriteLine("Downloading '{0}'...", uri);

                return new WebClient().DownloadString(uri);
            });

            // Separates the specified text into an array of words.
            var createWordList = new TransformBlock<string, string[]>(text =>
            {
                Console.WriteLine("Creating word list...");

                // Remove common punctuation by replacing all non-letter characters
                // with a space character.
                char[] tokens = text.ToArray();
                for (int i = 0; i < tokens.Length; i++)
                {
                    if (!char.IsLetter(tokens[i]))
                        tokens[i] = ' ';
                }
                text = new string(tokens);

                // Separate the text into an array of words.
                return text.Split(new char[] { ' ' },
                   StringSplitOptions.RemoveEmptyEntries);
            });

            // Removes short words, orders the resulting words alphabetically,
            // and then remove duplicates.
            var filterWordList = new TransformBlock<string[], string[]>(words =>
            {
                Console.WriteLine("Filtering word list...");

                return words.Where(word => word.Length > 3).OrderBy(word => word)
                   .Distinct().ToArray();
            });

            // Finds all words in the specified collection whose reverse also
            // exists in the collection.
            var findReversedWords = new TransformManyBlock<string[], string>(words =>
            {
                Console.WriteLine("Finding reversed words...");

                // Holds reversed words.
                var reversedWords = new ConcurrentQueue<string>();

                // Add each word in the original collection to the result whose
                // reversed word also exists in the collection.
                Parallel.ForEach(words, word =>
                {
                    // Reverse the word.
                    string reverse = new string(word.Reverse().ToArray());

                    // Enqueue the word if the reversed version also exists
                    // in the collection.
                    if (Array.BinarySearch<string>(words, reverse) >= 0 &&
                        word != reverse)
                    {
                        reversedWords.Enqueue(word);
                    }
                });

                return reversedWords;
            });

            // Prints the provided reversed words to the console.
            var printReversedWords = new ActionBlock<string>(reversedWord =>
            {
                Console.WriteLine("Found reversed words {0}/{1}",
                   reversedWord, new string(reversedWord.Reverse().ToArray()));
            });

            //
            // Connect the dataflow blocks to form a pipeline.
            //

            downloadString.LinkTo(createWordList);
            createWordList.LinkTo(filterWordList);
            filterWordList.LinkTo(findReversedWords);
            findReversedWords.LinkTo(printReversedWords);

            //
            // For each completion task in the pipeline, create a continuation task
            // that marks the next block in the pipeline as completed.
            // A completed dataflow block processes any buffered elements, but does
            // not accept new elements.
            //

            downloadString.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)createWordList).Fault(t.Exception);
                else createWordList.Complete();
            });
            createWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)filterWordList).Fault(t.Exception);
                else filterWordList.Complete();
            });
            filterWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)findReversedWords).Fault(t.Exception);
                else findReversedWords.Complete();
            });
            findReversedWords.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)printReversedWords).Fault(t.Exception);
                else printReversedWords.Complete();
            });

            // Process "The Iliad of Homer" by Homer.
            downloadString.Post("http://www.gutenberg.org/files/6130/6130-0.txt");

            // Mark the head of the pipeline as complete. The continuation tasks
            // propagate completion through the pipeline as each part of the
            // pipeline finishes.
            downloadString.Complete();

            // Wait for the last block in the pipeline to process all messages.
            printReversedWords.Completion.Wait();

            Console.ReadLine();
        }
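The ContinueWith chain above propagates completion by hand. An alternative with the same behaviour is to pass DataflowLinkOptions with PropagateCompletion set when linking, which forwards both completion and faults automatically; a minimal sketch of that wiring:

            // Alternative: let LinkTo propagate completion and faults,
            // making the explicit ContinueWith continuations above unnecessary.
            var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };

            downloadString.LinkTo(createWordList, linkOptions);
            createWordList.LinkTo(filterWordList, linkOptions);
            filterWordList.LinkTo(findReversedWords, linkOptions);
            findReversedWords.LinkTo(printReversedWords, linkOptions);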
        public static async Task Compress(Stream inputStream, Stream outputStream)
        {
            var buffer = new BufferBlock<CompressionDetails>(new DataflowBlockOptions { BoundedCapacity = BoundedCapacity });
            var compressorOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = MaxDegreeOfParallelism,
                BoundedCapacity = BoundedCapacity
            };

            var writerOptions = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity = BoundedCapacity,
                SingleProducerConstrained = true
            };

            var compressor = new TransformBlock<CompressionDetails, CompressionDetails>(compressionDetails => Compress(compressionDetails), compressorOptions);
            var writer = new ActionBlock<CompressionDetails>(compressionDetailsWithSize => Multiplex(outputStream, compressionDetailsWithSize), writerOptions);

            buffer.LinkTo(compressor);
            compressor.LinkTo(writer);
            
            buffer.Completion.ContinueWith(task => compressor.Complete()); 
            compressor.Completion.ContinueWith(task => writer.Complete());

            long sourceLength = inputStream.Length;
            // Write total size to destination
            byte[] size = BitConverter.GetBytes(sourceLength);
            await outputStream.WriteAsync(size, 0, size.Length);

            long chunkSize = 1048576; // 1 MB
            int index = 0;
            while (sourceLength > 0)
            {
                // Clamp the chunk size so the final (or only) chunk is not larger than what remains.
                if (chunkSize > sourceLength)
                    chunkSize = sourceLength;

                byte[] data = new byte[chunkSize];
                int readCount = await inputStream.ReadAsync(data, 0, data.Length);

                byte[] bytes = new byte[readCount];
                Buffer.BlockCopy(data, 0, bytes, 0, readCount);

                CompressionDetails compressionDetails = new CompressionDetails
                {
                    Bytes = bytes,
                    ChunkSize = BitConverter.GetBytes(chunkSize),
                    Sequence = ++index
                };

                while (!await buffer.SendAsync(compressionDetails)) { }

                sourceLength -= chunkSize;
            }

            // Signal that no more chunks will be posted; completion then flows through to the writer.
            buffer.Complete();
            await writer.Completion;

            await outputStream.FlushAsync();
            inputStream.Dispose();
            outputStream.Dispose();
        }
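A minimal usage sketch for the Compress method above, assuming it sits on a class that also defines the BoundedCapacity and MaxDegreeOfParallelism constants it references; the file paths are placeholders:

        // Hypothetical caller: compress one file into another using the pipeline above.
        // Compress disposes both streams itself, so no using blocks are needed here.
        public static async Task CompressFileAsync(string sourcePath, string destinationPath)
        {
            var input = File.OpenRead(sourcePath);
            var output = File.Create(destinationPath);
            await Compress(input, output);
        }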
Beispiel #50
0
        internal static bool WriteOnceToAction()
        {
            const int ITERS = 2;
            int completedCount = 0;
            var c = new ActionBlock<int>(i => completedCount++);
            var singleAssignments = Enumerable.Range(0, ITERS).Select(_ =>
            {
                var s = new WriteOnceBlock<int>(i => i);
                s.LinkTo(c);
                return s;
            }).ToList();
            Task.Factory.ContinueWhenAll(singleAssignments.Select(s => s.Completion).ToArray(), _ => c.Complete());

            foreach (var s in singleAssignments) s.Post(1);
            c.Completion.Wait();

            return completedCount == ITERS;
        }
Beispiel #51
0
        internal static bool TransformThroughDiscardingFilterToAction()
        {
            const int ITERS = 2;
            int completedCount = 0;

            var t = new TransformBlock<int, int>(i => i);
            var c = new ActionBlock<int>(i => completedCount++);

            t.LinkTo(c, i => i % 2 == 0);
            t.LinkTo(DataflowBlock.NullTarget<int>());
            t.Completion.ContinueWith(_ => c.Complete());

            for (int i = 0; i < ITERS; i++) t.Post(i);
            t.Complete();
            c.Completion.Wait();

            return completedCount == ITERS / 2;
        }
Beispiel #52
0
        public async Task TestNullReturnedTasks()
        {
            int sumOfOdds = 0;

            var ab = new ActionBlock<int>(i => {
                if ((i % 2) == 0) return null;
                return Task.Run(() => { sumOfOdds += i; });
            });

            const int MaxValue = 10;
            ab.PostRange(0, MaxValue);
            ab.Complete();
            await ab.Completion;

            Assert.Equal(
                expected: Enumerable.Range(0, MaxValue).Where(i => i % 2 != 0).Sum(),
                actual: sumOfOdds);
        }
        public void TestWriteOnceCloning()
        {
            // Test cloning when a clone function is provided
            {
                var writeOnce = new WriteOnceBlock<int>(x => -x);
                Assert.True(writeOnce.Post(42), "Expected initial post on cloning WriteOnce to succeed");
                Assert.False(writeOnce.Post(43), "Expected secondary post on cloning WriteOnce to fail");
                Assert.True(writeOnce.Receive() == -42, "Expected Receive'd data to be a clone");
                int item;
                Assert.True(writeOnce.TryReceive(out item) && item == -42, "Expected TryReceive'd data to be a clone");
                IList<int> items;
                Assert.True(((IReceivableSourceBlock<int>)writeOnce).TryReceiveAll(out items) && items.Count == 1 && items[0] == -42, "Expected TryReceiveAll'd data to be a clone");
                var ab = new ActionBlock<int>(i =>
                {
                    Assert.True(i == -42, "Expected propagated data to be a clone.");
                });
                writeOnce.LinkTo(ab);
                ab.Complete();
                Assert.True(ab.Completion.Wait(4000), "Expected action block to complete after cloned data flowed to it");
            }

            // Test successful processing when no clone function exists
            {
                var data = new object();
                var writeOnce = new WriteOnceBlock<object>(null);
                Assert.True(writeOnce.Post(data), "Expected initial post on non-cloning WriteOnce to succeed");
                Assert.False(writeOnce.Post(new object()), "Expected secondary post on non-cloning WriteOnce to fail");
                Assert.True(writeOnce.Receive() == data, "Expected Receive'd data to be original data");
                object item;
                Assert.True(writeOnce.TryReceive(out item) && item == data, "Expected TryReceive'd data to be original data");
                IList<object> items;
                Assert.True(((IReceivableSourceBlock<object>)writeOnce).TryReceiveAll(out items) && items.Count == 1 && items[0] == data, "Expected TryReceiveAll'd data to be original data");
                var ab = new ActionBlock<object>(i =>
                {
                    Assert.True(i == data, "Expected propagated data to be original data.");
                });
                writeOnce.LinkTo(ab);
                ab.Complete();
                Assert.True(ab.Completion.Wait(4000), "Expected action block to complete after original data flowed to it");
            }
        }
        async Task TransformCoreAsync(StreamReader reader, StreamWriter writer)
        {
            var writerBlock = new ActionBlock<string>(l => writer.WriteLineAsync(l));

            for (; ; )
            {
                var line = await reader.ReadLineAsync().ConfigureAwait(false);

                if (null == line)
                    break;

                line = line.Normalize(NormalizationForm.FormC);

                line = line.TrimEnd();

                writerBlock.Post(line);
            }

            writerBlock.Complete();

            await writerBlock.Completion.ConfigureAwait(false);
        }
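A small usage sketch for TransformCoreAsync, assuming it lives on the same type; the file names are placeholders:

        // Hypothetical caller: normalize and trim every line of one text file into another.
        async Task TransformFileAsync(string inputPath, string outputPath)
        {
            using (var reader = new StreamReader(inputPath))
            using (var writer = new StreamWriter(outputPath))
            {
                await TransformCoreAsync(reader, writer);
            }
        }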
        public async Task WriteOnceToAction()
        {
            int completedCount = 0;
            var c = new ActionBlock<int>(i => completedCount++);
            var singleAssignments = Enumerable.Range(0, Iterations).Select(_ =>
            {
                var s = new WriteOnceBlock<int>(i => i);
                s.LinkTo(c);
                return s;
            }).ToList();
            var ignored = Task.WhenAll(singleAssignments.Select(s => s.Completion)).ContinueWith(
                _ => c.Complete(), CancellationToken.None, TaskContinuationOptions.None, TaskScheduler.Default);

            foreach (var s in singleAssignments) s.Post(1);

            await c.Completion;
            Assert.Equal(expected: Iterations, actual: completedCount);
        }
Beispiel #56
0
        private static IEnumerable<string> ConvertToCsvLines(IEnumerable<CloudBlockBlob> blobs, int maxDegreeOfParallelism)
        {
            var lines = new BlockingCollection<string>(BoundedCapacity);

            Task.Run(async () =>
                {
                    var actionBlock = new ActionBlock<CloudBlockBlob>(
                    async (b) =>
                    {
                        var line = await ConvertToCsvLineAsync(b);
                        lines.Add(line);
                    },
                    new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism });

                    foreach (var blob in blobs)
                    {
                        var postSuccess = actionBlock.Post(blob);
                    }

                    actionBlock.Complete();
                    await actionBlock.Completion;
                    lines.CompleteAdding();
                });

            return lines.GetConsumingEnumerable();
        }
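The method above bridges an ActionBlock into a lazily consumable sequence through a BlockingCollection. A hedged consumption sketch; the output path is a placeholder and the blob listing is assumed to come from the caller:

        // Hypothetical consumer: stream the CSV lines to a file as they are produced.
        // The enumerable yields lines while blobs are still being converted,
        // so the file is written incrementally rather than after all blobs finish.
        private static void WriteCsv(IEnumerable<CloudBlockBlob> blobs, string outputPath)
        {
            File.WriteAllLines(outputPath, ConvertToCsvLines(blobs, maxDegreeOfParallelism: 8));
        }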
Beispiel #57
0
 public async Task TestLinkingToCompleted()
 {
     var b = new BroadcastBlock<int>(i => i * 2);
     var ab = new ActionBlock<int>(i => { });
     b.LinkTo(ab);
     ab.Complete();
     Assert.True(b.Post(1));
     b.Complete();
     await b.Completion;
 }
Beispiel #58
0
        //[Fact(Skip = "Outerloop")]
        public void RunActionBlockConformanceTests()
        {
            // SYNC
            // Do everything twice - once through OfferMessage and Once through Post
            for (FeedMethod feedMethod = FeedMethod._First; feedMethod < FeedMethod._Count; feedMethod++)
            {
                Func<DataflowBlockOptions, TargetProperties<int>> actionBlockFactory =
                    options =>
                    {
                        ITargetBlock<int> target = new ActionBlock<int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options);
                        return new TargetProperties<int> { Target = target, Capturer = target, ErrorVerifyable = true };
                    };

                CancellationTokenSource cancellationSource = new CancellationTokenSource();
                var defaultOptions = new ExecutionDataflowBlockOptions();
                var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
                var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 1 };
                var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 1, CancellationToken = cancellationSource.Token };
                var spscOptions = new ExecutionDataflowBlockOptions { SingleProducerConstrained = true };
                var spscMptOptions = new ExecutionDataflowBlockOptions { SingleProducerConstrained = true, MaxMessagesPerTask = 10 };

                Assert.True(FeedTarget(actionBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, dopOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, mptOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, mptOptions, 1, Intervention.Complete, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, cancellationOptions, 1, Intervention.Cancel, cancellationSource, feedMethod, true));

                Assert.True(FeedTarget(actionBlockFactory, spscOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, spscOptions, 1, Intervention.Complete, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, spscMptOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, spscMptOptions, 1, Intervention.Complete, null, feedMethod, true));
            }

            // Test scheduler usage
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    var sts = new SimpleTaskScheduler();

                    var options = new ExecutionDataflowBlockOptions { TaskScheduler = sts, MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded, MaxMessagesPerTask = 1 };
                    if (trial == 0) options.SingleProducerConstrained = true;

                    var ab = new ActionBlock<int>(i => localPassed &= TaskScheduler.Current.Id == sts.Id, options);
                    for (int i = 0; i < 2; i++) ab.Post(i);
                    ab.Complete();
                    ab.Completion.Wait();
                }

                Assert.True(localPassed, string.Format("{0}: Correct scheduler usage", localPassed ? "Success" : "Failure"));
            }

            // Test count
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    var barrier1 = new Barrier(2);
                    var barrier2 = new Barrier(2);
                    var ab = new ActionBlock<int>(i =>
                    {
                        barrier1.SignalAndWait();
                        barrier2.SignalAndWait();
                    }, new ExecutionDataflowBlockOptions { SingleProducerConstrained = (trial == 0) });
                    for (int iter = 0; iter < 2; iter++)
                    {
                        for (int i = 1; i <= 2; i++) ab.Post(i);
                        for (int i = 1; i >= 0; i--)
                        {
                            barrier1.SignalAndWait();
                            localPassed &= i == ab.InputCount;
                            barrier2.SignalAndWait();
                        }
                    }
                }

                Assert.True(localPassed, string.Format("{0}: InputCount", localPassed ? "Success" : "Failure"));
            }

            // Test ordering
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    int prev = -1;
                    var ab = new ActionBlock<int>(i =>
                    {
                        if (prev + 1 != i) localPassed &= false;
                        prev = i;
                    }, new ExecutionDataflowBlockOptions { SingleProducerConstrained = (trial == 0) });
                    for (int i = 0; i < 2; i++) ab.Post(i);
                    ab.Complete();
                    ab.Completion.Wait();
                }

                Assert.True(localPassed, string.Format("{0}: Correct ordering", localPassed ? "Success" : "Failure"));
            }

            // Test non-greedy
            {
                bool localPassed = true;
                var barrier = new Barrier(2);
                var ab = new ActionBlock<int>(i =>
                {
                    barrier.SignalAndWait();
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
                ab.SendAsync(1);
                Task.Delay(200).Wait();
                var sa2 = ab.SendAsync(2);
                localPassed &= !sa2.IsCompleted;
                barrier.SignalAndWait(); // for SendAsync(1)
                barrier.SignalAndWait(); // for SendAsync(2)
                localPassed &= sa2.Wait(100);
                int total = 0;
                ab = new ActionBlock<int>(i =>
                {
                    Interlocked.Add(ref total, i);
                    Task.Delay(1).Wait();
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
                for (int i = 1; i <= 100; i++) ab.SendAsync(i);
                SpinWait.SpinUntil(() => total == ((100 * 101) / 2), 30000);
                localPassed &= total == ((100 * 101) / 2);
                Assert.True(localPassed, string.Format("total={0} (must be {1})", total, (100 * 101) / 2));
                Assert.True(localPassed, string.Format("{0}: Non-greedy support", localPassed ? "Success" : "Failure"));
            }

            // Test that OperationCanceledExceptions are ignored
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    int sumOfOdds = 0;
                    var ab = new ActionBlock<int>(i =>
                    {
                        if ((i % 2) == 0) throw new OperationCanceledException();
                        sumOfOdds += i;
                    }, new ExecutionDataflowBlockOptions { SingleProducerConstrained = (trial == 0) });
                    for (int i = 0; i < 4; i++) ab.Post(i);
                    ab.Complete();
                    ab.Completion.Wait();
                    localPassed = sumOfOdds == (1 + 3);
                }

                Assert.True(localPassed, string.Format("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure"));
            }

            // Test using a precanceled token
            {
                bool localPassed = true;
                try
                {
                    var cts = new CancellationTokenSource();
                    cts.Cancel();
                    var dbo = new ExecutionDataflowBlockOptions { CancellationToken = cts.Token };
                    var ab = new ActionBlock<int>(i => { }, dbo);

                    localPassed &= ab.Post(42) == false;
                    localPassed &= ab.InputCount == 0;
                    localPassed &= ab.Completion != null;
                    ab.Complete();
                }
                catch (Exception)
                {
                    localPassed = false;
                }

                Assert.True(localPassed, string.Format("{0}: Precanceled tokens work correctly", localPassed ? "Success" : "Failure"));
            }

            // Test faulting
            {
                bool localPassed = true;
                for (int trial = 0; trial < 2; trial++)
                {
                    var ab = new ActionBlock<int>(i => { throw new InvalidOperationException(); },
                        new ExecutionDataflowBlockOptions { SingleProducerConstrained = (trial == 0) });
                    ab.Post(42);
                    ab.Post(1);
                    ab.Post(2);
                    ab.Post(3);
                    try { localPassed &= ab.Completion.Wait(5000); }
                    catch { }
                    localPassed &= ab.Completion.IsFaulted;
                    localPassed &= SpinWait.SpinUntil(() => ab.InputCount == 0, 500);
                    localPassed &= ab.Post(4) == false;
                }

                Assert.True(localPassed, string.Format("{0}: Faulted handled correctly", localPassed ? "Success" : "Failure"));
            }

            // ASYNC (a copy of the sync tests, but with delegates returning Task instead of void)

            // Do everything twice - once through OfferMessage and Once through Post
            for (FeedMethod feedMethod = FeedMethod._First; feedMethod < FeedMethod._Count; feedMethod++)
            {
                Func<DataflowBlockOptions, TargetProperties<int>> actionBlockFactory =
                    options =>
                    {
                        ITargetBlock<int> target = new ActionBlock<int>(i => TrackCapturesAsync(i), (ExecutionDataflowBlockOptions)options);
                        return new TargetProperties<int> { Target = target, Capturer = target, ErrorVerifyable = true };
                    };
                CancellationTokenSource cancellationSource = new CancellationTokenSource();
                var defaultOptions = new ExecutionDataflowBlockOptions();
                var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
                var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 10 };
                var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 100, CancellationToken = cancellationSource.Token };

                Assert.True(FeedTarget(actionBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, defaultOptions, 10, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, dopOptions, 1000, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, mptOptions, 10000, Intervention.None, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, mptOptions, 10000, Intervention.Complete, null, feedMethod, true));
                Assert.True(FeedTarget(actionBlockFactory, cancellationOptions, 10000, Intervention.Cancel, cancellationSource, feedMethod, true));
            }

            // Test scheduler usage
            {
                bool localPassed = true;
                var sts = new SimpleTaskScheduler();
                var ab = new ActionBlock<int>(i =>
                    {
                        localPassed &= TaskScheduler.Current.Id == sts.Id;
                        return Task.Factory.StartNew(() => { });
                    }, new ExecutionDataflowBlockOptions { TaskScheduler = sts, MaxDegreeOfParallelism = -1, MaxMessagesPerTask = 10 });
                for (int i = 0; i < 2; i++) ab.Post(i);
                ab.Complete();
                ab.Completion.Wait();
                Assert.True(localPassed, string.Format("{0}: Correct scheduler usage", localPassed ? "Success" : "Failure"));
            }

            // Test count
            {
                bool localPassed = true;
                var barrier1 = new Barrier(2);
                var barrier2 = new Barrier(2);
                var ab = new ActionBlock<int>(i => Task.Factory.StartNew(() =>
                {
                    barrier1.SignalAndWait();
                    barrier2.SignalAndWait();
                }));
                for (int iter = 0; iter < 2; iter++)
                {
                    for (int i = 1; i <= 2; i++) ab.Post(i);
                    for (int i = 1; i >= 0; i--)
                    {
                        barrier1.SignalAndWait();
                        localPassed &= i == ab.InputCount;
                        barrier2.SignalAndWait();
                    }
                }
                Assert.True(localPassed, string.Format("{0}: InputCount", localPassed ? "Success" : "Failure"));
            }

            // Test ordering
            {
                bool localPassed = true;
                int prev = -1;
                var ab = new ActionBlock<int>(i =>
                {
                    return Task.Factory.StartNew(() =>
                    {
                        if (prev + 1 != i) localPassed &= false;
                        prev = i;
                    });
                });
                for (int i = 0; i < 2; i++) ab.Post(i);
                ab.Complete();
                ab.Completion.Wait();
                Assert.True(localPassed, string.Format("{0}: Correct ordering", localPassed ? "Success" : "Failure"));
            }

            // Test non-greedy
            {
                bool localPassed = true;
                var barrier = new Barrier(2);
                var ab = new ActionBlock<int>(i =>
                {
                    return Task.Factory.StartNew(() =>
                    {
                        barrier.SignalAndWait();
                    });
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
                ab.SendAsync(1);
                Task.Delay(200).Wait();
                var sa2 = ab.SendAsync(2);
                localPassed &= !sa2.IsCompleted;
                barrier.SignalAndWait(); // for SendAsync(1)
                barrier.SignalAndWait(); // for SendAsync(2)
                localPassed &= sa2.Wait(100);
                int total = 0;
                ab = new ActionBlock<int>(i =>
                {
                    return Task.Factory.StartNew(() =>
                    {
                        Interlocked.Add(ref total, i);
                        Task.Delay(1).Wait();
                    });
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
                for (int i = 1; i <= 100; i++) ab.SendAsync(i);
                SpinWait.SpinUntil(() => total == ((100 * 101) / 2), 30000);
                localPassed &= total == ((100 * 101) / 2);
                Assert.True(localPassed, string.Format("total={0} (must be {1})", total, (100 * 101) / 2));
                Assert.True(localPassed, string.Format("{0}: Non-greedy support", localPassed ? "Success" : "Failure"));
            }

            // Test that OperationCanceledExceptions are ignored
            {
                bool localPassed = true;
                int sumOfOdds = 0;
                var ab = new ActionBlock<int>(i =>
                {
                    if ((i % 2) == 0) throw new OperationCanceledException();
                    return Task.Factory.StartNew(() => { sumOfOdds += i; });
                });
                for (int i = 0; i < 4; i++) ab.Post(i);
                ab.Complete();
                ab.Completion.Wait();
                localPassed = sumOfOdds == (1 + 3);
                Assert.True(localPassed, string.Format("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure"));
            }

            // Test that null task is ignored
            {
                bool localPassed = true;
                int sumOfOdds = 0;
                var ab = new ActionBlock<int>(i =>
                {
                    if ((i % 2) == 0) return null;
                    return Task.Factory.StartNew(() => { sumOfOdds += i; });
                });
                for (int i = 0; i < 4; i++) ab.Post(i);
                ab.Complete();
                ab.Completion.Wait();
                localPassed = sumOfOdds == (1 + 3);
                Assert.True(localPassed, string.Format("{0}: null tasks are ignored", localPassed ? "Success" : "Failure"));
            }

            // Test faulting from the delegate
            {
                bool localPassed = true;
                var ab = new ActionBlock<int>(new Func<int, Task>(i => { throw new InvalidOperationException(); }));
                ab.Post(42);
                ab.Post(1);
                ab.Post(2);
                ab.Post(3);
                try { localPassed &= ab.Completion.Wait(100); }
                catch { }
                localPassed &= ab.Completion.IsFaulted;
                localPassed &= SpinWait.SpinUntil(() => ab.InputCount == 0, 500);
                localPassed &= ab.Post(4) == false;
                Assert.True(localPassed, string.Format("{0}: Faulted from delegate handled correctly", localPassed ? "Success" : "Failure"));
            }

            // Test faulting from the task
            {
                bool localPassed = true;
                var ab = new ActionBlock<int>(i => Task.Factory.StartNew(() => { throw new InvalidOperationException(); }));
                ab.Post(42);
                ab.Post(1);
                ab.Post(2);
                ab.Post(3);
                try { localPassed &= ab.Completion.Wait(100); }
                catch { }
                localPassed &= ab.Completion.IsFaulted;
                localPassed &= SpinWait.SpinUntil(() => ab.InputCount == 0, 500);
                localPassed &= ab.Post(4) == false;
                Assert.True(localPassed, string.Format("{0}: Faulted from task handled correctly", localPassed ? "Success" : "Failure"));
            }
        }
        public async Task TestLinkTo_MaxMessages()
        {
            Assert.Throws<ArgumentOutOfRangeException>(() => new DataflowLinkOptions { MaxMessages = -2 });
            Assert.Throws<ArgumentOutOfRangeException>(() => new DataflowLinkOptions { MaxMessages = 0 });

            const int MaxMessages = 3, ExtraMessages = 2;

            for (int mode = 0; mode < 3; mode++)
            {
                int consumedMessages = 0, remainingMessages = 0;
                var options = new DataflowLinkOptions() { MaxMessages = MaxMessages };
                var source = new BufferBlock<int>();
                var target = new ActionBlock<int>(x => consumedMessages++);
                var otherTarget = new ActionBlock<int>(x => remainingMessages++);

                switch (mode)
                {
                    case 0:
                        source.LinkTo(target, options);
                        break;
                    case 1:
                        source.LinkTo(target, options, x => true); // Injects FilteredLinkPropagator
                        break;
                    case 2:
                        using (source.LinkTo(target)) source.LinkTo(target, options); // Injects NopLinkPropagator
                        break;
                }
                source.LinkTo(otherTarget);

                source.PostRange(0, MaxMessages + ExtraMessages);
                source.Complete();
                await source.Completion;

                target.Complete();
                otherTarget.Complete();
                await Task.WhenAll(target.Completion, otherTarget.Completion);

                Assert.Equal(expected: MaxMessages, actual: consumedMessages);
                Assert.Equal(expected: ExtraMessages, actual: remainingMessages);
            }
        }
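Outside tests, MaxMessages is also a convenient way to let a target take a fixed number of items before the link detaches itself; a minimal sketch:

            // Take only the first message from 'producer'; the link unlinks after one item,
            // and the NullTarget link drains anything that arrives afterwards.
            var producer = new BufferBlock<int>();
            var firstOnly = new ActionBlock<int>(x => Console.WriteLine("first: {0}", x));
            producer.LinkTo(firstOnly, new DataflowLinkOptions { MaxMessages = 1 });
            producer.LinkTo(DataflowBlock.NullTarget<int>());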
        public static async Task DecompressFastDataFlow(Stream inputStream, Stream outputStream)
        {
            var buffer = new BufferBlock<DecompressionDetails>(new DataflowBlockOptions { BoundedCapacity = BoundedCapacity });
            var compressorOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = MaxDegreeOfParallelism,
                BoundedCapacity = BoundedCapacity
            };

            var writerOptions = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity = BoundedCapacity,
                SingleProducerConstrained = true 
            };

            var compressor = new TransformBlock<DecompressionDetails, DecompressionDetails>(compressionDetails => Decompress(compressionDetails), compressorOptions);
            var writer = new ActionBlock<DecompressionDetails>(compressionDetailsWithSize => Multiplex(buffer, outputStream, compressionDetailsWithSize), writerOptions);

            buffer.LinkTo(compressor);
            compressor.LinkTo(writer);

            buffer.Completion.ContinueWith(task => compressor.Complete());
            compressor.Completion.ContinueWith(task => writer.Complete());

            byte[] size = new byte[sizeof(long)];
            await inputStream.ReadAsync(size, 0, size.Length);
            // convert the size to number
            long sourceLength = BitConverter.ToInt64(size, 0);

            int index = 0;
            while (sourceLength > 0)
            {
                size = new byte[sizeof(long)];
                await inputStream.ReadAsync(size, 0, size.Length);

                // convert the size back to number
                long chunkSize = BitConverter.ToInt64(size, 0);
                if (chunkSize > sourceLength) throw new InvalidDataException("Chunk size exceeds the remaining stream length.");

                // read the compressed size
                size = new byte[sizeof(int)];
                await inputStream.ReadAsync(size, 0, size.Length);

                // convert the size back to number
                int storedSize = BitConverter.ToInt32(size, 0);

                byte[] compressedData = new byte[storedSize];
                int readCount = await inputStream.ReadAsync(compressedData, 0, compressedData.Length);

                DecompressionDetails decompressionDetails = new DecompressionDetails
                {
                    Bytes = compressedData,
                    ChunkSize = chunkSize,             
                    Sequence = ++index
                };

                while (!await buffer.SendAsync(decompressionDetails)) { }

                sourceLength -= chunkSize;
            }

            // Signal that no more chunks will be posted; completion then flows through to the writer.
            buffer.Complete();
            await writer.Completion;

            await outputStream.FlushAsync();
            inputStream.Dispose();
            outputStream.Dispose();
        }
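A hedged round-trip sketch pairing the Compress method from earlier with DecompressFastDataFlow, assuming both live on the same type; the payload is a placeholder and only serves to show the call order:

        // Hypothetical round trip: compress a byte buffer into memory, then decompress it again.
        // Both pipeline methods dispose their streams, but MemoryStream.ToArray still works
        // after the stream has been disposed.
        public static async Task<byte[]> RoundTripAsync(byte[] payload)
        {
            var compressed = new MemoryStream();
            await Compress(new MemoryStream(payload), compressed);

            var decompressed = new MemoryStream();
            await DecompressFastDataFlow(new MemoryStream(compressed.ToArray()), decompressed);

            return decompressed.ToArray();
        }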