/// <summary>
/// Builds the event-processing pipeline for one consumer: incoming stored events are
/// parsed, grouped into bounded batches, and then dispatched to the grain on the
/// given scheduler. The subscription itself is created last, after the pipeline is
/// fully wired, so no event can arrive before the blocks exist.
/// </summary>
/// <param name="grain">Grain that receives parsed event batches and errors.</param>
/// <param name="eventDataFormatter">Formatter used (elsewhere) to deserialize stored events.</param>
/// <param name="eventConsumer">Consumer providing batch settings and the Handles filter.</param>
/// <param name="factory">Factory that creates the underlying subscription for this subscriber.</param>
/// <param name="scheduler">Scheduler on which the final handling block executes.</param>
public BatchSubscriber(
    EventConsumerGrain grain,
    IEventDataFormatter eventDataFormatter,
    IEventConsumer eventConsumer,
    Func<IEventSubscriber, IEventSubscription> factory,
    TaskScheduler scheduler)
{
    this.eventDataFormatter = eventDataFormatter;

    // Clamp configuration to sane minimums: at least one item per batch
    // and at least 100ms of batching delay.
    var batchSize = Math.Max(1, eventConsumer!.BatchSize);
    var batchDelay = Math.Max(100, eventConsumer.BatchDelay);

    // Stage 1: decide per job whether the consumer handles it, then parse the
    // stored event. Parse failures are captured on the job (not thrown) so the
    // block never faults and the error can be reported downstream.
    var parseBlock = new TransformBlock<Job, Job>(job =>
    {
        if (job.StoredEvent != null)
        {
            job.ShouldHandle = eventConsumer.Handles(job.StoredEvent);
        }

        if (job.ShouldHandle)
        {
            try
            {
                job.Event = ParseKnownEvent(job.StoredEvent!);
            }
            catch (Exception ex)
            {
                job.Exception = ex;
            }
        }

        return job;
    },
    new ExecutionDataflowBlockOptions
    {
        BoundedCapacity = batchSize,
        MaxDegreeOfParallelism = 1,
        MaxMessagesPerTask = 1
    });

    // Stage 2: collect parsed jobs into batches of up to batchSize items,
    // flushing a partial batch after batchDelay milliseconds.
    var batchBlock = AsyncHelper.CreateBatchBlock<Job>(batchSize, batchDelay,
        new GroupingDataflowBlockOptions
        {
            BoundedCapacity = batchSize * 2
        });

    // Stage 3: dispatch each batch to the grain. Jobs are grouped by sender so
    // that batches originating from a previous (replaced) subscription are
    // silently dropped by the ReferenceEquals guard below.
    var handleBlock = new ActionBlock<IList<Job>>(async jobs =>
    {
        foreach (var group in jobs.GroupBy<Job, object>(x => x.Sender))
        {
            var groupSender = group.Key;

            if (ReferenceEquals(groupSender, eventSubscription.Sender))
            {
                // NOTE(review): this scans the WHOLE batch for an exception, not
                // just the current sender's group — confirm that is intended.
                var firstError = jobs.FirstOrDefault(x => x.Exception != null)?.Exception;

                if (firstError != null)
                {
                    await grain.OnErrorAsync(Sender, firstError);
                }
                else
                {
                    await grain.OnEventsAsync(Sender, GetEvents(group), GetPosition(group));
                }
            }
        }
    },
    new ExecutionDataflowBlockOptions
    {
        BoundedCapacity = 2,
        MaxDegreeOfParallelism = 1,
        MaxMessagesPerTask = 1,
        TaskScheduler = scheduler
    });

    // Wire the pipeline parse -> batch -> handle, propagating completion so
    // that completing the start block eventually completes the end block.
    parseBlock.LinkTo(batchBlock, new DataflowLinkOptions
    {
        PropagateCompletion = true
    });

    batchBlock.LinkTo(handleBlock, new DataflowLinkOptions
    {
        PropagateCompletion = true
    });

    pipelineStart = parseBlock;
    pipelineEnd = handleBlock;

    // Subscribe last: the pipeline must be ready before events can flow in.
    eventSubscription = factory(this);
}