public DiagnosticPipeline(
            IHealthReporter healthReporter,
            IReadOnlyCollection <IObservable <EventData> > inputs,
            IReadOnlyCollection <IFilter> globalFilters,
            IReadOnlyCollection <EventSink> sinks,
            DiagnosticPipelineConfiguration pipelineConfiguration = null,
            bool disposeDependencies = false)
        {
            Requires.NotNull(healthReporter, nameof(healthReporter));
            Requires.NotNull(inputs, nameof(inputs));
            Requires.Argument(inputs.Count > 0, nameof(inputs), "There must be at least one input");
            Requires.NotNull(sinks, nameof(sinks));
            Requires.Argument(sinks.Count > 0, nameof(sinks), "There must be at least one sink");

            this.eventsInProgress = 0;

            this.pipelineConfiguration = pipelineConfiguration ?? new DiagnosticPipelineConfiguration();

            // An estimate of how many batches of events to allow inside the pipeline.
            // We want to be able to process a full buffer of events, but also keep enough batches in flight in case of high concurrency.
            int MaxNumberOfBatchesInProgress = Math.Max(
                5 * this.pipelineConfiguration.MaxConcurrency,
                this.pipelineConfiguration.PipelineBufferSize / this.pipelineConfiguration.MaxEventBatchSize);
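            // For example, with MaxConcurrency = 8, PipelineBufferSize = 1000 and MaxEventBatchSize = 100
            // (illustrative values, not necessarily the defaults) this evaluates to Math.Max(40, 10) = 40 batches.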

            this.Inputs = inputs;
            this.Sinks  = sinks;

            // Just play nice and make sure there is always something to enumerate over
            this.GlobalFilters = globalFilters ?? new IFilter[0];

            this.HealthReporter          = healthReporter;
            this.cancellationTokenSource = new CancellationTokenSource();
            var propagateCompletion = new DataflowLinkOptions()
            {
                PropagateCompletion = true
            };

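            // Collected so the pipeline can later unlink its blocks (and stop the batch-trigger timer)
            // and wait for every block to finish processing.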
            this.pipelineLinkDisposables = new List <IDisposable>();
            this.pipelineCompletionTasks = new List <Task>();

            var inputBuffer = new BufferBlock <EventData>(
                new DataflowBlockOptions()
            {
                BoundedCapacity   = this.pipelineConfiguration.PipelineBufferSize,
                CancellationToken = this.cancellationTokenSource.Token
            });

            this.pipelineHead = inputBuffer;
            this.pipelineCompletionTasks.Add(inputBuffer.Completion);

            var batcher = new BatchBlock <EventData>(
                this.pipelineConfiguration.MaxEventBatchSize,
                new GroupingDataflowBlockOptions()
            {
                BoundedCapacity   = this.pipelineConfiguration.MaxEventBatchSize,
                CancellationToken = this.cancellationTokenSource.Token
            }
                );

            this.pipelineLinkDisposables.Add(inputBuffer.LinkTo(batcher, propagateCompletion));
            this.pipelineCompletionTasks.Add(batcher.Completion);

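            // Flush partially filled batches on a fixed cadence so events are not held back when input volume is low.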
            this.pipelineLinkDisposables.Add(new Timer(
                                                 (unused) => batcher.TriggerBatch(),
                                                 state: null,
                                                 dueTime: TimeSpan.FromMilliseconds(this.pipelineConfiguration.MaxBatchDelayMsec),
                                                 period: TimeSpan.FromMilliseconds(this.pipelineConfiguration.MaxBatchDelayMsec)));

            ISourceBlock <EventData[]> sinkSource;
            FilterAction filterTransform;

            if (this.GlobalFilters.Count > 0)
            {
                filterTransform = new FilterAction(
                    this.GlobalFilters,
                    this.cancellationTokenSource.Token,
                    MaxNumberOfBatchesInProgress,
                    this.pipelineConfiguration.MaxConcurrency,
                    healthReporter,
                    this.OnEventsFilteredOut);
                var globalFiltersBlock = filterTransform.GetFilterBlock();
                this.pipelineLinkDisposables.Add(batcher.LinkTo(globalFiltersBlock, propagateCompletion));
                this.pipelineCompletionTasks.Add(globalFiltersBlock.Completion);
                sinkSource = globalFiltersBlock;
            }
            else
            {
                sinkSource = batcher;
            }

            if (sinks.Count > 1)
            {
                // After broadcasting we will effectively have (sinks.Count - 1) * batch.Length more events in the pipeline,
                // because the broadcaster clones the events for each sink (filter-output combination).
                var eventCounter = new TransformBlock <EventData[], EventData[]>(
                    (batch) => { Interlocked.Add(ref this.eventsInProgress, (sinks.Count - 1) * batch.Length);  return(batch); },
                    new ExecutionDataflowBlockOptions()
                {
                    BoundedCapacity   = MaxNumberOfBatchesInProgress,
                    CancellationToken = this.cancellationTokenSource.Token
                });
                this.pipelineLinkDisposables.Add(sinkSource.LinkTo(eventCounter, propagateCompletion));
                this.pipelineCompletionTasks.Add(eventCounter.Completion);

                var broadcaster = new BroadcastBlock <EventData[]>(
                    (events) => events?.Select((e) => e.DeepClone()).ToArray(),
                    new DataflowBlockOptions()
                {
                    BoundedCapacity   = MaxNumberOfBatchesInProgress,
                    CancellationToken = this.cancellationTokenSource.Token
                });
                this.pipelineLinkDisposables.Add(eventCounter.LinkTo(broadcaster, propagateCompletion));
                this.pipelineCompletionTasks.Add(broadcaster.Completion);
                sinkSource = broadcaster;
            }

            foreach (var sink in sinks)
            {
                ISourceBlock <EventData[]> outputSource = sinkSource;
                if (sink.Filters != null && sink.Filters.Count > 0)
                {
                    filterTransform = new FilterAction(
                        sink.Filters,
                        this.cancellationTokenSource.Token,
                        MaxNumberOfBatchesInProgress,
                        this.pipelineConfiguration.MaxConcurrency,
                        healthReporter,
                        this.OnEventsFilteredOut);
                    var filterBlock = filterTransform.GetFilterBlock();
                    this.pipelineLinkDisposables.Add(sinkSource.LinkTo(filterBlock, propagateCompletion));
                    this.pipelineCompletionTasks.Add(filterBlock.Completion);
                    outputSource = filterBlock;
                }

                OutputAction outputAction = new OutputAction(
                    sink.Output,
                    this.cancellationTokenSource.Token,
                    MaxNumberOfBatchesInProgress,
                    this.pipelineConfiguration.MaxConcurrency,
                    healthReporter,
                    (eventsSentCount) => Interlocked.Add(ref this.eventsInProgress, -eventsSentCount));
                var outputBlock = outputAction.GetOutputBlock();
                this.pipelineLinkDisposables.Add(outputSource.LinkTo(outputBlock, propagateCompletion));
                this.pipelineCompletionTasks.Add(outputBlock.Completion);
            }

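            // Every event accepted into the input buffer increments eventsInProgress; the output actions
            // decrement it as events are sent, so the counter reflects how many events are still in flight.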
            IObserver <EventData> inputBufferObserver = new TargetBlockObserver <EventData>(
                inputBuffer,
                this.HealthReporter,
                () => Interlocked.Increment(ref this.eventsInProgress));

            this.inputSubscriptions = new List <IDisposable>(inputs.Count);
            foreach (var input in inputs)
            {
                this.inputSubscriptions.Add(input.Subscribe(inputBufferObserver));
            }

            this.disposed            = false;
            this.disposeDependencies = disposeDependencies;
        }
        public DiagnosticPipeline(
            IHealthReporter healthReporter,
            IReadOnlyCollection <IObservable <EventData> > inputs,
            IReadOnlyCollection <IFilter> globalFilters,
            IReadOnlyCollection <EventSink> sinks,
            DiagnosticPipelineConfiguration pipelineConfiguration = null,
            bool disposeDependencies    = false,
            TaskScheduler taskScheduler = null)
        {
            Requires.NotNull(healthReporter, nameof(healthReporter));
            Requires.NotNull(inputs, nameof(inputs));
            Requires.Argument(inputs.Count > 0, nameof(inputs), "There must be at least one input");
            Requires.NotNull(sinks, nameof(sinks));
            Requires.Argument(sinks.Count > 0, nameof(sinks), "There must be at least one sink");

            this.batcherTimerDisposalLock = new object();
            this.pipelineConfiguration    = pipelineConfiguration ?? new DiagnosticPipelineConfiguration();
            taskScheduler = taskScheduler ?? TaskScheduler.Current;

            // An estimate of how many batches of events to allow inside the pipeline.
            // We want to be able to process a full buffer of events, but also keep enough batches in flight in case of high concurrency.
            int MaxNumberOfBatchesInProgress = Math.Max(
                5 * this.pipelineConfiguration.MaxConcurrency,
                this.pipelineConfiguration.PipelineBufferSize / this.pipelineConfiguration.MaxEventBatchSize);

            this.Inputs = inputs;
            this.Sinks  = sinks;

            // Just play nice and make sure there is always something to enumerate over
            this.GlobalFilters = globalFilters ?? new IFilter[0];

            this.HealthReporter          = healthReporter;
            this.cancellationTokenSource = new CancellationTokenSource();
            var propagateCompletion = new DataflowLinkOptions()
            {
                PropagateCompletion = true
            };

            // One disposable for each input subscription.
            this.inputSubscriptions = new List <IDisposable>(inputs.Count);

            var inputBuffer = new BufferBlock <EventData>(
                new DataflowBlockOptions()
            {
                BoundedCapacity   = this.pipelineConfiguration.PipelineBufferSize,
                CancellationToken = this.cancellationTokenSource.Token,
                TaskScheduler     = taskScheduler
            });

            this.pipelineHead = inputBuffer;

            var batcher = new BatchBlock <EventData>(
                this.pipelineConfiguration.MaxEventBatchSize,
                new GroupingDataflowBlockOptions()
            {
                BoundedCapacity   = this.pipelineConfiguration.PipelineBufferSize,
                CancellationToken = this.cancellationTokenSource.Token,
                TaskScheduler     = taskScheduler
            }
                );

            inputBuffer.LinkTo(batcher, propagateCompletion);

            ISourceBlock <EventData[]> sinkSource;
            FilterAction filterTransform;

            if (this.GlobalFilters.Count > 0)
            {
                filterTransform = new FilterAction(
                    this.GlobalFilters,
                    this.cancellationTokenSource.Token,
                    MaxNumberOfBatchesInProgress,
                    this.pipelineConfiguration.MaxConcurrency,
                    healthReporter,
                    taskScheduler);
                var globalFiltersBlock = filterTransform.GetFilterBlock();
                batcher.LinkTo(globalFiltersBlock, propagateCompletion);
                sinkSource = globalFiltersBlock;
            }
            else
            {
                sinkSource = batcher;
            }

            bool usingBroadcastBlock = sinks.Count > 1;

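            // With more than one sink each batch is broadcast, and the cloning delegate deep-copies the events
            // so every sink (and its filters) works on its own copy.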
            if (usingBroadcastBlock)
            {
                var broadcaster = new BroadcastBlock <EventData[]>(
                    (events) => events?.Select((e) => e.DeepClone()).ToArray(),
                    new DataflowBlockOptions()
                {
                    BoundedCapacity   = MaxNumberOfBatchesInProgress,
                    CancellationToken = this.cancellationTokenSource.Token,
                    TaskScheduler     = taskScheduler
                });
                sinkSource.LinkTo(broadcaster, propagateCompletion);
                sinkSource = broadcaster;
            }

            this.outputCompletionTasks = new List <Task>(sinks.Count);
            foreach (var sink in sinks)
            {
                ISourceBlock <EventData[]> outputSource = sinkSource;

                if (sink.Filters != null && sink.Filters.Count > 0)
                {
                    filterTransform = new FilterAction(
                        sink.Filters,
                        this.cancellationTokenSource.Token,
                        MaxNumberOfBatchesInProgress,
                        this.pipelineConfiguration.MaxConcurrency,
                        healthReporter,
                        taskScheduler);
                    var filterBlock = filterTransform.GetFilterBlock();

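                    // BroadcastBlock keeps only the most recent message per target, so a slow sink can silently
                    // drop batches; the loss-reporting propagator is inserted to surface such drops via the health reporter.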
                    if (usingBroadcastBlock)
                    {
                        var lossReportingPropagator = new LossReportingPropagatorBlock <EventData[]>(this.HealthReporter);
                        sinkSource.LinkTo(lossReportingPropagator, propagateCompletion);
                        lossReportingPropagator.LinkTo(filterBlock, propagateCompletion);
                    }
                    else
                    {
                        sinkSource.LinkTo(filterBlock, propagateCompletion);
                    }
                    outputSource = filterBlock;
                }
                else if (usingBroadcastBlock)
                {
                    var lossReportingPropagator = new LossReportingPropagatorBlock <EventData[]>(this.HealthReporter);
                    sinkSource.LinkTo(lossReportingPropagator, propagateCompletion);
                    outputSource = lossReportingPropagator;
                }

                OutputAction outputAction = new OutputAction(
                    sink.Output,
                    this.cancellationTokenSource.Token,
                    MaxNumberOfBatchesInProgress,
                    this.pipelineConfiguration.MaxConcurrency,
                    healthReporter,
                    taskScheduler);
                var outputBlock = outputAction.GetOutputBlock();
                outputSource.LinkTo(outputBlock, propagateCompletion);
                this.outputCompletionTasks.Add(outputBlock.Completion);
            }

            IObserver <EventData> inputBufferObserver = new TargetBlockObserver <EventData>(inputBuffer, this.HealthReporter);

            foreach (var input in inputs)
            {
                this.inputSubscriptions.Add(input.Subscribe(inputBufferObserver));
            }

            this.disposed            = false;
            this.disposeDependencies = disposeDependencies;

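            // The batch-trigger timer is one-shot and re-arms itself after each tick, so a slow TriggerBatch call
            // cannot overlap the next tick; the lock and the disposed check coordinate re-arming with Dispose.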
            this.batcherTimer = new Timer(
                (_) => {
                try
                {
                    lock (this.batcherTimerDisposalLock)
                    {
                        if (!this.disposed)
                        {
                            batcher.TriggerBatch();

                            this.batcherTimer.Change(dueTime: TimeSpan.FromMilliseconds(this.pipelineConfiguration.MaxBatchDelayMsec), period: Timeout.InfiniteTimeSpan);
                        }
                    }
                }
                catch { }
            },
                state: null,
                dueTime: TimeSpan.FromMilliseconds(this.pipelineConfiguration.MaxBatchDelayMsec),
                period: Timeout.InfiniteTimeSpan);
        }
Example #3
        public DiagnosticPipeline(
            IHealthReporter healthReporter,
            IReadOnlyCollection <IObservable <EventData> > inputs,
            IReadOnlyCollection <IFilter> globalFilters,
            IReadOnlyCollection <EventSink> sinks,
            DiagnosticPipelineConfiguration pipelineConfiguration = null,
            bool disposeDependencies = false)
        {
            Requires.NotNull(healthReporter, nameof(healthReporter));
            Requires.NotNull(inputs, nameof(inputs));
            Requires.Argument(inputs.Count > 0, nameof(inputs), "There must be at least one input");
            Requires.NotNull(sinks, nameof(sinks));
            Requires.Argument(sinks.Count > 0, nameof(sinks), "There must be at least one sink");

            this.pipelineConfiguration = pipelineConfiguration ?? new DiagnosticPipelineConfiguration();

            this.Inputs = inputs;
            this.Sinks  = sinks;
            // Just play nice and make sure there is always something to enumerate over
            this.GlobalFilters           = globalFilters ?? new IFilter[0];
            this.HealthReporter          = healthReporter;
            this.cancellationTokenSource = new CancellationTokenSource();
            var propagateCompletion = new DataflowLinkOptions()
            {
                PropagateCompletion = true
            };

            this.pipelineDisposables = new List <IDisposable>();

            var inputBuffer = new BufferBlock <EventData>(
                new DataflowBlockOptions()
            {
                BoundedCapacity   = this.pipelineConfiguration.PipelineBufferSize,
                CancellationToken = this.cancellationTokenSource.Token
            });

            this.pipelineHead = inputBuffer;

            var batcher = new BatchBlock <EventData>(
                this.pipelineConfiguration.MaxEventBatchSize,
                new GroupingDataflowBlockOptions()
            {
                BoundedCapacity   = this.pipelineConfiguration.MaxEventBatchSize,
                CancellationToken = this.cancellationTokenSource.Token
            }
                );

            this.pipelineDisposables.Add(inputBuffer.LinkTo(batcher, propagateCompletion));

            this.pipelineDisposables.Add(new Timer(
                                             (unused) => batcher.TriggerBatch(),
                                             state: null,
                                             dueTime: TimeSpan.FromMilliseconds(this.pipelineConfiguration.MaxBatchDelayMsec),
                                             period: TimeSpan.FromMilliseconds(this.pipelineConfiguration.MaxBatchDelayMsec)));

            ISourceBlock <EventData[]> sinkSource;
            FilterAction filterTransform;

            if (this.GlobalFilters.Count > 0)
            {
                filterTransform = new FilterAction(
                    this.GlobalFilters,
                    this.cancellationTokenSource.Token,
                    this.pipelineConfiguration.MaxEventBatchSize,
                    this.pipelineConfiguration.MaxConcurrency,
                    healthReporter);
                var globalFiltersBlock = filterTransform.GetFilterBlock();
                this.pipelineDisposables.Add(batcher.LinkTo(globalFiltersBlock, propagateCompletion));
                sinkSource = globalFiltersBlock;
            }
            else
            {
                sinkSource = batcher;
            }

            if (sinks.Count > 1)
            {
                var broadcaster = new BroadcastBlock <EventData[]>(
                    (events) => events?.Select((e) => e.DeepClone()).ToArray(),
                    new DataflowBlockOptions()
                {
                    BoundedCapacity   = this.pipelineConfiguration.MaxEventBatchSize,
                    CancellationToken = this.cancellationTokenSource.Token
                });
                this.pipelineDisposables.Add(sinkSource.LinkTo(broadcaster, propagateCompletion));
                sinkSource = broadcaster;
            }

            foreach (var sink in sinks)
            {
                ISourceBlock <EventData[]> outputSource = sinkSource;
                if (sink.Filters != null && sink.Filters.Count > 0)
                {
                    filterTransform = new FilterAction(
                        sink.Filters,
                        this.cancellationTokenSource.Token,
                        this.pipelineConfiguration.MaxEventBatchSize,
                        this.pipelineConfiguration.MaxConcurrency,
                        healthReporter);
                    var filterBlock = filterTransform.GetFilterBlock();
                    this.pipelineDisposables.Add(sinkSource.LinkTo(filterBlock, propagateCompletion));
                    outputSource = filterBlock;
                }

                OutputAction outputAction = new OutputAction(
                    sink.Output,
                    this.cancellationTokenSource.Token,
                    this.pipelineConfiguration.MaxEventBatchSize,
                    this.pipelineConfiguration.MaxConcurrency,
                    healthReporter);
                ActionBlock <EventData[]> outputBlock = outputAction.GetOutputBlock();
                this.pipelineDisposables.Add(outputSource.LinkTo(outputBlock, propagateCompletion));
            }

            IObserver <EventData> inputBufferObserver = new TargetBlockObserver <EventData>(inputBuffer, this.HealthReporter);

            this.inputSubscriptions = new List <IDisposable>(inputs.Count);
            foreach (var input in inputs)
            {
                this.inputSubscriptions.Add(input.Subscribe(inputBufferObserver));
            }

            this.disposed            = false;
            this.disposeDependencies = disposeDependencies;
        }
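
Below is a minimal construction sketch, not part of the examples above. CreatePipeline is a hypothetical helper, the caller is assumed to supply the health reporter, the input, and the sink, and the DiagnosticPipelineConfiguration properties read by these constructors are assumed to be publicly settable; the values shown are illustrative, not recommended defaults.

        // Hypothetical helper showing how the constructor might be called.
        public static DiagnosticPipeline CreatePipeline(
            IHealthReporter healthReporter,
            IObservable<EventData> input,
            EventSink sink)
        {
            var configuration = new DiagnosticPipelineConfiguration
            {
                PipelineBufferSize = 1000,   // capacity of the input buffer
                MaxEventBatchSize  = 100,    // events per batch handed to filters and outputs
                MaxBatchDelayMsec  = 500,    // cadence at which partial batches are flushed
                MaxConcurrency     = 8       // parallelism of the filter and output blocks
            };

            return new DiagnosticPipeline(
                healthReporter,
                new[] { input },                        // at least one input is required
                globalFilters: null,                    // null is tolerated; it becomes an empty filter array
                sinks: new[] { sink },                  // at least one sink is required
                pipelineConfiguration: configuration,
                disposeDependencies: true);
        }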