/// <summary>
/// Links to the given target block.
/// Note that FlowCompletion will wait for the completion of the given block.
/// </summary>
/// <param name="targetBlock">The block to link to.</param>
/// <param name="linkOptions">Link options; completion is propagated by default.</param>
/// <returns>This flow block, for fluent chaining.</returns>
public IFlowBlock LinkTo(ITargetBlock<TOut> targetBlock, DataflowLinkOptions linkOptions = null)
{
    if (_transformer != null)
    {
        // Propagate completion unless the caller supplied explicit link options.
        linkOptions = linkOptions ?? new DataflowLinkOptions { PropagateCompletion = true };
        _transformer.LinkTo(targetBlock, linkOptions);
        AddCompletionTask(targetBlock.Completion);
    }
    else
    {
        BroadcastTo(targetBlock);
        //throw new Exception("Can't link blocks which don't produce data! Use " + nameof(BroadcastTo) + " instead!");
    }
    return this;
}
/// <summary>
/// Handles the completion of all blocks up to this one.
/// </summary>
/// <param name="obj">The completion task of the preceding block.</param>
private async Task HandleCompletion(Task obj)
{
    // Link to a null target so that the flow completes.
    if (_lastTransformerBlockOnCompletion != null)
    {
        var nullTarget = DataflowBlock.NullTarget<TOut>();
        _lastTransformerBlockOnCompletion.LinkTo(nullTarget);
    }
    if (_transformerOnCompletion != null)
    {
        IEnumerable<TOut> elements = GetCollectedItems();
        if (elements != null)
        {
            // Post all items to the first transformer.
            foreach (TOut element in elements)
            {
                if (element == null)
                {
                    continue;
                }
                await _transformerOnCompletion.SendAsync(element);
            }
        }
        // Signal completion and wait for the last transformer to complete.
        _transformerOnCompletion.Complete();
        await _lastTransformerBlockOnCompletion.Completion;
    }
    await WaitForContinuingTasks();
}
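// Illustrative sketch (not from the original source): the NullTarget trick used above.
// A propagator's Completion task only finishes once its output buffer is drained, so
// linking to DataflowBlock.NullTarget<T>() discards leftover output and lets the
// completion await return.
private static async Task NullTargetDrainSketchAsync()
{
    var transform = new TransformBlock<int, int>(n => n * 2);
    transform.LinkTo(DataflowBlock.NullTarget<int>()); // discard unread results

    for (int i = 0; i < 3; i++)
        transform.Post(i);

    transform.Complete();
    await transform.Completion; // finishes even though nobody reads the output
}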
public IMessagePipeline<TSource, T> Block<T>(IPropagatorBlock<TDestination, T> block)
{
    if (_pipeline is null)
    {
        return new MessagePipeline<TSource, T>(_subscription, (IPropagatorBlock<TSource, T>)block);
    }
    _pipeline.LinkTo(block);
    return new MessagePipeline<TSource, T>(_subscription, DataflowBlock.Encapsulate(_pipeline, block));
}
/// <summary>
/// Initializes the DataflowBlock
/// </summary>
/// <param name="importJobInformation"><see cref="ImportJobInformation"/> of the ImportJob this DataflowBlock belongs to</param>
/// <param name="inputBlockOptions"><see cref="ExecutionDataflowBlockOptions"/> for the <see cref="InputBlock"/></param>
/// <param name="innerBlockOptions"><see cref="ExecutionDataflowBlockOptions"/> for the <see cref="InnerBlock"/></param>
/// <param name="outputBlockOptions"><see cref="ExecutionDataflowBlockOptions"/> for the <see cref="OutputBlock"/></param>
/// <param name="blockname">Name of this DataflowBlock (must be unique in a given chain of DataflowBlocks)</param>
/// <param name="isRestorePointAfterDeserialization">
/// <c>true</c>, if after deserialization from disk, <see cref="PendingImportResourceNewGen"/>s are restored to
/// this block. If <c>false</c>, they are restored to the last passed DataflowBlock having this parameter set to <c>true</c>
/// </param>
/// <param name="parentImportJobController">ImportJobController to which this DataflowBlock belongs</param>
/// <param name="ct">CancellationToken used to cancel this DataflowBlock</param>
protected ImporterWorkerDataflowBlockBase(ImportJobInformation importJobInformation, ExecutionDataflowBlockOptions inputBlockOptions, ExecutionDataflowBlockOptions innerBlockOptions, ExecutionDataflowBlockOptions outputBlockOptions, String blockname, bool isRestorePointAfterDeserialization, ImportJobController parentImportJobController, CancellationToken ct)
{
    _blockName = blockname;
    _isRestorePointAfterDeserialization = isRestorePointAfterDeserialization;
    _ct = ct;
    ImportJobInformation = importJobInformation;
    InputBlockOptions = inputBlockOptions;
    InnerBlockOptions = innerBlockOptions;
    OutputBlockOptions = outputBlockOptions;
    ParentImportJobController = parentImportJobController;
    _stopWatch = new Stopwatch();
    _tcs = new TaskCompletionSource<object>();
    Activated = new AsyncManualResetEvent(InnerBlockOptions.CancellationToken);
    InputBlock = new TransformBlock<PendingImportResourceNewGen, PendingImportResourceNewGen>(p => InputBlockMethod(p), InputBlockOptions);
    OutputBlock = new TransformManyBlock<PendingImportResourceNewGen, PendingImportResourceNewGen>(p => OutputBlockMethod(p), OutputBlockOptions);
    // ReSharper disable once DoNotCallOverridableMethodsInConstructor
    InnerBlock = CreateInnerBlock();
    InnerBlock.LinkTo(OutputBlock, new DataflowLinkOptions { PropagateCompletion = true });
    InputBlock.Completion.ContinueWith(OnAnyBlockFaulted, TaskContinuationOptions.OnlyOnFaulted);
    InnerBlock.Completion.ContinueWith(OnAnyBlockFaulted, TaskContinuationOptions.OnlyOnFaulted);
    OutputBlock.Completion.ContinueWith(OnAnyBlockFaulted, TaskContinuationOptions.OnlyOnFaulted);
    Task.WhenAll(InputBlock.Completion, InnerBlock.Completion, OutputBlock.Completion).ContinueWith(OnAllBlocksFinished);
}
public S3BatchRemover(IAmazonS3 s3Client, string bucketName, int batchSize, int maxDegreeOfParallelism, CancellationToken cancellationToken = default)
{
    _s3Client = s3Client ?? throw new ArgumentNullException(nameof(s3Client));
    _bucketName = bucketName ?? throw new ArgumentNullException(nameof(bucketName));
    _cancellationToken = cancellationToken;
    _targetBlock = new ActionBlock<string[]>(RemoveFiles, new ExecutionDataflowBlockOptions
    {
        CancellationToken = cancellationToken,
        MaxDegreeOfParallelism = maxDegreeOfParallelism
    });
    _propagatorBlock = new BatchBlock<string>(batchSize, new GroupingDataflowBlockOptions
    {
        CancellationToken = cancellationToken,
        BoundedCapacity = batchSize * Environment.ProcessorCount
    });
    _ = _propagatorBlock.LinkTo(_targetBlock, new DataflowLinkOptions { PropagateCompletion = true });
}
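// Illustrative sketch (names are not part of S3BatchRemover's API): the same
// batch-then-act pattern in isolation. The BatchBlock groups single keys into
// arrays; Complete() flushes any partial batch, and PropagateCompletion carries
// completion through to the ActionBlock.
private static async Task BatchThenActSketchAsync()
{
    var batcher = new BatchBlock<string>(batchSize: 100);
    var remover = new ActionBlock<string[]>(keys => Console.WriteLine($"would delete {keys.Length} objects"));
    batcher.LinkTo(remover, new DataflowLinkOptions { PropagateCompletion = true });

    foreach (var key in new[] { "a.txt", "b.txt", "c.txt" })
        await batcher.SendAsync(key);

    batcher.Complete();       // flushes the partial batch of 3
    await remover.Completion; // completes via PropagateCompletion
}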
protected override void Initialize()
{
    IPropagatorBlock<IProjectVersionedValue<ILaunchSettings>, IProjectVersionedValue<IReadOnlyList<IEnumValue>>> debugProfilesBlock =
        DataflowBlockSlim.CreateTransformBlock<IProjectVersionedValue<ILaunchSettings>, IProjectVersionedValue<IReadOnlyList<IEnumValue>>>(
            update =>
            {
                // Compute the new enum values from the profile provider
                var generatedResult = DebugProfileEnumValuesGenerator.GetEnumeratorEnumValues(update.Value).ToImmutableList();
                _dataSourceVersion++;
                ImmutableDictionary<NamedIdentity, IComparable> dataSources = ImmutableDictionary<NamedIdentity, IComparable>.Empty.Add(DataSourceKey, DataSourceVersion);
                return new ProjectVersionedValue<IReadOnlyList<IEnumValue>>(generatedResult, dataSources);
            });

    IBroadcastBlock<IProjectVersionedValue<IReadOnlyList<IEnumValue>>> broadcastBlock =
        DataflowBlockSlim.CreateBroadcastBlock<IProjectVersionedValue<IReadOnlyList<IEnumValue>>>();

    // The interface has two definitions of SourceBlock: one from
    // ILaunchSettingsProvider, and one from IProjectValueDataSource<T> (via
    // IVersionedLaunchSettingsProvider). We need the cast to pick the proper one.
    _launchProfileProviderLink = ((IProjectValueDataSource<ILaunchSettings>)LaunchSettingProvider).SourceBlock.LinkTo(
        debugProfilesBlock,
        linkOptions: DataflowOption.PropagateCompletion);

    JoinUpstreamDataSources(LaunchSettingProvider);

    _debugProviderLink = debugProfilesBlock.LinkTo(broadcastBlock, DataflowOption.PropagateCompletion);
    _publicBlock = broadcastBlock.SafePublicize();
}
public ThrottledProducerConsumer(TimeSpan Interval, int MaxPerInterval, Int32 QueueBoundedMax = 5, Action<T> ConsumerAction = null, Int32 MaxConsumers = 1, Int32 MaxThrottleBuffer = 20, Int32 MaxDegreeOfParallelism = 10)
{
    //-- Probably best to link MaxPerInterval and MaxThrottleBuffer
    //   and MaxConsumers with MaxDegreeOfParallelism
    var consumerOptions = new ExecutionDataflowBlockOptions { BoundedCapacity = 1 };
    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    //-- Create the Queue
    _queue = new BufferBlock<T>(new DataflowBlockOptions { BoundedCapacity = QueueBoundedMax });
    //-- Create and link the throttle block
    _throttleBlock = CreateThrottleBlock<T>(Interval, MaxPerInterval);
    _queue.LinkTo(_throttleBlock, linkOptions);
    //-- Create and link the consumer(s) to the throttle block
    var consumerAction = (ConsumerAction != null) ? ConsumerAction : new Action<T>(ConsumeItem);
    _consumers = new List<Task>();
    for (int i = 0; i < MaxConsumers; i++)
    {
        var consumer = new ActionBlock<T>(consumerAction, consumerOptions);
        _throttleBlock.LinkTo(consumer, linkOptions);
        _consumers.Add(consumer.Completion);
    }
    //-- TODO: Add some cancellation tokens to shut this thing down
}
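// Hypothetical sketch only: CreateThrottleBlock is not shown in the source, but one
// plausible shape is a TransformBlock that paces items with a delay. This is an
// assumption, not the original implementation (a strict token bucket would differ).
private static IPropagatorBlock<TItem, TItem> CreateThrottleBlockSketch<TItem>(TimeSpan interval, int maxPerInterval)
{
    var delayPerItem = TimeSpan.FromTicks(interval.Ticks / maxPerInterval);
    return new TransformBlock<TItem, TItem>(async item =>
    {
        await Task.Delay(delayPerItem); // spreads maxPerInterval items across the interval
        return item;
    }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
}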
public void LinkTo(ITargetBlock<OperationMessage> target)
{
    _endBlock.LinkTo(target, new DataflowLinkOptions { PropagateCompletion = true });
}
public KeyProcessor(IActiveWord activeWord)
{
    _activeWord = activeWord;
    _incomingKeyPresses = new BroadcastBlock<KeyData>(ProcessKey);
    _incomingCompletedWords = new TransformManyBlock<KeyData, CompletedWord>(ProcessWord);
    _incomingKeyPresses.LinkTo(_incomingCompletedWords);
}
public void Connect()
{
    first = init.Value.Create();
    last = slicer.Value.Create();
    first.LinkTo(last, new DataflowLinkOptions { PropagateCompletion = true });
}
public DataFlowMessageBus()
{
    _broadcast = new BroadcastBlock<T>(null);
    _mailBox = new BufferBlock<T>();
    _cleanup.Add(_mailBox.LinkTo(_broadcast, new DataflowLinkOptions { PropagateCompletion = true }));
}
private TcpTransport(string host, int port)
{
    _host = host;
    _port = port;
    _bufferBlock = new BufferBlock<string>();
    _senderBlock = CreateSenderBlock();
    _bufferBlock.LinkTo(_senderBlock);
}
public IDisposable LinkTo(ITargetBlock<TOutput> target, DataflowLinkOptions linkOptions)
{
    if (_task.Status == TaskStatus.Created)
    {
        _task.Start();
    }
    return _block.LinkTo(target, linkOptions);
}
// Chains blocks fluently and propagates completion by default.
public static IPropagatorBlock<TInput, TOutput> Chain<TInput, TLink, TOutput>(this IPropagatorBlock<TInput, TLink> source, IPropagatorBlock<TLink, TOutput> target, DataflowLinkOptions? options = null)
{
    source.LinkTo(target, options ?? new DataflowLinkOptions { PropagateCompletion = true });
    return DataflowBlock.Encapsulate<TInput, TOutput>(source, target);
}
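// Usage sketch for Chain (blocks and values here are illustrative): the two
// transforms fuse into a single IPropagatorBlock, and completion flows through
// the encapsulated pair because Chain defaults to PropagateCompletion.
private static async Task ChainUsageSketchAsync()
{
    var parse = new TransformBlock<string, int>(s => int.Parse(s));
    var square = new TransformBlock<int, int>(n => n * n);
    IPropagatorBlock<string, int> pipeline = parse.Chain(square);

    pipeline.Post("7");
    pipeline.Complete();
    Console.WriteLine(await pipeline.ReceiveAsync()); // 49
    await pipeline.Completion;
}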
protected override async Task StartTask(CancellationToken token)
{
    _consumerBlock = new ActionBlock<T>(item => { OnItemAvailable?.Invoke(item); });
    _propagationBlock = CreatePropagationBlock();
    _propagationBlock.LinkTo(_consumerBlock, new DataflowLinkOptions { PropagateCompletion = true });
    await DoWork(token);
}
public void Activate(IMediaBrowsing mediaBrowsingCallback, IImportResultHandler importResultHandler)
{
    _mediaBrowsingCallback = mediaBrowsingCallback;
    _importResultHandler = importResultHandler;
    _suspensionLink = InputBlock.LinkTo(InnerBlock, new DataflowLinkOptions { PropagateCompletion = true });
    _stopWatch.Start();
    Activated.Set();
}
public PrioritizedQueueServiceTask(string name, int idleTimeinSeconds = 10) : base(name)
{
    _idleTimeInSeconds = idleTimeinSeconds;
    _loopWaitHandleList = new Dictionary<int, TaskEventWaitHandle>();
    _consumerBlock = new ActionBlock<T>(item => { OnItemAvailable?.Invoke(item); });
    _propagationBlock = CreatePropagationBlock();
    _propagationBlock.LinkTo(_consumerBlock, new DataflowLinkOptions { PropagateCompletion = true });
}
public MongoUpdateBatch(IMongoCollection<TRecord> collection, uint batchSize = 10000, CancellationToken? cancellationToken = null)
{
    _block = BatchedBlockingBlock<FindAndModifyArgs<TRecord>>.CreateBlock(batchSize);
    _block.LinkTo(new ActionBlock<FindAndModifyArgs<TRecord>[]>(UpdateAll), new DataflowLinkOptions { PropagateCompletion = true });
    _collection = collection;
    _cancellationToken = cancellationToken ?? CancellationToken.None;
}
private TransformSafeBlock(IPropagatorBlock<Either<TLeft, TRightInput>, Either<TLeft, TRightOutput>> transformRightBlock)
{
    this.transformRightBlock = transformRightBlock;
    this.transformRightBlock.LinkTo(outputBufferBlock.AddInput(), new DataflowLinkOptions { PropagateCompletion = true });
    transformLeftBlock.LinkTo(outputBufferBlock.AddInput(), new DataflowLinkOptions { PropagateCompletion = true });
}
/// <summary>
/// Runs the donut.
/// </summary>
/// <param name="donut"></param>
/// <param name="getFeatureGenerator"></param>
/// <returns></returns>
public async Task<IHarvesterResult> Run(TDonut donut, IFeatureGenerator<TData> getFeatureGenerator)
{
    var integration = donut.Context.Integration;
    //Create our destination block
    var donutBlock = donut.CreateDataflowBlock(getFeatureGenerator);
    var dataProcessingBlock = donutBlock.FlowBlock;
    _featuresBlock = donutBlock.FeaturePropagator;
    var insertCreator = new TransformBlock<FeaturesWrapper<TData>, BsonDocument>(x =>
    {
        var rawFeatures = new BsonDocument();
        var featuresDocument = new IntegratedDocument(rawFeatures);
        //TODO: add some cleanup or a feature document definition, because right now the
        //original document is used. Either clean it up or create a new one with just the features.
        //if (doc.Document.Value.Contains("events")) doc.Document.Value.Remove("events");
        //if (doc.Document.Value.Contains("browsing_statistics")) doc.Document.Value.Remove("browsing_statistics");
        foreach (var featurePair in x.Features)
        {
            var name = featurePair.Key;
            if (string.IsNullOrEmpty(name))
            {
                continue;
            }
            var featureval = featurePair.Value;
            rawFeatures.Set(name, BsonValue.Create(featureval));
        }
        featuresDocument.IntegrationId = integration.Id;
        featuresDocument.APIId = integration.APIKey.Id;
        x.Features = null;
        return rawFeatures;
    });
    var insertBatcher = new MongoInsertBatch<BsonDocument>(_featuresCollection, 3000);
    insertCreator.LinkTo(insertBatcher.BatchBlock, new DataflowLinkOptions { PropagateCompletion = true });
    //Insert our features
    _featuresBlock.LinkTo(insertCreator, new DataflowLinkOptions { PropagateCompletion = true });
    //After all data is processed, extract the features
    dataProcessingBlock.ContinueWith(() =>
    {
        var extractionTask = RunFeatureExtraction(donut);
        Task.WaitAll(extractionTask);
    });
    _harvester.SetDestination(dataProcessingBlock);
    var harvesterRun = await _harvester.Run();
    //If we have to repeat it, handle this..
    return harvesterRun;
}
protected override IDisposable LinkExternalInput(ITargetBlock<IProjectVersionedValue<UpToDateCheckImplicitConfiguredInput>> targetBlock)
{
    Assumes.Present(_configuredProject.Services.ProjectSubscription);

    // Initial state is empty. We will evolve this reference over time, updating it iteratively
    // on each new data update.
    UpToDateCheckImplicitConfiguredInput state = UpToDateCheckImplicitConfiguredInput.Empty;

    IPropagatorBlock<IProjectVersionedValue<UpdateValues>, IProjectVersionedValue<UpToDateCheckImplicitConfiguredInput>> transformBlock =
        DataflowBlockSlim.CreateTransformBlock<IProjectVersionedValue<UpdateValues>, IProjectVersionedValue<UpToDateCheckImplicitConfiguredInput>>(Transform);

    IProjectValueDataSource<IProjectSubscriptionUpdate> source1 = _configuredProject.Services.ProjectSubscription.JointRuleSource;
    IProjectValueDataSource<IProjectSubscriptionUpdate> source2 = _configuredProject.Services.ProjectSubscription.SourceItemsRuleSource;
    IProjectValueDataSource<IProjectSnapshot> source3 = _configuredProject.Services.ProjectSubscription.ProjectSource;
    IProjectItemSchemaService source4 = _projectItemSchemaService;
    IProjectValueDataSource<IProjectCatalogSnapshot> source5 = _configuredProject.Services.ProjectSubscription.ProjectCatalogSource;

    return new DisposableBag
    {
        // Sync-link various sources to our transform block
        ProjectDataSources.SyncLinkTo(
            source1.SourceBlock.SyncLinkOptions(DataflowOption.WithRuleNames(ProjectPropertiesSchemas)),
            source2.SourceBlock.SyncLinkOptions(),
            source3.SourceBlock.SyncLinkOptions(),
            source4.SourceBlock.SyncLinkOptions(),
            source5.SourceBlock.SyncLinkOptions(),
            target: transformBlock,
            linkOptions: DataflowOption.PropagateCompletion,
            CancellationToken.None),

        // Link the transform block to our target block
        transformBlock.LinkTo(targetBlock, DataflowOption.PropagateCompletion),

        JoinUpstreamDataSources(source1, source2, source3, source4, source5)
    };

    IProjectVersionedValue<UpToDateCheckImplicitConfiguredInput> Transform(IProjectVersionedValue<UpdateValues> e)
    {
        var snapshot = e.Value.Item3 as IProjectSnapshot2;
        Assumes.NotNull(snapshot);

        state = state.Update(
            jointRuleUpdate: e.Value.Item1,
            sourceItemsUpdate: e.Value.Item2,
            projectSnapshot: snapshot,
            projectItemSchema: e.Value.Item4,
            projectCatalogSnapshot: e.Value.Item5,
            configuredProjectVersion: e.DataSourceVersions[ProjectDataSources.ConfiguredProjectVersion]);

        return new ProjectVersionedValue<UpToDateCheckImplicitConfiguredInput>(state, e.DataSourceVersions);
    }
}
public Unsubscribe Subscribe(Action<T> handler)
{
    var actionBlock = new ActionBlock<T>(handler);
    var disposable = _broadcast.LinkTo(actionBlock);
    _cleanup.Add(disposable);
    return () =>
    {
        disposable.Dispose();
        _cleanup.Remove(disposable);
    };
}
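// Illustrative sketch (separate from DataFlowMessageBus): the broadcast/unsubscribe
// pattern the bus wraps. Every linked target receives its own copy of each message,
// and disposing a link is how a subscriber drops out.
private static void BroadcastSubscribeSketch()
{
    var broadcast = new BroadcastBlock<string>(msg => msg); // identity "clone" function
    var logA = new ActionBlock<string>(m => Console.WriteLine($"A: {m}"));
    var logB = new ActionBlock<string>(m => Console.WriteLine($"B: {m}"));

    IDisposable linkA = broadcast.LinkTo(logA);
    IDisposable linkB = broadcast.LinkTo(logB);

    broadcast.Post("hello"); // delivered to both A and B
    linkB.Dispose();         // unsubscribe B
    broadcast.Post("world"); // delivered to A only
}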
public WebSocketWriterPipeline(WebSocket socket, JsonSerializerSettings serializerSettings)
{
    _socket = socket;
    _serializerSettings = serializerSettings;
    _endBlock = CreateMessageWriter();
    _startBlock = CreateWriterJsonTransformer();
    _startBlock.LinkTo(_endBlock, new DataflowLinkOptions { PropagateCompletion = true });
}
public FilterFileIfExistsBlock(ILogger<FilterFileIfExistsBlock> logger)
{
    _logger = logger;
    Block = new TransformBlock<PipelineItem, PipelineItem>(CheckIfFileAlreadyExistsTransform);
    var skippedLogging = new ActionBlock<PipelineItem>(LogFilteredFiles);
    Block.LinkTo(skippedLogging, new DataflowLinkOptions { PropagateCompletion = true }, fd => fd.IsDuplicate);
}
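// Illustrative sketch: a predicate link like the one above only forwards matching
// items; anything that matches no link stays in the source's output buffer and
// stalls it. A catch-all link (here a NullTarget) keeps non-matching items moving.
private static void FilteredLinkSketch()
{
    var source = new BufferBlock<int>();
    var evens = new ActionBlock<int>(n => Console.WriteLine($"even: {n}"));

    source.LinkTo(evens, new DataflowLinkOptions { PropagateCompletion = true }, n => n % 2 == 0);
    source.LinkTo(DataflowBlock.NullTarget<int>()); // fallback for odd numbers

    for (int i = 0; i < 5; i++)
        source.Post(i);
    source.Complete();
}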
public AnalysisActionBlock()
{
    _cts = new CancellationTokenSource();
    _tcs = new TaskCompletionSource<object>();
    _pendingMediaAnalysis = new ConcurrentDictionary<Guid, AnalysisManagerAction>();

    // This block is separate from the main block network. It has a bounded capacity
    // of 2 to ensure that at most we have 1 action being processed and 1 pending.
    // Any additional persist requests will be covered by the already pending action.
    _persistBlock = new ActionBlock<object>(_ => PersistPendingActions(), new ExecutionDataflowBlockOptions { BoundedCapacity = 2, MaxDegreeOfParallelism = 1 });

    // Input block that batches up multiple actions
    _inputBlock = new TimeoutBatchBlock<AnalysisManagerAction>(BATCH_SIZE, BATCH_TIMEOUT, new GroupingDataflowBlockOptions { CancellationToken = _cts.Token });

    // Give the processing block an unlimited bounded capacity to ensure
    // that no actions are ever dropped. Any unprocessed actions will be
    // persisted and restored on server startup
    _innerBlock = new TransformBlock<AnalysisManagerAction[], AnalysisManagerAction[]>(
        new Func<AnalysisManagerAction[], Task<AnalysisManagerAction[]>>(InnerBlockMethod),
        new ExecutionDataflowBlockOptions { CancellationToken = _cts.Token, MaxDegreeOfParallelism = 2 });

    // Action block to mark actions as completed
    _outputBlock = new ActionBlock<AnalysisManagerAction[]>(
        a => OutputBlockMethod(a),
        new ExecutionDataflowBlockOptions { CancellationToken = _cts.Token });

    ISettingsManager sm = ServiceRegistration.Get<ISettingsManager>();
    var settings = sm.Load<TranscodingServiceSettings>();

    // Link the blocks and handle fault/completion propagation
    _inputBlock.LinkTo(_innerBlock, new DataflowLinkOptions { PropagateCompletion = true });
    _innerBlock.LinkTo(_outputBlock, new DataflowLinkOptions { PropagateCompletion = true });
    _inputBlock.Completion.ContinueWith(OnAnyBlockFaulted, TaskContinuationOptions.OnlyOnFaulted);
    _innerBlock.Completion.ContinueWith(OnAnyBlockFaulted, TaskContinuationOptions.OnlyOnFaulted);
    _outputBlock.Completion.ContinueWith(OnAnyBlockFaulted, TaskContinuationOptions.OnlyOnFaulted);
    Task.WhenAll(_inputBlock.Completion, _innerBlock.Completion, _outputBlock.Completion).ContinueWith(OnAllBlocksFinished);
}
public TimeoutBatchBlock(int batchSize, int timeout, GroupingDataflowBlockOptions dataflowBlockOptions)
{
    _batchBlock = new BatchBlock<T>(batchSize, dataflowBlockOptions);
    _timeoutTimer = new Timer(o => _batchBlock.TriggerBatch());
    _timeoutBlock = new TransformBlock<T, T>(o =>
    {
        _timeoutTimer.Change(timeout, Timeout.Infinite);
        return o;
    }, new ExecutionDataflowBlockOptions { CancellationToken = dataflowBlockOptions.CancellationToken });
    _link = _timeoutBlock.LinkTo(_batchBlock, new DataflowLinkOptions { PropagateCompletion = true });
}
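// Illustrative sketch of the timer-driven flush TimeoutBatchBlock implements: batches
// normally emit when full, but TriggerBatch() flushes a partial batch after a quiet
// period. Names and timings here are illustrative, not from the original source.
private static void TimeoutBatchSketch()
{
    var batch = new BatchBlock<int>(10);
    var timer = new Timer(_ => batch.TriggerBatch());
    var sink = new ActionBlock<int[]>(b => Console.WriteLine($"batch of {b.Length}"));
    batch.LinkTo(sink, new DataflowLinkOptions { PropagateCompletion = true });

    for (int n = 0; n < 3; n++)
    {
        batch.Post(n);
        timer.Change(500, Timeout.Infinite); // restart the 500 ms inactivity window
    }
    // ~500 ms after the last post, the partial batch of 3 is emitted.
}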
protected override IDisposable LinkExternalInput(ITargetBlock<IProjectVersionedValue<IProjectTreePropertiesProvider>> targetBlock)
{
    JoinUpstreamDataSources(_orderedItemSource);

    IPropagatorBlock<IProjectVersionedValue<IReadOnlyCollection<ProjectItemIdentity>>, IProjectVersionedValue<IProjectTreePropertiesProvider>> providerProducerBlock =
        DataflowBlockSlim.CreateTransformBlock<IProjectVersionedValue<IReadOnlyCollection<ProjectItemIdentity>>, IProjectVersionedValue<IProjectTreePropertiesProvider>>(
            orderedItems =>
            {
                return new ProjectVersionedValue<IProjectTreePropertiesProvider>(new TreeItemOrderPropertyProvider(orderedItems.Value, _project), orderedItems.DataSourceVersions);
            },
            new ExecutionDataflowBlockOptions() { NameFormat = "Ordered Tree Item Input: {1}" });

    providerProducerBlock.LinkTo(targetBlock, DataflowOption.PropagateCompletion);
    return _orderedItemSource.SourceBlock.LinkTo(providerProducerBlock, DataflowOption.PropagateCompletion);
}
/// <summary>
/// Initializes a new instance of the <see cref="BatchLogTargetBase"/> class.
/// </summary>
/// <param name="batchSize">Size of the log batch.</param>
/// <param name="flushInterval">The flush interval.
/// The interval with which to initiate a batching operation even if the
/// number of currently queued logs is less than the <paramref name="batchSize"/>.
/// </param>
/// <param name="maxDegreeOfParallelism">The maximum degree of parallelism with which the target instance logs batch entries (i.e. the <see cref="Log"/> method).</param>
protected BatchLogTargetBase(Int32 batchSize, TimeSpan flushInterval, Int32 maxDegreeOfParallelism)
{
    _FlushInterval = flushInterval;
    _BatchBlock = new BatchBlock<LogEntry>(batchSize);
    _ActionBlock = new ActionBlock<IEnumerable<LogEntry>>(
        logEntries => { Log(logEntries); },
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism });
    _BatchBlock.LinkTo(_ActionBlock);
    _FlushTimer = new Timer(FlushTimerCallback, null, _FlushInterval, TimeSpan.FromMilliseconds(-1));
}
public MongoInsertBatch(IMongoCollection<TRecord> collection, uint batchSize = 1000, CancellationToken? cancellationToken = null)
{
    _batchesSent = 0;
    _batchBlock = BatchedBlockingBlock<TRecord>.CreateBlock(batchSize);
    _cancellationToken = cancellationToken ?? CancellationToken.None;
    Func<TRecord[], Task> targetAction = InsertAll;
    _actionBlock = new ActionBlock<TRecord[]>(targetAction, new ExecutionDataflowBlockOptions
    {
        BoundedCapacity = (int)batchSize,
        CancellationToken = _cancellationToken
    });
    _batchBlock.LinkTo(_actionBlock, new DataflowLinkOptions { PropagateCompletion = true });
    _collection = collection;
}
public Fetcher(FetcherOptions options, Func<FetchTarget, Task<FetchResult>> transform = null)
{
    _options = options;
    transform = transform ?? Fetch;
    _input = new TransformBlock<FetchTarget, FetchResult>(transform, new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded // -1: no parallelism limit
    });
    _output = new BroadcastBlock<FetchResult>(x => x);
    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    _input.LinkTo(_output, linkOptions);
}