/// <summary>Initializes this <see cref="BatchedJoinBlock{T1,T2}"/> with the specified configuration.</summary>
/// <param name="batchSize">The number of items to group into a batch.</param>
/// <param name="dataflowBlockOptions">The options with which to configure this <see cref="BatchedJoinBlock{T1,T2}"/>.</param>
/// <exception cref="System.ArgumentOutOfRangeException">The <paramref name="batchSize"/> must be positive.</exception>
/// <exception cref="System.ArgumentNullException">The <paramref name="dataflowBlockOptions"/> is null (Nothing in Visual Basic).</exception>
public BatchedJoinBlock(int batchSize, GroupingDataflowBlockOptions dataflowBlockOptions)
{
    // Validate arguments.  Batched joins only support greedy mode with unbounded
    // capacity, so non-greedy or bounded options are rejected up front.
    if (batchSize < 1)
    {
        throw new ArgumentOutOfRangeException(nameof(batchSize), SR.ArgumentOutOfRange_GenericPositive);
    }
    if (dataflowBlockOptions == null)
    {
        throw new ArgumentNullException(nameof(dataflowBlockOptions));
    }
    if (!dataflowBlockOptions.Greedy)
    {
        throw new ArgumentException(SR.Argument_NonGreedyNotSupported, nameof(dataflowBlockOptions));
    }
    if (dataflowBlockOptions.BoundedCapacity != DataflowBlockOptions.Unbounded)
    {
        throw new ArgumentException(SR.Argument_BoundedCapacityNotSupported, nameof(dataflowBlockOptions));
    }

    // Store arguments.  Clone the options so later mutations by the caller
    // cannot affect this block's configuration.
    _batchSize = batchSize;
    dataflowBlockOptions = dataflowBlockOptions.DefaultOrClone();

    // Configure the source half; when the source is asked to stop, each target
    // is completed so buffered inputs are released.
    _source = new SourceCore<Tuple<IList<T1>, IList<T2>>>(
        this, dataflowBlockOptions,
        owningSource => ((BatchedJoinBlock<T1, T2>)owningSource).CompleteEachTarget());

    // The action to run when a batch should be created. This is typically called
    // when we have a full batch, but it will also be called when we're done receiving
    // messages, and thus when there may be a few stragglers we need to make a batch out of.
    // NOTE: the lambda captures _target1/_target2 through 'this'; they are assigned
    // below, before the shared resources can ever invoke this action.
    Action createBatchAction = () =>
    {
        if (_target1.Count > 0 || _target2.Count > 0)
        {
            _source.AddMessage(Tuple.Create(_target1.GetAndEmptyMessages(), _target2.GetAndEmptyMessages()));
        }
    };

    // Configure the targets.  The shared resources object coordinates both targets:
    // it creates a batch when full, flushes stragglers and completes the source on
    // completion, forwards exceptions to the source, and triggers block completion.
    _sharedResources = new BatchedJoinBlockTargetSharedResources(
        batchSize, dataflowBlockOptions,
        createBatchAction,
        () =>
        {
            createBatchAction();
            _source.Complete();
        },
        _source.AddException,
        Complete);
    _target1 = new BatchedJoinBlockTarget<T1>(_sharedResources);
    _target2 = new BatchedJoinBlockTarget<T2>(_sharedResources);

    // It is possible that the source half may fault on its own, e.g. due to a task scheduler exception.
    // In those cases we need to fault the target half to drop its buffered messages and to release its
    // reservations. This should not create an infinite loop, because all our implementations are designed
    // to handle multiple completion requests and to carry over only one.
    _source.Completion.ContinueWith((completed, state) =>
    {
        var thisBlock = ((BatchedJoinBlock<T1, T2>)state!) as IDataflowBlock;
        Debug.Assert(completed.IsFaulted, "The source must be faulted in order to trigger a target completion.");
        thisBlock.Fault(completed.Exception!);
    }, this, CancellationToken.None, Common.GetContinuationOptions() | TaskContinuationOptions.OnlyOnFaulted, TaskScheduler.Default);

    // Handle async cancellation requests by declining on the target
    Common.WireCancellationToComplete(
        dataflowBlockOptions.CancellationToken, _source.Completion,
        state => ((BatchedJoinBlock<T1, T2>)state!).CompleteEachTarget(), this);
#if FEATURE_TRACING
    DataflowEtwProvider etwLog = DataflowEtwProvider.Log;
    if (etwLog.IsEnabled())
    {
        etwLog.DataflowBlockCreated(this, dataflowBlockOptions);
    }
#endif
}
/// <summary>
/// Stores the untrusted enumerable into the source core.
/// This method does not go through the reordering buffer.
/// </summary>
/// <param name="outputItems">The untrusted enumerable.</param>
private void StoreOutputItemsNonReorderedWithIteration(IEnumerable<TOutput> outputItems)
{
    // The _source we're adding to isn't thread-safe, so we need to determine
    // whether we need to lock.  With a max degree of parallelism of 1 only one
    // transform runs at a time, and a reordering buffer likewise guarantees we
    // are invoked serially — in either case no lock is required.
    bool serial = _target.DataflowBlockOptions.MaxDegreeOfParallelism == 1 || _reorderingBuffer != null;

    if (!_target.IsBounded)
    {
        // Not bounding: just push each individual item, taking the lock
        // per item when running in parallel.
        if (serial)
        {
            foreach (TOutput produced in outputItems)
            {
                _source.AddMessage(produced);
            }
        }
        else
        {
            foreach (TOutput produced in outputItems)
            {
                lock (ParallelSourceLock) // don't hold lock while enumerating
                {
                    _source.AddMessage(produced);
                }
            }
        }
        return;
    }

    // Bounding: the input that generated these outputs already holds one unit
    // of bounding count.  The first output reuses that unit, every additional
    // output adds one, and if no output was produced (or enumeration throws
    // before the first item) the unit is released in the finally.
    bool emittedFirst = false;
    try
    {
        foreach (TOutput produced in outputItems)
        {
            if (!emittedFirst)
            {
                emittedFirst = true;
            }
            else
            {
                _target.ChangeBoundingCount(count: 1);
            }

            if (serial)
            {
                _source.AddMessage(produced);
            }
            else
            {
                lock (ParallelSourceLock) // don't hold lock while enumerating
                {
                    _source.AddMessage(produced);
                }
            }
        }
    }
    finally
    {
        if (!emittedFirst)
        {
            _target.ChangeBoundingCount(count: -1);
        }
    }
}