/// <summary>
/// Creates an <see cref="EventConsumptionWorker"/> configured for either single-event
/// or batched consumption of the given subscription.
/// </summary>
/// <param name="consumer">Event consumer the worker polls for events.</param>
/// <param name="subscriptionName">Name of the subscription to consume from.</param>
/// <param name="batched">True to consume in batches of 300 events; false to consume one event at a time.</param>
/// <returns>A configured <see cref="EventConsumptionWorker"/> (not yet started).</returns>
public static EventConsumptionWorker CreateWorker(IEventConsumerAsync consumer, string subscriptionName, bool batched)
{
    // Exactly one of consumeAction/consumeBatchAction may be non-null;
    // the worker's constructor validates this against the consume model.
    Func<ConsumableEvent, Task<ConsumeResult>> consumeAction = null;
    Func<IEnumerable<ConsumableEvent>, Task<IDictionary<Int64, ConsumeResult>>> consumeBatchAction = null;

    if (batched)
    {
        consumeBatchAction = async ces =>
        {
            // Materialize once: the incoming enumerable is used twice below
            // (count + projection) and may otherwise be re-enumerated.
            var events = ces.ToList();
            await Task.Delay(events.Count); // Simulated processing delay proportional to batch size
            return events
                .Select(ce => new KeyValuePair<Int64, ConsumeResult>(ce.Id, ConsumeResult.Succeeded)) // Report success for every event
                .ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
        };
    }
    else
    {
        consumeAction = async ceW =>
        {
            await Task.Delay(1); // Simulated per-event processing delay
            // To exercise failure paths here: throw (unhandled by the ConsumeAction)
            // or return ConsumeResult.MustRetry(...).
            return ConsumeResult.Succeeded;
        };
    }

    return new EventConsumptionWorker(
        eventConsumer: consumer,
        subscriptionName: subscriptionName,
        logger: serviceProvider.GetRequiredService<ILoggerFactory>().CreateLogger<EventConsumptionWorker>(),
        batchSize: batched ? 300 : 1,
        consumeModel: batched ? ConsumeModel.Batch : ConsumeModel.Single,
        consumeAction: consumeAction,
        consumeBatchAction: consumeBatchAction);
}
/// <summary>
/// Instantiates a PollingTask.
/// </summary>
/// <param name="eventConsumer">EventConsumer instance to use.</param>
/// <param name="subscriptionName">Name of the subscription to consume events from.</param>
/// <param name="consumeAction">Action that must be invoked for each event. Required when <paramref name="consumeModel"/> is <see cref="ConsumeModel.Single"/>; must be null otherwise.</param>
/// <param name="consumeBatchAction">Action that must be invoked for a batch of events. Required when <paramref name="consumeModel"/> is <see cref="ConsumeModel.Batch"/>; must be null otherwise.</param>
/// <param name="consumeModel">Whether events are consumed one at a time or in batches.</param>
/// <param name="visibilityTimeout">Number of seconds the business event must be locked.</param>
/// <param name="logger">ILogger to use for logging purposes.</param>
/// <param name="minBackOffDelayInMs">Minimum backoff delay in milliseconds. If 0, then processing will use up to 100% CPU!</param>
/// <param name="maxBackOffDelayInMs">Maximum backoff delay in milliseconds. Backoff-delay will increment exponentially up until this value.</param>
/// <param name="batchSize">Number of events to process in parallel (> 1 can result in slower processing when ordered delivery is used).</param>
/// <param name="housekeepingIntervalMin">Interval in minutes between housekeeping runs. Set to 0 to disable housekeeping.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when the backoff range is inverted or <paramref name="consumeModel"/> is unknown.</exception>
/// <exception cref="ArgumentNullException">Thrown when the delegate required by <paramref name="consumeModel"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when a delegate not matching <paramref name="consumeModel"/> is supplied.</exception>
public EventConsumptionWorker(IEventConsumerAsync eventConsumer, string subscriptionName,
    Func<ConsumableEvent, Task<ConsumeResult>> consumeAction = null,
    Func<IEnumerable<ConsumableEvent>, Task<IDictionary<Int64, ConsumeResult>>> consumeBatchAction = null,
    ConsumeModel consumeModel = ConsumeModel.Single,
    int visibilityTimeout = 60,
    ILogger logger = null,
    int minBackOffDelayInMs = 1,
    int maxBackOffDelayInMs = 60000,
    int batchSize = 1,
    int housekeepingIntervalMin = 5)
{
    if (maxBackOffDelayInMs < minBackOffDelayInMs)
    {
        // Fixed: the original threw with the non-existent parameter name
        // "maxBackOffDelayInSeconds" (the actual parameters are in milliseconds).
        throw new ArgumentOutOfRangeException(nameof(maxBackOffDelayInMs),
            "maxBackOffDelayInMs must be greater than or equal to minBackOffDelayInMs");
    }

    // Exactly one consume delegate must be supplied, and it must match the consume model.
    switch (consumeModel)
    {
        case ConsumeModel.Single:
            if (consumeAction == null)
            {
                throw new ArgumentNullException(nameof(consumeAction));
            }
            if (consumeBatchAction != null)
            {
                throw new ArgumentException("consumeBatchAction must be null when consumeModel is not set to Batch");
            }
            break;
        case ConsumeModel.Batch:
            if (consumeBatchAction == null)
            {
                throw new ArgumentNullException(nameof(consumeBatchAction));
            }
            if (consumeAction != null)
            {
                throw new ArgumentException("consumeAction must be null when consumeModel is not set to Single");
            }
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(consumeModel));
    }

    this._minDelayInMs = minBackOffDelayInMs;
    this._maxDelayInMs = maxBackOffDelayInMs;
    this._cancellationToken = new CancellationTokenSource();
    this._batchSize = batchSize;
    this._housekeepingIntervalMin = housekeepingIntervalMin;
    this._eventConsumer = eventConsumer;
    this._logger = logger;
    this._subscriptionName = subscriptionName;
    this._consumeAction = consumeAction;
    this._consumeBatchAction = consumeBatchAction;
    this._consumeModel = consumeModel;
    this._visibilityTimeout = visibilityTimeout;
}