/// <summary>
///     Continuously processes the messages from the specified channel until cancellation is
///     requested or a fatal error occurs, then reports the outcome via
///     <c>_readTaskCompletionSources</c> (<c>true</c> = clean exit, <c>false</c> = fatal error).
/// </summary>
/// <param name="index">The index of the channel being read.</param>
/// <param name="channelReader">The reader for the channel containing the consumed messages.</param>
/// <param name="cancellationToken">The token used to stop the processing loop.</param>
private async Task ReadChannelAsync(
    int index,
    ChannelReader<ConsumeResult<byte[]?, byte[]?>> channelReader,
    CancellationToken cancellationToken)
{
    try
    {
        _logger.LogTrace(
            IntegrationEventIds.LowLevelTracing,
            "Starting channel {channelIndex} processing loop... (consumerId: {consumerId})",
            index,
            _consumer.Id);

        while (!cancellationToken.IsCancellationRequested)
        {
            await ReadChannelOnceAsync(channelReader, index, cancellationToken).ConfigureAwait(false);
        }
    }
    catch (OperationCanceledException)
    {
        // Cancellation is the normal way this loop is stopped: just trace it and fall through
        // to the regular completion path below.
        _logger.LogTrace(
            IntegrationEventIds.LowLevelTracing,
            "Exiting channel {channelIndex} processing loop (operation canceled). (consumerId: {consumerId})",
            index,
            _consumer.Id);
    }
    catch (Exception ex)
    {
        // NOTE(review): ConsumerPipelineFatalException is presumably logged where it is thrown,
        // which would be why it is not logged again here — confirm against the pipeline code.
        if (!(ex is ConsumerPipelineFatalException))
        {
            _logger.LogCritical(
                IntegrationEventIds.ConsumerFatalError,
                ex,
                "Fatal error occurred processing the consumed message. The consumer will be stopped. (consumerId: {consumerId})",
                _consumer.Id);
        }

        IsReading[index] = false;
        _readTaskCompletionSources[index].TrySetResult(false);

        await _consumer.DisconnectAsync().ConfigureAwait(false);

        // Fix: return here so the success path below doesn't run after a fatal error. Previously
        // control fell through, redundantly clearing IsReading, attempting TrySetResult(true)
        // (a no-op after TrySetResult(false)) and logging a misleading clean-exit trace.
        return;
    }

    IsReading[index] = false;
    _readTaskCompletionSources[index].TrySetResult(true);

    _logger.LogTrace(
        IntegrationEventIds.LowLevelTracing,
        "Exited channel {channelIndex} processing loop. (consumerId: {consumerId})",
        index,
        _consumer.Id);
}
/// <summary>
///     Runs the blocking consume loop until cancellation is requested or a consume iteration
///     signals it should stop, then marks the provided
///     <see cref="TaskCompletionSource{TResult}" /> as completed and (unless canceled)
///     disconnects the underlying consumer.
/// </summary>
/// <param name="taskCompletionSource">Completed (with <c>true</c>) once the loop has exited.</param>
/// <param name="cancellationToken">Checked before disconnecting the underlying consumer.</param>
private async Task ConsumeAsync(
    TaskCompletionSource<bool> taskCompletionSource,
    CancellationToken cancellationToken)
{
    // There's unfortunately no async version of Confluent.Kafka.IConsumer.Consume() so we need to run
    // synchronously to stay within a single long-running thread with the Consume loop.
    var keepConsuming = true;

    while (keepConsuming && !_cancellationTokenSource.IsCancellationRequested)
    {
        keepConsuming = ConsumeOnce(cancellationToken);
    }

    taskCompletionSource.TrySetResult(true);

    if (!cancellationToken.IsCancellationRequested)
    {
        await _consumer.DisconnectAsync().ConfigureAwait(false);
    }
}