/// <summary>
 /// Sets an Avro deserializer for keys.
 /// </summary>
 /// <typeparam name="TKey">
 /// The type of key to be deserialized.
 /// </typeparam>
 /// <typeparam name="TValue">
 /// The type of value to be deserialized.
 /// </typeparam>
 /// <param name="consumerBuilder">
 /// A <see cref="ConsumerBuilder{TKey, TValue}" /> instance to be configured.
 /// </param>
 /// <param name="deserializerBuilder">
 /// A deserializer builder.
 /// </param>
 /// <param name="subject">
 /// The subject of the schema that should be used to deserialize keys.
 /// </param>
 /// <param name="version">
 /// The version of the subject to be resolved.
 /// </param>
 /// <returns>
 /// <paramref name="consumerBuilder" /> with an Avro deserializer configured for
 /// <typeparamref name="TKey" />.
 /// </returns>
 public static async Task <ConsumerBuilder <TKey, TValue> > SetAvroKeyDeserializer <TKey, TValue>(
     this ConsumerBuilder <TKey, TValue> consumerBuilder,
     ISchemaRegistryDeserializerBuilder deserializerBuilder,
     string subject,
     int version)
 => consumerBuilder.SetKeyDeserializer(
     await deserializerBuilder.Build <TKey>(subject, version).ConfigureAwait(false));
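 // A minimal usage sketch for the subject/version extension above, assuming the
 // usual Confluent.Kafka and Confluent.SchemaRegistry using directives are in
 // scope; ExampleKey/ExampleValue, the broker address, the group id, and the
 // subject name are placeholders.
 public static async Task<IConsumer<ExampleKey, ExampleValue>> BuildConsumerForSubjectVersion(
     ISchemaRegistryDeserializerBuilder deserializerBuilder)
 {
     var consumerBuilder = new ConsumerBuilder<ExampleKey, ExampleValue>(
         new ConsumerConfig
         {
             BootstrapServers = "localhost:9092",
             GroupId = "example-group"
         });

     // Pin the key schema to a specific subject and version before building.
     await consumerBuilder.SetAvroKeyDeserializer(deserializerBuilder, "example-topic-key", version: 1);

     return consumerBuilder.Build();
 }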
Example #2
 /// <summary>
 /// Set the message key deserializer.
 /// </summary>
 /// <param name="consumerBuilder">
 /// The <see cref="ConsumerBuilder{TKey, TValue}" /> instance to be configured.
 /// </param>
 /// <param name="registryClient">
 /// The client to use for Schema Registry operations. The client should only be disposed
 /// after the consumer; the deserializer will use it to request schemas as messages are
 /// being consumed.
 /// </param>
 public static ConsumerBuilder <TKey, TValue> SetAvroKeyDeserializer <TKey, TValue>(
     this ConsumerBuilder <TKey, TValue> consumerBuilder,
     ISchemaRegistryClient registryClient
     ) => consumerBuilder.SetKeyDeserializer(
     new AsyncSchemaRegistryDeserializer <TKey>(
         registryClient
         ).AsSyncOverAsync());
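 // Hedged usage sketch for the client-based overload above; as the doc comment
 // notes, the registry client must be disposed only after the consumer.
 // ExampleKey/ExampleValue and the connection details are placeholders.
 public static IConsumer<ExampleKey, ExampleValue> BuildConsumerWithRegistryClient(
     ISchemaRegistryClient registryClient)
 {
     return new ConsumerBuilder<ExampleKey, ExampleValue>(
             new ConsumerConfig
             {
                 BootstrapServers = "localhost:9092",
                 GroupId = "example-group"
             })
         .SetAvroKeyDeserializer(registryClient)
         .Build();
 }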
Example #3
 /// <summary>
 /// Set the message key deserializer.
 /// </summary>
 /// <param name="consumerBuilder">
 /// The <see cref="ConsumerBuilder{TKey, TValue}" /> instance to be configured.
 /// </param>
 /// <param name="registryConfiguration">
 /// Schema Registry configuration. Using the <see cref="SchemaRegistryConfig" /> class is
 /// highly recommended.
 /// </param>
 public static ConsumerBuilder <TKey, TValue> SetAvroKeyDeserializer <TKey, TValue>(
     this ConsumerBuilder <TKey, TValue> consumerBuilder,
     IEnumerable <KeyValuePair <string, string> > registryConfiguration
     ) => consumerBuilder.SetKeyDeserializer(
     new AsyncSchemaRegistryDeserializer <TKey>(
         registryConfiguration
         ).AsSyncOverAsync());
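 // Hedged usage sketch for the configuration-based overload above.
 // SchemaRegistryConfig enumerates its entries as KeyValuePair<string, string>,
 // so it satisfies the parameter directly; note that newer Confluent.SchemaRegistry
 // releases name the URL property "Url" rather than "SchemaRegistryUrl". Types
 // and addresses are placeholders.
 public static IConsumer<ExampleKey, ExampleValue> BuildConsumerWithRegistryConfig()
 {
     var registryConfiguration = new SchemaRegistryConfig
     {
         SchemaRegistryUrl = "http://localhost:8081"
     };

     return new ConsumerBuilder<ExampleKey, ExampleValue>(
             new ConsumerConfig
             {
                 BootstrapServers = "localhost:9092",
                 GroupId = "example-group"
             })
         .SetAvroKeyDeserializer(registryConfiguration)
         .Build();
 }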
        /// <summary>
        /// Needs to return an <see cref="IConsumer{TKey, TValue}"/> for unit tests.
        /// Unfortunately, <see cref="ConsumerBuilder{TKey, TValue}"/> returns <see cref="Consumer{TKey, TValue}"/>.
        /// </summary>
        protected virtual IConsumer <TKey, TValue> CreateConsumer(
            ConsumerConfig config,
            Action <Consumer <TKey, TValue>, Error> errorHandler,
            Action <IConsumer <TKey, TValue>, List <TopicPartition> > partitionsAssignedHandler,
            Action <IConsumer <TKey, TValue>, List <TopicPartitionOffset> > partitionsRevokedHandler,
            IAsyncDeserializer <TValue> asyncValueDeserializer = null,
            IDeserializer <TValue> valueDeserializer           = null,
            IAsyncDeserializer <TKey> keyDeserializer          = null
            )
        {
            var builder = new ConsumerBuilder <TKey, TValue>(config)
                          .SetErrorHandler(errorHandler)
                          .SetPartitionsAssignedHandler(partitionsAssignedHandler)
                          .SetPartitionsRevokedHandler(partitionsRevokedHandler);

            if (keyDeserializer != null)
            {
                builder.SetKeyDeserializer(keyDeserializer);
            }

            if (asyncValueDeserializer != null)
            {
                builder.SetValueDeserializer(asyncValueDeserializer);
            }
            else if (valueDeserializer != null)
            {
                builder.SetValueDeserializer(valueDeserializer);
            }

            return(builder.Build());
        }
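        // Because CreateConsumer above is virtual and returns IConsumer, a test
        // subclass can bypass ConsumerBuilder entirely and hand back a test double.
        // The base class name (KafkaConsumerHost) and the use of Moq are
        // illustrative assumptions, not part of the original snippet.
        internal class FakeKafkaConsumerHost<TKey, TValue> : KafkaConsumerHost<TKey, TValue>
        {
            public Mock<IConsumer<TKey, TValue>> ConsumerMock { get; } = new Mock<IConsumer<TKey, TValue>>();

            protected override IConsumer<TKey, TValue> CreateConsumer(
                ConsumerConfig config,
                Action<Consumer<TKey, TValue>, Error> errorHandler,
                Action<IConsumer<TKey, TValue>, List<TopicPartition>> partitionsAssignedHandler,
                Action<IConsumer<TKey, TValue>, List<TopicPartitionOffset>> partitionsRevokedHandler,
                IAsyncDeserializer<TValue> asyncValueDeserializer = null,
                IDeserializer<TValue> valueDeserializer = null,
                IAsyncDeserializer<TKey> keyDeserializer = null)
                => ConsumerMock.Object;
        }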
        #pragma warning disable 8618
        public EventConsumer(
            IEventDeserializer eventDeserializer,
            EventConsumerConfig config,
            ILogger <EventConsumer <TAggregate, TAggregateId, TDeserializer> > logger)
        {
            _eventDeserializer = eventDeserializer;
            _logger            = logger;

            var aggregateType = typeof(TAggregate);

            var consumerConfig = new ConsumerConfig
            {
                GroupId            = config.ConsumerGroup,
                BootstrapServers   = config.KafkaConnectionString,
                AutoOffsetReset    = AutoOffsetReset.Earliest,
                EnablePartitionEof = true
            };

            var consumerBuilder        = new ConsumerBuilder <TAggregateId, string>(consumerConfig);
            var keyDeserializerFactory = new KeyDeserializerFactory();

            consumerBuilder.SetKeyDeserializer(keyDeserializerFactory.Create <TDeserializer, TAggregateId>());

            _eventConsumer = consumerBuilder.Build();

            var topicName = $"{config.TopicBaseName}-{aggregateType.Name}";

            _eventConsumer.Subscribe(topicName);
        }
Example #6
 public static IConsumer <T1, T2> Build <T1, T2>(this ConsumerBuilder <T1, T2> builder, JsonSerializerSettings serializerSettings)
     where T1 : class
     where T2 : class
 {
     return(builder
            .SetKeyDeserializer(new JsonNETKafkaSerializer <T1>(serializerSettings))
            .SetValueDeserializer(new JsonNETKafkaSerializer <T2>(serializerSettings))
            .Build());
 }
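 // Usage sketch for the JSON Build extension above; OrderKey/OrderValue, the
 // broker address, and the group id are placeholders, and Newtonsoft.Json is
 // assumed to be referenced. The same settings drive both key and value
 // deserialization.
 public class OrderKey { public string Id { get; set; } }
 public class OrderValue { public decimal Total { get; set; } }

 public static class JsonConsumerSample
 {
     public static IConsumer<OrderKey, OrderValue> Create()
     {
         var settings = new JsonSerializerSettings
         {
             NullValueHandling = NullValueHandling.Ignore
         };

         return new ConsumerBuilder<OrderKey, OrderValue>(
                 new ConsumerConfig
                 {
                     BootstrapServers = "localhost:9092",
                     GroupId = "orders"
                 })
             .Build(settings);
     }
 }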
Example #7
        public ConsumerConnectionBuilder <TKey, TValue> WithSchemaRegistry(string url)
        {
            if (typeof(TKey) == typeof(string) || typeof(TValue) == typeof(string))
            {
                return(this);
            }

            var schemaRegistryConfig = new SchemaRegistryConfig()
            {
                SchemaRegistryUrl = url
            };
            var schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryConfig);
            var deserializerConfig   = new AvroDeserializerConfig();

            consumerBuilder.SetKeyDeserializer(new AvroDeserializer <TKey>(schemaRegistryClient, deserializerConfig).AsSyncOverAsync());
            consumerBuilder.SetValueDeserializer(new AvroDeserializer <TValue>(schemaRegistryClient, deserializerConfig).AsSyncOverAsync());

            return(this);
        }
 public ConsumerBuilder <TKey, TValue> Configure <TKey, TValue>(
     ConsumerBuilder <TKey, TValue> builder,
     IDeserializer <TKey> keyDeserializer,
     IDeserializer <TValue> valueDeserializer)
 {
     builder
     .SetKeyDeserializer(keyDeserializer)
     .SetValueDeserializer(valueDeserializer)
     .SetLogHandler(LogHandler)
     .SetErrorHandler(ErrorHandler)
     .SetStatisticsHandler(StatisticsHandler)
     .SetPartitionsAssignedHandler(PartitionsAssignedHandler)
     .SetPartitionsRevokedHandler(PartitionsRevokedHandler);
     return(builder);
 }
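 // A hedged call site for Configure above. "KafkaHandlerConfigurator" stands in
 // for whatever class actually declares Configure; the built-in UTF-8 string
 // deserializers from Confluent.Kafka are used for both key and value, and the
 // connection details are placeholders.
 public static IConsumer<string, string> BuildConfiguredConsumer(KafkaHandlerConfigurator configurator)
 {
     var builder = configurator.Configure(
         new ConsumerBuilder<string, string>(new ConsumerConfig
         {
             BootstrapServers = "localhost:9092",
             GroupId = "diagnostics"
         }),
         Deserializers.Utf8,
         Deserializers.Utf8);

     return builder.Build();
 }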
        private ConsumerBuilder <K, T> CreateKafkaConsumerBuilder()
        {
            var kafkaConsumerBuilder = new ConsumerBuilder <K, T>(consumerConfig);

            kafkaConsumerBuilder.SetKeyDeserializer(keyDeserializer);
            kafkaConsumerBuilder.SetValueDeserializer(valueDeserializer);
            kafkaConsumerBuilder.SetErrorHandler((_, e) => OnError?.Invoke(new StreamingError {
                IsFatal = e.IsFatal, Reason = e.Reason
            }));
            kafkaConsumerBuilder.SetStatisticsHandler((_, statistics) => OnStatistics?.Invoke(statistics));

            if (partitionsAssignedHandle != null && CommitEnable)
            {
                throw new ArgumentException("The partitions-assigned handler cannot be set if " +
                                            "the 'CommitEnable' property is set to true.");
            }
            else if (partitionsAssignedHandle != null)
            {
                kafkaConsumerBuilder.SetPartitionsAssignedHandler(partitionsAssignedHandle);
            }

            return(kafkaConsumerBuilder);
        }
 /// <summary>
 /// Sets an Avro deserializer for keys.
 /// </summary>
 /// <typeparam name="TKey">
 /// The type of key to be deserialized.
 /// </typeparam>
 /// <typeparam name="TValue">
 /// The type of value to be deserialized.
 /// </typeparam>
 /// <param name="consumerBuilder">
 /// A <see cref="ConsumerBuilder{TKey, TValue}" /> instance to be configured.
 /// </param>
 /// <param name="deserializerBuilder">
 /// A deserializer builder.
 /// </param>
 /// <param name="id">
 /// The ID of the schema that should be used to deserialize keys.
 /// </param>
 /// <returns>
 /// <paramref name="consumerBuilder" /> with an Avro deserializer configured for
 /// <typeparamref name="TKey" />.
 /// </returns>
 public static async Task <ConsumerBuilder <TKey, TValue> > SetAvroKeyDeserializer <TKey, TValue>(
     this ConsumerBuilder <TKey, TValue> consumerBuilder,
     ISchemaRegistryDeserializerBuilder deserializerBuilder,
     int id)
 => consumerBuilder.SetKeyDeserializer(
     await deserializerBuilder.Build <TKey>(id).ConfigureAwait(false));
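 // Hedged usage sketch for the schema-ID overload above: the key schema is
 // resolved by its registry ID (42 is a placeholder) before the consumer is built.
 public static async Task<IConsumer<ExampleKey, ExampleValue>> BuildConsumerForSchemaId(
     ConsumerBuilder<ExampleKey, ExampleValue> consumerBuilder,
     ISchemaRegistryDeserializerBuilder deserializerBuilder)
 {
     await consumerBuilder.SetAvroKeyDeserializer(deserializerBuilder, id: 42);
     return consumerBuilder.Build();
 }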
        public async Task ConsumeMessages(CancellationToken stoppingToken)
        {
            var kafkaConfig = new ConsumerConfig
            {
                // Note: The AutoOffsetReset property determines the start offset in the event
                // there are not yet any committed offsets for the consumer group for the
                // topic/partitions of interest. By default, offsets are committed
                // automatically, so in this example, consumption will only start from the
                // earliest message in the topic the first time you run the program.
                AutoOffsetReset  = AutoOffsetReset.Earliest,
                GroupId          = ConsumerGroupId,
                BootstrapServers = Kafka
            };

            var consumerBuilder = new ConsumerBuilder <Key, Event>(kafkaConfig);

            consumerBuilder.SetErrorHandler(ConsumerErrorHandler);
            consumerBuilder.SetStatisticsHandler(ConsumerStatsHandler);

            using (var schemaRegistry = new CachedSchemaRegistryClient(_schemaRegistryConfig))
            {
                consumerBuilder.SetKeyDeserializer(new AvroDeserializer <Key>(schemaRegistry).AsSyncOverAsync());
                consumerBuilder.SetValueDeserializer(new AvroDeserializer <Event>(schemaRegistry).AsSyncOverAsync());

                using (var c = consumerBuilder.Build())
                {
                    c.Subscribe(_topics.FirstOrDefault());

                    Console.CancelKeyPress += (_, e) =>
                    {
                        e.Cancel = true; // prevent the process from terminating.
                    };

                    try
                    {
                        while (!stoppingToken.IsCancellationRequested)
                        {
                            try
                            {
                                var cr = c.Consume(stoppingToken);
                                if (!cr.IsPartitionEOF)
                                {
                                    ProcessMessage(cr);
                                }
                            }
                            catch (ConsumeException e)
                            {
                                _logger.LogError($"Error occured in Kafka Consumer service: {e.Error.Reason + Environment.NewLine + e.StackTrace}");
                            }

                            await Task.Delay(VerificationDelay, stoppingToken);
                        }
                    }
                    catch (OperationCanceledException)
                    {
                        // Ensure the consumer leaves the group cleanly and final offsets are committed.
                        c.Close();
                    }
                }
            }
        }
 /// <summary>
 /// Set the message key deserializer.
 /// </summary>
 /// <param name="consumerBuilder">
 /// The <see cref="ConsumerBuilder{TKey, TValue}" /> instance to be configured.
 /// </param>
 /// <param name="deserializerBuilder">
 /// A deserializer builder.
 /// </param>
 /// <param name="subject">
 /// The subject of the schema that should be used to deserialize keys. The latest version of
 /// the subject will be resolved.
 /// </param>
 public static async Task <ConsumerBuilder <TKey, TValue> > SetAvroKeyDeserializer <TKey, TValue>(
     this ConsumerBuilder <TKey, TValue> consumerBuilder,
     SchemaRegistryDeserializerBuilder deserializerBuilder,
     string subject
     ) => consumerBuilder.SetKeyDeserializer(await deserializerBuilder.Build <TKey>(subject));
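 // Hedged usage sketch for the latest-version overload above: the key schema is
 // taken from the newest version of the given subject (the name is a placeholder).
 public static async Task<IConsumer<ExampleKey, ExampleValue>> BuildConsumerForLatestSubject(
     ConsumerBuilder<ExampleKey, ExampleValue> consumerBuilder,
     SchemaRegistryDeserializerBuilder deserializerBuilder)
 {
     await consumerBuilder.SetAvroKeyDeserializer(deserializerBuilder, "example-topic-key");
     return consumerBuilder.Build();
 }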
Example #13
        public static void Main(string[] args)
        {
            var configuration = GetConfiguration(args);

            try
            {
                var          prometheusConfig = configuration.GetSection("prometheusMetrics").Get <PrometheusConfig>();
                MetricServer metricServer     = null;
                if (prometheusConfig.Enabled)
                {
                    metricServer = new MetricServer(port: prometheusConfig.Port);
                    metricServer.Start();
                }

                CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();
                var consumerConf = configuration.GetSection("consumerConf").Get <ConsumerConfig>();
                consumerConf.GroupId = Guid.NewGuid().ToString();

                ConsumerBuilder <Null, string> builder = new ConsumerBuilder <Null, string>(consumerConf);
                builder.SetErrorHandler((_, error) =>
                {
                    Console.WriteLine($"An error ocurred consuming the event: {error.Reason}");
                    if (error.IsFatal)
                    {
                        Environment.Exit(-1);
                    }
                });

                builder.HandleStatistics(new PrometheusConsumerStatisticsHandler(new string[] { "application" }, new string[] { "test-consumer-statistics" }));
                builder.SetKeyDeserializer(Deserializers.Null);
                builder.SetValueDeserializer(Deserializers.Utf8);

                using (var kafkaConsumer = builder.Build())
                {
                    kafkaConsumer.Subscribe(configuration.GetValue <string>("topic"));
                    while (!cancellationTokenSource.IsCancellationRequested)
                    {
                        ConsumeResult <Null, string> consumedResult;
                        try
                        {
                            consumedResult = kafkaConsumer.Consume(cancellationTokenSource.Token);
                            if (null != consumedResult)
                            {
                                Console.WriteLine($"Received message: {consumedResult.Value}");
                            }
                        }
                        catch (Exception ex)
                        {
                            Console.WriteLine("An error occurred consuming the event.", ex);
                            Environment.Exit(-2);
                        }
                    }
                }

                Console.WriteLine("Exit requested. Gracefully exiting...");
            }
            catch (Exception ex)
            {
                Console.WriteLine("An error occurred while starting up the test.", ex);
                Environment.Exit(-2);
            }
        }
Example #14
 private static ConsumerBuilder <string, string> AddDeserializers(ConsumerBuilder <string, string> consumerBuilder)
 {
     return(consumerBuilder
            .SetKeyDeserializer(Serialization.StringDeserializer.Instance)
            .SetValueDeserializer(Serialization.StringDeserializer.Instance));
 }
        /// <summary>
        ///     Initializes the current client.
        /// </summary>
        /// <param name="setting">The Kafka consumer settings.</param>
        /// <returns></returns>
        public async Task Init(KafkaConsumerSetting setting)
        {
            var config = new ConsumerConfig
            {
                //    Set the consumer group for this Kafka client.
                //    As long as group.id stays the same, each run resumes consuming from where the previous run left off.
                GroupId          = setting.GroupId,
                BootstrapServers = setting.BootstrapServers,
                EnableAutoCommit = true,
                AutoOffsetReset  = AutoOffsetReset.Earliest
            };
            var consuming = true;
            var builder   = new ConsumerBuilder <T1, T2>(config).SetErrorHandler((_, e) =>
            {
                Logger.LogError($"Error: {e.Reason}");
                consuming = !e.IsFatal;
            });

            if (setting.ConsumeOffset != ConsumeOffset.Unspecified)
            {
                builder.SetPartitionsAssignedHandler((c, partitions) =>
                {
                    Logger.LogInformation($"Assigned partitions: [{string.Join(", ", partitions)}]");
                    return(partitions.Select(y =>
                                             new TopicPartitionOffset(y, new Offset((long)setting.ConsumeOffset))));
                });
            }

            if (_keyDeserializer != null)
            {
                builder.SetKeyDeserializer(_keyDeserializer);
            }
            if (_valueDeserializer != null)
            {
                builder.SetValueDeserializer(_valueDeserializer);
            }

            using (_consumer = builder.Build())
            {
                _consumer.Subscribe(setting.Topic);
                while (consuming && !_token.IsCancellationRequested)
                {
                    try
                    {
                        var cr = _consumer.Consume(_token);
                        try
                        {
                            await Consume(cr.Key, cr.Value, cr.Timestamp.UtcDateTime);
                        }
                        catch (Exception e)
                        {
                            Logger.LogError("KafkaConsumerManager.Consume error", e);
                        }
                    }
                    catch (ConsumeException e)
                    {
                        Logger.LogError($"Error occured: {e.Error.Reason}");
                    }
                }
            }
        }
Example #16
        public void Start(string instanceId, CancellationToken cancellationToken = default(CancellationToken))
        {
            funcExecSemaphore = new Semaphore(MaxOutstanding, MaxOutstanding);

            CancellationTokenSource errorCts     = new CancellationTokenSource();
            CancellationTokenSource compositeCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, errorCts.Token);
            CancellationToken       compositeCancellationToken = compositeCts.Token;

            bool aMessageHasBeenProcessed = false;

            var cConfig = new ConsumerConfig
            {
                ClientId              = $"{Name}-consumer-{instanceId}",
                GroupId               = $"{Name}-group",
                BootstrapServers      = BootstrapServers,
                EnableAutoCommit      = true,
                EnableAutoOffsetStore = false,
                AutoOffsetReset       = AutoOffsetReset.Latest
            };

            if (DebugContext != null)
            {
                cConfig.Debug = DebugContext;
            }

            var cBuilder = new ConsumerBuilder <TInKey, TInValue>(cConfig);

            if (InKeyDeserializer != null)
            {
                cBuilder.SetKeyDeserializer(InKeyDeserializer);
            }
            if (InValueDeserializer != null)
            {
                cBuilder.SetValueDeserializer(InValueDeserializer);
            }
            if (Logger != null)
            {
                cBuilder.SetLogHandler((_, m) =>
                {
                    Logger(m);
                });
            }

            cBuilder.SetErrorHandler((c, e) =>
            {
                if (e.Code == ErrorCode.Local_AllBrokersDown ||
                    e.Code == ErrorCode.Local_Authentication)
                {
                    if (!aMessageHasBeenProcessed)
                    {
                        // Logger.Log(e);
                        errorCts.Cancel();
                        return;
                    }
                }

                if (Logger != null)
                {
                    Logger(new LogMessage(c.Name, SyslogLevel.Error, "unknown", e.Reason));
                }
            });


            var pConfig = new ProducerConfig
            {
                ClientId             = $"{Name}-producer-{instanceId}",
                BootstrapServers     = BootstrapServers,
                EnableIdempotence    = true,
                LingerMs             = 5,
                DeliveryReportFields = "none"
            };

            if (DebugContext != null)
            {
                pConfig.Debug = DebugContext;
            }

            var pBuilder = new ProducerBuilder <TOutKey, TOutValue>(pConfig);

            if (OutKeySerializer != null)
            {
                pBuilder.SetKeySerializer(OutKeySerializer);
            }
            if (OutValueSerializer != null)
            {
                pBuilder.SetValueSerializer(OutValueSerializer);
            }
            if (Logger != null)
            {
                pBuilder.SetLogHandler((_, m) =>
                {
                    Logger(m);
                });
            }
            pBuilder.SetErrorHandler((p, e) =>
            {
                if (e.IsFatal)
                {
                    errorCts.Cancel();
                    return;
                }

                if (e.Code == ErrorCode.Local_AllBrokersDown ||
                    e.Code == ErrorCode.Local_Authentication)
                {
                    if (!aMessageHasBeenProcessed)
                    {
                        errorCts.Cancel();
                        return;
                    }
                }

                if (Logger != null)
                {
                    Logger(new LogMessage(p.Name, SyslogLevel.Error, "unknown", e.Reason));
                }
            });

            var partitionState = new Dictionary <TopicPartition, PartitionState>();

            using (var producer = pBuilder.Build())
                using (var consumer = cBuilder.Build())
                {
                    consumer.Subscribe(InputTopic);

                    try
                    {
                        while (true)
                        {
                            ConsumeResult <TInKey, TInValue> cr;
                            try
                            {
                                cr = consumer.Consume(compositeCancellationToken);
                            }
                            catch (ConsumeException ex)
                            {
                                if (ex.Error.Code == ErrorCode.Local_ValueDeserialization)
                                {
                                    // For an in-depth discussion of what to do in the event of deserialization errors, refer to:
                                    // https://www.confluent.io/blog/kafka-connect-deep-dive-error-handling-dead-letter-queues

                                    if (ConsumeErrorTolerance == ErrorTolerance.All)
                                    {
                                        continue;
                                    }

                                    errorCts.Cancel(); // no error tolerance.
                                }

                                Thread.Sleep(TimeSpan.FromSeconds(10)); // ?? if not fail fast, do we want to sleep and why?
                                continue;
                            }

                            if (!partitionState.ContainsKey(cr.TopicPartition))
                            {
                                partitionState.Add(cr.TopicPartition, new PartitionState(this));
                            }
                            partitionState[cr.TopicPartition].HandleConsumedMessage(cr, consumer, producer, funcExecSemaphore, errorCts);

                            aMessageHasBeenProcessed = true;
                        }
                    }
                    catch (OperationCanceledException) { }
                }

            if (errorCts.IsCancellationRequested)
            {
                throw new Exception("error occured, and we're failing fast.");
            }
        }