/// <summary>
        /// Initializes a new instance of the <see cref="KafkaReceiver"/> class.
        /// </summary>
        /// <param name="name">The name of the receiver.</param>
        /// <param name="topic">
        /// The topic to subscribe to. A regex can be specified to subscribe to the set of
        /// all matching topics (which is updated as topics are added / removed from the
        /// cluster). A regex must be front anchored to be recognized as a regex. e.g. ^myregex
        /// </param>
        /// <param name="consumerConfig">The configuration used in creation of the Kafka consumer.</param>
        public KafkaReceiver(string name, string topic, ConsumerConfig consumerConfig)
            : base(name)
        {
            if (consumerConfig is null)
            {
                throw new ArgumentNullException(nameof(consumerConfig));
            }

            if (consumerConfig.EnableAutoCommit is false)
            {
                throw new ArgumentOutOfRangeException(nameof(consumerConfig), "The 'EnableAutoCommit' setting must be true.");
            }

            Topic                 = topic ?? throw new ArgumentNullException(nameof(topic));
            GroupId               = consumerConfig.GroupId;
            BootstrapServers      = consumerConfig.BootstrapServers;
            EnableAutoOffsetStore = consumerConfig.EnableAutoOffsetStore;
            AutoOffsetReset       = consumerConfig.AutoOffsetReset;

            var builder = new ConsumerBuilder <string, byte[]>(consumerConfig);

            builder.SetErrorHandler(OnError);

            _consumer = new Lazy <IConsumer <string, byte[]> >(() => builder.Build());

            _pollingThread = new Lazy <Thread>(() => new Thread(PollForMessages)
            {
                IsBackground = true
            });
            _trackingThread = new Lazy <Thread>(() => new Thread(TrackMessageHandling)
            {
                IsBackground = true
            });
        }
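A minimal usage sketch for this constructor (group, brokers, and names below are placeholders); the guard above rejects a config whose EnableAutoCommit is explicitly set to false:

        // Hypothetical usage: EnableAutoCommit defaults to true, which
        // satisfies the constructor's guard.
        var config = new ConsumerConfig
        {
            GroupId          = "example-group",
            BootstrapServers = "localhost:9092",
            EnableAutoCommit = true
        };
        var receiver = new KafkaReceiver("my-receiver", "my-topic", config);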
Example 2
        public IConsumer <byte[], byte[]> GetConsumer(ConsumerConfig config, IConsumerRebalanceListener rebalanceListener)
        {
            ConsumerBuilder <byte[], byte[]> builder = builderKafkaHandler.GetConsumerBuilder(config);

            if (rebalanceListener != null)
            {
                builder.SetPartitionsAssignedHandler((c, p) => rebalanceListener.PartitionsAssigned(c, p));
                builder.SetPartitionsRevokedHandler((c, p) => rebalanceListener.PartitionsRevoked(c, p));
                builder.SetLogHandler(loggerAdapter.LogConsume);
                builder.SetErrorHandler(loggerAdapter.ErrorConsume);
                if (exposeLibrdKafka)
                {
                    // TODO : test librdkafka statistics with IntegrationTest (WIP see #82)
                    var consumerStatisticsHandler = new ConsumerStatisticsHandler(
                        config.ClientId,
                        streamConfig.ApplicationId,
                        (config as StreamizConsumerConfig)?.ThreadId);
                    consumerStatisticsHandler.Register(MetricsRegistry);
                    builder.SetStatisticsHandler((c, stat) =>
                    {
                        var statistics = JsonConvert.DeserializeObject <Statistics>(stat);
                        consumerStatisticsHandler.Publish(statistics);
                    });
                }
            }

            return(builder.Build());
        }
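Example 3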
        /// <summary>
        /// Initializes a new instance of the <see cref="KafkaReceiver"/> class.
        /// </summary>
        /// <param name="name">The name of the receiver.</param>
        /// <param name="topic">
        /// The topic to subscribe to. A regex can be specified to subscribe to the set of
        /// all matching topics (which is updated as topics are added / removed from the
        /// cluster). A regex must be front anchored to be recognized as a regex. e.g. ^myregex
        /// </param>
        /// <param name="groupId">
        /// Client group id string. All clients sharing the same group.id belong to the same group.
        /// </param>
        /// <param name="bootstrapServers">
        /// List of brokers as a CSV list of broker host or host:port.
        /// </param>
        /// <param name="enableAutoOffsetStore">
        /// Whether to automatically store offset of last message provided to application.
        /// </param>
        /// <param name="autoOffsetReset">
        /// Action to take when there is no initial offset in offset store or the desired
        /// offset is out of range: 'smallest','earliest' - automatically reset the offset
        /// to the smallest offset, 'largest','latest' - automatically reset the offset to
        /// the largest offset, 'error' - trigger an error which is retrieved by consuming
        /// messages and checking 'message->err'.
        /// </param>
        public KafkaReceiver(string name, string topic, string groupId, string bootstrapServers,
                             bool enableAutoOffsetStore = false, AutoOffsetReset autoOffsetReset = Confluent.Kafka.AutoOffsetReset.Latest)
            : base(name)
        {
            Topic                 = topic ?? throw new ArgumentNullException(nameof(topic));
            GroupId               = groupId ?? throw new ArgumentNullException(nameof(groupId));
            BootstrapServers      = bootstrapServers ?? throw new ArgumentNullException(nameof(bootstrapServers));
            EnableAutoOffsetStore = enableAutoOffsetStore;
            AutoOffsetReset       = autoOffsetReset;

            var config  = GetConsumerConfig(groupId, bootstrapServers, enableAutoOffsetStore, autoOffsetReset);
            var builder = new ConsumerBuilder <string, byte[]>(config);

            builder.SetErrorHandler(OnError);

            _consumer = new Lazy <IConsumer <string, byte[]> >(() => builder.Build());

            _pollingThread = new Lazy <Thread>(() => new Thread(PollForMessages)
            {
                IsBackground = true
            });
            _trackingThread = new Lazy <Thread>(() => new Thread(TrackMessageHandling)
            {
                IsBackground = true
            });
        }
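As a hedged illustration of the regex behavior documented above (group and broker values are placeholders), a front-anchored pattern subscribes to every matching topic:

        // The leading '^' marks the topic string as a regex; the receiver
        // then consumes all topics matching "^orders-.*" as they come and go.
        var receiver = new KafkaReceiver(
            "regex-receiver",
            "^orders-.*",
            "example-group",
            "localhost:9092",
            enableAutoOffsetStore: false,
            autoOffsetReset: AutoOffsetReset.Earliest);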
Example 4
        public override Action Connection(IEnumerable <KeyValuePair <string, object> > options)
        {
            string brokerList = "";
            var    list       = options.GetEnumerator();

            while (list.MoveNext())
            {
                if ("BorkerList".Equals(list.Current.Key))
                {
                    brokerList = list.Current.Value.ToString();
                }
            }
            var cConfig = new ConsumerConfig
            {
                BootstrapServers      = brokerList,
                BrokerVersionFallback = "0.10.0.0",
                ApiVersionFallbackMs  = 0,
                //SaslMechanism = SaslMechanism.Plain,
                //SecurityProtocol = SecurityProtocol.SaslSsl,
                //SslCaLocation = "/usr/local/etc/openssl/cert.pem", // suitable configuration for linux, osx.
                // SslCaLocation = "c:\\path\\to\\cacert.pem",     // windows
                //SaslUsername = "******",
                //SaslPassword = "******",
                GroupId         = Guid.NewGuid().ToString(),
                AutoOffsetReset = AutoOffsetReset.Earliest
            };

            return(() =>
            {
                var consumerBuilder = new ConsumerBuilder <string, MessageBase>(cConfig);
                consumerBuilder.SetErrorHandler(OnConnectionException);
                _consumerClient = consumerBuilder.Build();
            });
        }
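A hypothetical invocation (the transport instance and broker address are placeholders): the returned Action defers consumer creation until it is called, and the option key must match the "BorkerList" spelling used by the lookup above:

        var connect = transport.Connection(new[]
        {
            new KeyValuePair<string, object>("BorkerList", "localhost:9092")
        });
        connect(); // the ConsumerBuilder is only built at this point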
Example 5
        public IConsumer <Ignore, byte[]> CreateConsumer(
            KafkaQueueConfiguration config,
            Action <IConsumer <Ignore, byte[]>, LogMessage> logHandler = null,
            Action <IConsumer <Ignore, byte[]>, Error> errorHandler    = null)
        {
            config.ThrowIfNull(nameof(config));

            var builder = new ConsumerBuilder <Ignore, byte[]>(new ConsumerConfig
            {
                GroupId                 = config.GroupId,
                BootstrapServers        = $"{config.Server}",
                AutoOffsetReset         = config.AutoOffsetReset,
                SaslKerberosKeytab      = config.KeyTab,
                SaslKerberosPrincipal   = config.User,
                SaslKerberosServiceName = config.ServiceName,
                SecurityProtocol        = config.Protocol,
                SaslMechanism           = config.Mechanism,
                Debug = config.Debug
            });

            if (logHandler != null)
            {
                builder.SetLogHandler(logHandler);
            }
            if (errorHandler != null)
            {
                builder.SetErrorHandler(errorHandler);
            }

            return(builder.Build());
        }
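A hypothetical call (the factory instance, configuration, and topic are placeholders), showing both optional handlers being supplied:

        var consumer = consumerFactory.CreateConsumer(
            queueConfig, // a populated KafkaQueueConfiguration
            logHandler: (c, log) => Console.WriteLine($"[{log.Level}] {log.Message}"),
            errorHandler: (c, err) => Console.WriteLine($"Kafka error: {err.Reason}"));
        consumer.Subscribe("my-topic");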
Example 6
        /// <summary>
        /// Initializes a new instance of the <see cref="KafkaReceiver"/> class.
        /// </summary>
        /// <param name="name">The name of the receiver.</param>
        /// <param name="topic">
        /// The topic to subscribe to. A regex can be specified to subscribe to the set of
        /// all matching topics (which is updated as topics are added / removed from the
        /// cluster). A regex must be front anchored to be recognized as a regex. e.g. ^myregex
        /// </param>
        /// <param name="groupId">
        /// Client group id string. All clients sharing the same group.id belong to the same
        /// group.
        /// </param>
        /// <param name="bootstrapServers">
        /// List of brokers as a CSV list of broker host or host:port.
        /// </param>
        /// <param name="config">
        /// A collection of librdkafka configuration parameters (refer to
        /// https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md) and parameters
        /// specific to this client (refer to: Confluent.Kafka.ConfigPropertyNames).
        /// </param>
        public KafkaReceiver(string name, string topic, string groupId, string bootstrapServers, ConsumerConfig config = null)
            : base(name)
        {
            Topic  = topic ?? throw new ArgumentNullException(nameof(topic));
            Config = config ?? new ConsumerConfig();

            Config.GroupId          = groupId ?? throw new ArgumentNullException(nameof(groupId));
            Config.BootstrapServers = bootstrapServers ?? throw new ArgumentNullException(nameof(bootstrapServers));
            Config.EnableAutoCommit = Config.EnableAutoCommit ?? false;

            var consumerBuilder = new ConsumerBuilder <Ignore, string>(Config);

            consumerBuilder.SetErrorHandler(OnError);

            _consumer = new Lazy <IConsumer <Ignore, string> >(() => consumerBuilder.Build());

            _pollingThread = new Lazy <Thread>(() => new Thread(PollForMessages)
            {
                IsBackground = true
            });
            _trackingThread = new Lazy <Thread>(() => new Thread(TrackMessageHandling)
            {
                IsBackground = true
            });
        }
Example 7
        private static void AddKafkaConsumer(this IServiceCollection services)
        {
            services.AddTransient(p =>
            {
                var logger         = p.GetService <ILogger <KafkaConsumerService> >();
                var kafkaOptions   = p.GetService <IOptions <KafkaOptions> >();
                var consumerConfig = new ConsumerConfig(kafkaOptions.Value.Configuration)
                {
                    EnablePartitionEof   = kafkaOptions.Value.EnablePartitionEof,
                    StatisticsIntervalMs = kafkaOptions.Value.StatisticsIntervalMs
                };
                var consumerBuilder = new ConsumerBuilder <string, string>(consumerConfig);
                var consumer        = consumerBuilder
                                      .SetErrorHandler((_, e) => logger?.LogError($"Error: {e.Reason}"))
                                      .SetStatisticsHandler((_, json) => logger?.LogDebug($"Statistics: {json}"))
                                      .SetPartitionsAssignedHandler((c, partitions) =>
                {
                    logger?.LogInformation($"Assigned partitions: [{string.Join(", ", partitions)}]");
                })
                                      .SetPartitionsRevokedHandler((c, partitions) =>
                {
                    logger?.LogInformation($"Revoking assignment: [{string.Join(", ", partitions)}]");
                })
                                      .Build();

                return(consumer);
            });
        }
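A hedged sketch of consuming this registration, assuming the AddKafkaConsumer extension is exposed from its static class and that KafkaOptions is bound from configuration (the section name is a placeholder):

        var services = new ServiceCollection();
        services.Configure<KafkaOptions>(configuration.GetSection("Kafka"));
        services.AddLogging();
        services.AddKafkaConsumer();
        using var provider = services.BuildServiceProvider();
        var consumer = provider.GetRequiredService<IConsumer<string, string>>();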
Example 8
        public void ConsumeMessage(string topicName)
        {
            String ErrorReason = String.Empty;

            var consumeBldr = new ConsumerBuilder <Ignore, string>(_config.ConsumerConfig());
            {
                //Attach error and log handlers
                consumeBldr.SetErrorHandler(ConsumerErrorHandler);
                consumeBldr.SetLogHandler(LogHandler);
                var consumer = consumeBldr.Build();

                try
                {
                    consumer.Subscribe(topicName);
                    ReadMessages(consumer);
                }
                catch (Exception ex)
                {
                    // An unhandled exception occurred: record the failure and close the consumer.
                    ErrorReason = $"Error : { ex.Message } Type of { ex.GetType().Name } not handled and closing the consumer. { ex.InnerException?.Message } Source: { ex?.Source } .Stack :{ ex?.StackTrace}";
                    _worker.UnProcessedMessage("Empty Message Found", ErrorReason, SyslogLevel.Error);
                    consumer.Close(); // TBD
                    consumer?.Dispose();
                }
            }
        }
Example 9
        public IConsumer <string, string> Create()
        {
            var config  = new ConsumerConfig(_configuration.GetConsumerConfiguration());
            var builder = new ConsumerBuilder <string, string>(config);

            builder.SetErrorHandler(OnKafkaError);
            return(builder.Build());
        }
Example 10
        public IConsumer <byte[], byte[]> GetRestoreConsumer(ConsumerConfig config)
        {
            ConsumerBuilder <byte[], byte[]> builder = builderKafkaHandler.GetConsumerBuilder(config);

            builder.SetLogHandler(loggerAdapter.LogConsume);
            builder.SetErrorHandler(loggerAdapter.ErrorConsume);
            return(builder.Build());
        }
Example 11
        public IConsumer <byte[], byte[]> GetGlobalConsumer(ConsumerConfig config)
        {
            ConsumerBuilder <byte[], byte[]> builder = new ConsumerBuilder <byte[], byte[]>(config);

            // TODO : Finish
            builder.SetLogHandler(loggerAdapter.LogConsume);
            builder.SetErrorHandler(loggerAdapter.ErrorConsume);
            return(builder.Build());
        }
Example 12
        public IConsumer <byte[], byte[]> GetGlobalConsumer(ConsumerConfig config)
        {
            config.AutoOffsetReset = AutoOffsetReset.Earliest;
            ConsumerBuilder <byte[], byte[]> builder = builderKafkaHandler.GetConsumerBuilder(config);

            builder.SetLogHandler(loggerAdapter.LogConsume);
            builder.SetErrorHandler(loggerAdapter.ErrorConsume);
            return(builder.Build());
        }
Example 13
 /// <summary>
 /// Subscribe-with-callback consumption (continuous subscription).
 /// </summary>
 /// <param name="Func">Callback function; when the consumer is configured for manual commit (the default here), the callback's return value decides whether the offset is committed.</param>
 /// <param name="Topic">The topic to subscribe to.</param>
 public void Consume(Func <ConsumeResult <TKey, TValue>, bool> Func, string Topic)
 {
     Task.Factory.StartNew(() =>
     {
         var builder = new ConsumerBuilder <TKey, TValue>(ConsumerConfig);
          //Set the value deserializer
         builder.SetValueDeserializer(new KafkaDConverter <TValue>());
         builder.SetErrorHandler((_, e) =>
         {
             Logger.Error(LoggerType.KafkaException, null, null, $"Error:{e.Reason}");
         }).SetStatisticsHandler((_, json) =>
         {
             Console.WriteLine($"-{DateTime.Now:yyyy-MM-dd HH:mm:ss} > 消息监听中..");
         }).SetPartitionsAssignedHandler((c, partitions) =>
         {
             string partitionsStr = string.Join(", ", partitions);
             Console.WriteLine($"-分配的kafka分区:{partitionsStr}");
         }).SetPartitionsRevokedHandler((c, partitions) =>
         {
             string partitionsStr = string.Join(", ", partitions);
             Console.WriteLine($"-回收了kafka的分区:{partitionsStr}");
         });
         using var consumer = builder.Build();
         consumer.Subscribe(Topic);
         while (AppSetting.Kafka.IsConsumerSubscribe) //true
         {
             ConsumeResult <TKey, TValue> result = null;
             try
             {
                 result = consumer.Consume();
                 if (result.IsPartitionEOF)
                 {
                     continue;
                 }
                 if (Func(result))
                 {
                     if (!(bool)ConsumerConfig.EnableAutoCommit)
                     {
                          //Manual commit; if EnableAutoCommit above were true, commits would happen automatically and Commit would not need to be called.
                         consumer.Commit(result);
                     }
                 }
             }
             catch (ConsumeException ex)
             {
                 Logger.Error(LoggerType.KafkaException, $"Topic:{Topic},{ex.Error.Reason}", null, ex.Message + ex.StackTrace);
             }
             catch (Exception ex)
             {
                 Logger.Error(LoggerType.KafkaException, $"Topic:{result.Topic}", null, ex.Message + ex.StackTrace);
             }
         }
     });
 }
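A hypothetical call (the enclosing generic class is instantiated here under the placeholder name KafkaSubscriber<string, string>); the handler's return value drives the manual commit when auto-commit is disabled:

     var kafka = new KafkaSubscriber<string, string>(); // placeholder name for the enclosing class
     kafka.Consume(result =>
     {
         Console.WriteLine($"{result.Topic} [{result.Partition}] @{result.Offset}: {result.Message.Value}");
         return true; // true => commit the offset when EnableAutoCommit is false
     }, "my-topic");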
Example 14
        private static void StartConsuming <T>()
        {
            bool consuming = true;
            ConsumerBuilder <Ignore, T> consumerBuilder = new ConsumerBuilder <Ignore, T>(Configuration.ConsumerConfig);

            consumerBuilder.SetErrorHandler((s, e) =>
            {
                consuming = !e.IsFatal;
                Log(e.ToString());
            });

            using (IConsumer <Ignore, T> consumer = consumerBuilder.Build())
            {
                ConsumerConfig consumerConfig = Configuration.ConsumerConfig;
                Log($"Consumer for group: '{consumerConfig.GroupId}' was created. Data type: '{Configuration.Type}'.");
                consumer.Subscribe(new[] { Configuration.TopicBlocks, Configuration.TopicTransactions, Configuration.TopicReceipts });
                Log($"Subscribed to topics: '{Configuration.TopicBlocks}', {Configuration.TopicTransactions}, '{Configuration.TopicReceipts}'.");
                while (consuming)
                {
                    try
                    {
                        ConsumeResult <Ignore, T> consumeResult = consumer.Consume();
                        Type type = typeof(T);
                        if (type == typeof(string))
                        {
                            ConsumeAsJson(consumeResult as ConsumeResult <Ignore, string>);
                        }
                        else if (type == typeof(byte[]))
                        {
                            ConsumeAsUtf8Json(consumeResult as ConsumeResult <Ignore, byte[]>);
                        }
                        else
                        {
                            Log($"Unknown data type: {type.Name}");

                            continue;
                        }

                        Log($"Consumed value at '{consumeResult.TopicPartitionOffset}'.");
                    }
                    catch (ConsumeException exception)
                    {
                        Log($"Consumer error occured: {exception.Error.Reason}");
                    }
                    catch (Exception exception)
                    {
                        Log(exception.Message);
                    }
                }

                consumer.Close();
            }
        }
Example 15
        public IConsumer <byte[], byte[]> GetConsumer(ConsumerConfig config, IConsumerRebalanceListener rebalanceListener)
        {
            ConsumerBuilder <byte[], byte[]> builder = new ConsumerBuilder <byte[], byte[]>(config);

            if (rebalanceListener != null)
            {
                builder.SetPartitionsAssignedHandler((c, p) => rebalanceListener.PartitionsAssigned(c, p));
                builder.SetPartitionsRevokedHandler((c, p) => rebalanceListener.PartitionsRevoked(c, p));
                builder.SetLogHandler(loggerAdapter.LogConsume);
                builder.SetErrorHandler(loggerAdapter.ErrorConsume);
            }
            return(builder.Build());
        }
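Example 16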
        public JT809_Same_Consumer(
            IOptions <JT809SameConsumerConfig> consumerConfigAccessor,
            ILoggerFactory loggerFactory)
        {
            logger = loggerFactory.CreateLogger("JT809_Same_Consumer");
            JT809ConsumerConfig = consumerConfigAccessor.Value;
            TopicName           = JT809ConsumerConfig.TopicName;
            ConsumerBuilder <string, byte[]> consumerBuilder = new ConsumerBuilder <string, byte[]>(consumerConfigAccessor.Value);

            consumerBuilder.SetErrorHandler((consumer, error) =>
            {
                logger.LogError(error.Reason);
            });
            Consumer = consumerBuilder.Build();
        }
Example 17
        public IConsumer <T, TV> GetConsumerBuilder <T, TV>(string consumerId) where T : class where TV : class
        {
            if (string.IsNullOrEmpty(consumerId))
            {
                throw new ConsumerNotFoundException($"The consumerId param can not be null or empty");
            }

            var consumerOptions = KafkaOptions.Consumers.FirstOrDefault(p => p.ConsumerId == consumerId);

            if (consumerOptions == null)
            {
                throw new ConsumerNotFoundException($"Could not find consumer configuration with ConsumerId {consumerId}");
            }

            var configuration = GetDefaultKafkaConsumerConfiguration(consumerOptions);

            var consumer = new ConsumerBuilder <T, TV>(configuration);

            IConsumer <T, TV> result = null;

            try
            {
                consumer.SetErrorHandler((_, e) => Devon4NetLogger.Error(new ConsumerException($"Error code {e.Code} : {e.Reason}")));
                consumer.SetStatisticsHandler((_, json) => Devon4NetLogger.Information($"Statistics: {json}"));
                consumer.SetPartitionsAssignedHandler((c, partitions) =>
                {
                    Devon4NetLogger.Information($"Assigned partitions: [{string.Join(", ", partitions)}]");
                });
                consumer.SetPartitionsRevokedHandler((c, partitions) =>
                {
                    Devon4NetLogger.Information($"Revoking assignment: [{string.Join(", ", partitions)}]");
                });

                result = consumer.Build();
                if (!string.IsNullOrEmpty(consumerOptions.Topics))
                {
                    result.Subscribe(consumerOptions.GetTopics());
                }
            }
            catch (InvalidOperationException ex)
            {
                Devon4NetLogger.Error(ex);
            }

            return(result);
        }
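A hypothetical lookup (the handler instance and consumer id are placeholders); when the matching options specify topics, the returned consumer comes back already subscribed:

        IConsumer<string, string> consumer =
            kafkaHandler.GetConsumerBuilder<string, string>("my-consumer-id");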
Example 18
        protected virtual IList <IConsumer <string, T> > CreateConsumers()
        {
            List <IConsumer <string, T> > consumers = new List <IConsumer <string, T> >();

            foreach (var topicPartition in topicPartitionList)
            {
                ConsumerBuilder <string, T> consumerBuilder = new ConsumerBuilder <string, T>(ConsumerConfig);
                consumerBuilder.SetErrorHandler((consumer, error) =>
                {
                    logger.LogError(error.Reason);
                });
                if (Deserializer != null)
                {
                    consumerBuilder.SetValueDeserializer(Deserializer);
                }
                consumers.Add(consumerBuilder.Build());
            }
            return(consumers);
        }
Example 19
        /// <summary>
        /// Synchronize the running state.
        /// </summary>
        /// <returns></returns>
        Task <bool> IMessageReceiver.LoopBegin()
        {
            //KafkaOption.Instance.Consumer.Set(ConfigPropertyNames.Consumer.ConsumeResultFields, "all");
            builder = new ConsumerBuilder <byte[], string>(KafkaOption.Instance.Consumer);

            //var value = KafkaOption.Instance.Consumer.Get(ConfigPropertyNames.Consumer.ConsumeResultFields);

            builder.SetErrorHandler(OnError);
            builder.SetLogHandler(OnLog);
            consumer = builder.Build();

            consumer.Subscribe(Service.ServiceName);

            if (KafkaOption.Instance.Message.Concurrency > 0)
            {
                ConcurrencySemaphore = new SemaphoreSlim(KafkaOption.Instance.Message.Concurrency, KafkaOption.Instance.Message.Concurrency);
            }
            return(Task.FromResult(true));
        }
Example 20
        protected JT809Consumer(
            IOptions <JT809TopicOptions> topicOptionsAccessor,
            IOptions <ConsumerConfig> consumerConfigAccessor,
            ILoggerFactory loggerFactory)
            : base(topicOptionsAccessor.Value.TopicName, consumerConfigAccessor.Value)
        {
            logger    = loggerFactory.CreateLogger("JT809Consumer");
            Consumers = new List <IConsumer <string, T> >();
            ConsumerBuilder <string, T> consumerBuilder = new ConsumerBuilder <string, T>(ConsumerConfig);

            consumerBuilder.SetErrorHandler((consumer, error) =>
            {
                logger.LogError(error.Reason);
            });
            if (Deserializer != null)
            {
                consumerBuilder.SetValueDeserializer(Deserializer);
            }
            Consumers.Add(consumerBuilder.Build());
        }
Example 21
        /// <summary>
        /// Initializes a new instance of the <see cref="KafkaReceiver"/> class.
        /// </summary>
        /// <param name="name">The name of the receiver.</param>
        /// <param name="topic">
        /// The topic to subscribe to. A regex can be specified to subscribe to the set of
        /// all matching topics (which is updated as topics are added / removed from the
        /// cluster). A regex must be front anchored to be recognized as a regex. e.g. ^myregex
        /// </param>
        /// <param name="groupId">
        /// Client group id string. All clients sharing the same group.id belong to the same group.
        /// </param>
        /// <param name="bootstrapServers">
        /// List of brokers as a CSV list of broker host or host:port.
        /// </param>
        /// <param name="enableAutoOffsetStore">
        /// Whether to automatically store offset of last message provided to application.
        /// </param>
        /// <param name="autoOffsetReset">
        /// Action to take when there is no initial offset in offset store or the desired
        /// offset is out of range: 'smallest','earliest' - automatically reset the offset
        /// to the smallest offset, 'largest','latest' - automatically reset the offset to
        /// the largest offset, 'error' - trigger an error which is retrieved by consuming
        /// messages and checking 'message->err'.
        /// </param>
        /// <param name="replayEngine">
        /// The <see cref="IReplayEngine"/> used to replay messages. If <see langword="null"/>,
        /// then a <see cref="DefaultReplayEngine"/> is used.
        /// </param>
        public KafkaReceiver(string name, string topic, string groupId, string bootstrapServers,
                             bool enableAutoOffsetStore = false, AutoOffsetReset autoOffsetReset = AutoOffsetReset.Latest,
                             IReplayEngine replayEngine = null)
            : base(name)
        {
            if (string.IsNullOrEmpty(topic))
            {
                throw new ArgumentNullException(nameof(topic));
            }
            if (string.IsNullOrEmpty(groupId))
            {
                throw new ArgumentNullException(nameof(groupId));
            }
            if (string.IsNullOrEmpty(bootstrapServers))
            {
                throw new ArgumentNullException(nameof(bootstrapServers));
            }

            Topic                 = topic;
            GroupId               = groupId;
            BootstrapServers      = bootstrapServers;
            EnableAutoOffsetStore = enableAutoOffsetStore;
            AutoOffsetReset       = autoOffsetReset;
            ReplayEngine          = replayEngine ?? _defaultReplayEngine.Value;

            var config  = GetConsumerConfig(GroupId, BootstrapServers, EnableAutoOffsetStore, AutoOffsetReset);
            var builder = new ConsumerBuilder <string, byte[]>(config);

            builder.SetErrorHandler(OnError);

            _consumer = new Lazy <IConsumer <string, byte[]> >(() => builder.Build());

            _pollingThread = new Lazy <Thread>(() => new Thread(PollForMessages)
            {
                IsBackground = true
            });
            _trackingThread = new Lazy <Thread>(() => new Thread(TrackMessageHandling)
            {
                IsBackground = true
            });
        }
Example 22
        /// <inheritdoc cref="IConfluentConsumerBuilder.Build" />
        public IConsumer <byte[]?, byte[]?> Build()
        {
            if (_config == null)
            {
                throw new InvalidOperationException("SetConfig must be called to provide the consumer configuration.");
            }

            var builder = new ConsumerBuilder <byte[]?, byte[]?>(_config);

            if (_statisticsHandler != null)
            {
                builder.SetStatisticsHandler(_statisticsHandler);
            }

            if (_errorHandler != null)
            {
                builder.SetErrorHandler(_errorHandler);
            }

            if (_partitionsAssignedHandler != null)
            {
                builder.SetPartitionsAssignedHandler(_partitionsAssignedHandler);
            }

            if (_partitionsRevokedHandler != null)
            {
                builder.SetPartitionsRevokedHandler(_partitionsRevokedHandler);
            }

            if (_offsetsCommittedHandler != null)
            {
                builder.SetOffsetsCommittedHandler(_offsetsCommittedHandler);
            }

            if (_logHandler != null)
            {
                builder.SetLogHandler(_logHandler);
            }

            return(builder.Build());
        }
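A hypothetical use of this wrapper, assuming the SetConfig method implied by the exception message above (broker and group values are placeholders):

        var builder = new ConfluentConsumerBuilder(); // hypothetical concrete type
        builder.SetConfig(new ConsumerConfig
        {
            BootstrapServers = "localhost:9092",
            GroupId          = "example-group"
        });
        IConsumer<byte[]?, byte[]?> consumer = builder.Build(); // handlers are wired only if they were set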
Example 23
        public static void ReadDatas(string brokerList, string topic, int partition, int offsetMin, int offsetMax)
        {
            try
            {
                Console.WriteLine("Start read datas");
                var config = new ConsumerConfig
                {
                    BootstrapServers = brokerList,
                    GroupId          = "csharp-consumer",
                    EnableAutoCommit = false,
                    SessionTimeoutMs = 6000,
                    // AutoOffsetReset = AutoOffsetReset.Earliest,
                    EnablePartitionEof = true
                };
                var consumerBuilder = new ConsumerBuilder <string, string>(config);
                consumerBuilder.SetErrorHandler(MyErrorHandler);
                consumerBuilder.SetLogHandler(MyLogHandler);

                var consumer = consumerBuilder.Build();

                // Read messages from the given partition, starting at offsetMin
                consumer.Assign(new TopicPartitionOffset(topic, new Partition(partition), new Offset(offsetMin)));
                ConsumeResult <string, string> result;
                var sw = new Stopwatch();
                do
                {
                    sw.Restart();
                    result = consumer.Consume(10_000);
                    Console.WriteLine(sw.ElapsedMilliseconds);
                    if (result != null)
                    {
                        Console.WriteLine($"{result.IsPartitionEOF} {result.TopicPartitionOffset}: '{result.Message?.Key}' | '{result.Message?.Value}'");
                    }
                }while (result != null && !result.IsPartitionEOF && result.Offset < offsetMax);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
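A hypothetical invocation (broker and topic are placeholders): the loop above stops once offsetMax is reached, when Consume times out after 10 seconds and returns null, or at end of partition (EnablePartitionEof = true):

        ReadDatas("localhost:9092", "my-topic", partition: 0, offsetMin: 0, offsetMax: 100);

Example 24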
        private ConsumerBuilder <K, T> CreateKafkaConsumerBuilder()
        {
            var kafkaConsumerBuilder = new ConsumerBuilder <K, T>(consumerConfig);

            kafkaConsumerBuilder.SetKeyDeserializer(keyDeserializer);
            kafkaConsumerBuilder.SetValueDeserializer(valueDeserializer);
            kafkaConsumerBuilder.SetErrorHandler((_, e) => OnError?.Invoke(new StreamingError {
                IsFatal = e.IsFatal, Reason = e.Reason
            }));
            kafkaConsumerBuilder.SetStatisticsHandler((_, statistics) => OnStatistics?.Invoke(statistics));

            if (partitionsAssignedHandle != null && CommitEnable)
            {
                throw new ArgumentException("The partition assigned handle can not been setted if " +
                                            "'CommitEnable' property was setted to true.");
            }
            else if (partitionsAssignedHandle != null)
            {
                kafkaConsumerBuilder.SetPartitionsAssignedHandler(partitionsAssignedHandle);
            }

            return(kafkaConsumerBuilder);
        }
Example 25
        public async Task ConsumeMessages(CancellationToken stoppingToken)
        {
            var kafkaConfig = new ConsumerConfig
            {
                // Note: The AutoOffsetReset property determines the start offset in the event
                // there are not yet any committed offsets for the consumer group for the
                // topic/partitions of interest. By default, offsets are committed
                // automatically, so in this example, consumption will only start from the
                // earliest message in the topic 'my-topic' the first time you run the program.
                AutoOffsetReset  = AutoOffsetReset.Earliest,
                GroupId          = ConsumerGroupId,
                BootstrapServers = Kafka
            };

            var consumer = new ConsumerBuilder <Key, Event>(kafkaConfig);

            consumer.SetErrorHandler(ConsumerErrorHandler);
            consumer.SetStatisticsHandler(ConsumerStatsHandler);

            using (var schemaRegistry = new CachedSchemaRegistryClient(_schemaRegistryConfig))
            {
                consumer.SetKeyDeserializer(new AvroDeserializer <Key>(schemaRegistry).AsSyncOverAsync());
                consumer.SetValueDeserializer(new AvroDeserializer <Event>(schemaRegistry).AsSyncOverAsync());

                using (var c = consumer.Build())
                {
                    c.Subscribe(_topics.FirstOrDefault());

                    Console.CancelKeyPress += (_, e) =>
                    {
                        e.Cancel = true; // prevent the process from terminating.
                    };

                    try
                    {
                        while (!stoppingToken.IsCancellationRequested)
                        {
                            try
                            {
                                var cr = c.Consume(stoppingToken);
                                if (!cr.IsPartitionEOF)
                                {
                                    ProcessMessage(cr);
                                }
                            }
                            catch (ConsumeException e)
                            {
                                _logger.LogError($"Error occured in Kafka Consumer service: {e.Error.Reason + Environment.NewLine + e.StackTrace}");
                            }

                            await Task.Delay(VerificationDelay, stoppingToken);
                        }
                    }
                    catch (OperationCanceledException)
                    {
                        // Ensure the consumer leaves the group cleanly and final offsets are committed.
                        c.Close();
                    }
                }
            }
        }
Example 26
        /// <summary>
        /// Create the consumer builder.
        /// </summary>
        private void CreateConsumerBuilder()
        {
            if (disposed)
            {
                throw new ObjectDisposedException(nameof(KafkaConsumer));
            }

            if (builder == null)
            {
                lock (this)
                {
                    if (builder == null)
                    {
                        ConsumerConfig config = new ConsumerConfig();
                        config.BootstrapServers = BootstrapServers;
                        config.GroupId          = GroupId;
                        config.AutoOffsetReset  = AutoOffsetReset.Earliest;
                        config.EnableAutoCommit = EnableAutoCommit;
                        if (!string.IsNullOrEmpty(SaslUsername))
                        {
                            config.SaslUsername     = SaslUsername;
                            config.SaslPassword     = SaslPassword;
                            config.SaslMechanism    = SaslMechanism.Plain;
                            config.SecurityProtocol = SecurityProtocol.SaslPlaintext;
                        }
                        //config.EnableAutoOffsetStore = true;
                        //config.IsolationLevel = IsolationLevel.ReadCommitted;
                        //config.MaxPollIntervalMs = 10000;


                        //List<KeyValuePair<string, string>> config = new List<KeyValuePair<string, string>>();
                        //config.Add(new KeyValuePair<string, string>("bootstrap.servers", BootstrapServers));
                        //config.Add(new KeyValuePair<string, string>("group.id", GroupId));
                        //config.Add(new KeyValuePair<string, string>("auto.offset.reset", "earliest"));
                        //config.Add(new KeyValuePair<string, string>("enable.auto.commit", EnableAutoCommit.ToString().ToLower()));
                        //if (!string.IsNullOrEmpty(SaslUsername))
                        //{
                        //    config.Add(new KeyValuePair<string, string>("security.protocol", "SASL_PLAINTEXT"));
                        //    config.Add(new KeyValuePair<string, string>("sasl.mechanism", "PLAIN"));
                        //    config.Add(new KeyValuePair<string, string>("sasl.username", SaslUsername));
                        //    config.Add(new KeyValuePair<string, string>("sasl.password", SaslPassword));
                        //}
                        //config.Add(new KeyValuePair<string, string>("max.poll.interval.ms", "10000"));
                        //config.Add(new KeyValuePair<string, string>("session.timeout.ms", "10000"));
                        //config.Add(new KeyValuePair<string, string>("isolation.level", "read_uncommitted"));

                        builder = new ConsumerBuilder <string, object>(config);

                        Action <Delegate, object> tryCatchWrap = (@delegate, arg) =>
                        {
                            try
                            {
                                @delegate?.DynamicInvoke(arg);
                            }
                            catch { }
                        };
                        builder.SetErrorHandler((p, e) => tryCatchWrap(ErrorHandler, new Exception(e.Reason)));
                        builder.SetStatisticsHandler((p, e) => tryCatchWrap(StatisticsHandler, e));
                        builder.SetLogHandler((p, e) => tryCatchWrap(LogHandler, new KafkaLogMessage(e)));
                        builder.SetValueDeserializer(new KafkaConverter());
                    }
                }
            }
        }
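The double-checked lock above could equally be expressed with Lazy<T>, in the style of the KafkaReceiver examples earlier on this page (a sketch, not the author's code; BuildConfig is a hypothetical static helper standing in for the configuration block):

        private static readonly Lazy<ConsumerBuilder<string, object>> _builder =
            new Lazy<ConsumerBuilder<string, object>>(
                () => new ConsumerBuilder<string, object>(BuildConfig()),
                LazyThreadSafetyMode.ExecutionAndPublication);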
Example 27
        public static void Main(string[] args)
        {
            var configuration = GetConfiguration(args);

            try
            {
                var          prometheusConfig = configuration.GetSection("prometheusMetrics").Get <PrometheusConfig>();
                MetricServer metricServer     = null;
                if (prometheusConfig.Enabled)
                {
                    metricServer = new MetricServer(port: prometheusConfig.Port);
                    metricServer.Start();
                }

                CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();
                var consumerConf = configuration.GetSection("consumerConf").Get <ConsumerConfig>();
                consumerConf.GroupId = Guid.NewGuid().ToString();

                ConsumerBuilder <Null, string> builder = new ConsumerBuilder <Null, string>(consumerConf);
                builder.SetErrorHandler((_, error) =>
                {
                    Console.WriteLine($"An error ocurred consuming the event: {error.Reason}");
                    if (error.IsFatal)
                    {
                        Environment.Exit(-1);
                    }
                });

                builder.HandleStatistics(new PrometheusConsumerStatisticsHandler(new string[] { "application" }, new string[] { "test-consumer-statistics" }));
                builder.SetKeyDeserializer(Deserializers.Null);
                builder.SetValueDeserializer(Deserializers.Utf8);

                using (var kafkaConsumer = builder.Build())
                {
                    kafkaConsumer.Subscribe(configuration.GetValue <string>("topic"));
                    while (!cancellationTokenSource.IsCancellationRequested)
                    {
                        ConsumeResult <Null, string> consumedResult;
                        try
                        {
                            consumedResult = kafkaConsumer.Consume(cancellationTokenSource.Token);
                            if (null != consumedResult)
                            {
                                Console.WriteLine($"Received message: {consumedResult.Value}");
                            }
                        }
                        catch (Exception ex)
                        {
                            Console.WriteLine("An error occurred consuming the event.", ex);
                            Environment.Exit(-2);
                        }
                    }
                }

                Console.WriteLine("Exit requested. Gracefully exiting...");
            }
            catch (Exception ex)
            {
                Console.WriteLine("An error occurred while starting up the test.", ex);
                Environment.Exit(-2);
            }
        }
Example 28
        private static void StartConsumingAvro <T>(string topic) where T : ISpecificRecord
        {
            bool consuming = true;
            ConsumerBuilder <Ignore, T> consumerBuilder = new ConsumerBuilder <Ignore, T>(Configuration.ConsumerConfig);

            consumerBuilder.SetErrorHandler((s, e) =>
            {
                consuming = !e.IsFatal;
                Log(e.ToString());
            });

            CachedSchemaRegistryClient schemaRegistry = new CachedSchemaRegistryClient(new[]
            {
                new KeyValuePair <string, string>(SchemaRegistryConfig.PropertyNames.SchemaRegistryUrl, Configuration.SchemaRegistryUrl)
            });

            var deserializer = new AvroDeserializer <T>(schemaRegistry).AsSyncOverAsync();

            consumerBuilder.SetValueDeserializer(deserializer);
            IConsumer <Ignore, T> consumer = consumerBuilder.Build();

            using (consumer)
            {
                ConsumerConfig consumerConfig = Configuration.ConsumerConfig;
                Log($"Consumer for group: '{consumerConfig.GroupId}' was created. Data type: '{Configuration.Type}'.");
                consumer.Subscribe(new[] { topic });
                Log($"Subscribed to topic: '{topic}'.");
                while (consuming)
                {
                    try
                    {
                        ConsumeResult <Ignore, T> consumeResult = consumer.Consume();
                        Type type = typeof(T);
                        if (type == typeof(Avro.Models.Block))
                        {
                            ConsumeResult <Ignore, Avro.Models.Block> result = consumeResult as ConsumeResult <Ignore, Avro.Models.Block>;
                            Avro.Models.Block block = result.Value;
                            Log($"Block: {block.blockNumber} {block.blockHash}");
                        }
                        else if (type == typeof(Avro.Models.FullTransaction))
                        {
                            ConsumeResult <Ignore, FullTransaction> result = consumeResult as ConsumeResult <Ignore, Avro.Models.FullTransaction>;
                            FullTransaction transaction = result.Value;
                            Log($"Transaction for block: {transaction.blockNumber} {transaction.receipt.blockHash}");
                        }
                        else
                        {
                            Log($"Unknown data type: {type.Name}");

                            continue;
                        }

                        Log($"Consumed value at '{consumeResult.TopicPartitionOffset}'.");
                    }
                    catch (ConsumeException exception)
                    {
                        Log($"Consumer error occured: {exception.Error.Reason}");
                    }
                    catch (Exception exception)
                    {
                        Log(exception.Message);
                    }
                }

                consumer.Close();
            }
        }
Example 29
        public void Start(string instanceId, CancellationToken cancellationToken = default(CancellationToken))
        {
            funcExecSemaphore = new Semaphore(MaxOutstanding, MaxOutstanding);

            CancellationTokenSource errorCts     = new CancellationTokenSource();
            CancellationTokenSource compositeCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, errorCts.Token);
            CancellationToken       compositeCancellationToken = compositeCts.Token;

            bool aMessageHasBeenProcessed = false;

            var cConfig = new ConsumerConfig
            {
                ClientId              = $"{Name}-consumer-{instanceId}",
                GroupId               = $"{Name}-group",
                BootstrapServers      = BootstrapServers,
                EnableAutoCommit      = true,
                EnableAutoOffsetStore = false,
                AutoOffsetReset       = AutoOffsetReset.Latest
            };

            if (DebugContext != null)
            {
                cConfig.Debug = DebugContext;
            }

            var cBuilder = new ConsumerBuilder <TInKey, TInValue>(cConfig);

            if (InKeyDeserializer != null)
            {
                cBuilder.SetKeyDeserializer(InKeyDeserializer);
            }
            if (InValueDeserializer != null)
            {
                cBuilder.SetValueDeserializer(InValueDeserializer);
            }
            if (Logger != null)
            {
                cBuilder.SetLogHandler((_, m) =>
                {
                    Logger(m);
                });
            }

            cBuilder.SetErrorHandler((c, e) =>
            {
                if (e.Code == ErrorCode.Local_AllBrokersDown ||
                    e.Code == ErrorCode.Local_Authentication)
                {
                    if (!aMessageHasBeenProcessed)
                    {
                        // Logger.Log(e);
                        errorCts.Cancel();
                        return;
                    }
                }

                if (Logger != null)
                {
                    Logger(new LogMessage(c.Name, SyslogLevel.Error, "unknown", e.Reason));
                }
            });


            var pConfig = new ProducerConfig
            {
                ClientId             = $"{Name}-producer-{instanceId}",
                BootstrapServers     = BootstrapServers,
                EnableIdempotence    = true,
                LingerMs             = 5,
                DeliveryReportFields = "none"
            };

            if (DebugContext != null)
            {
                pConfig.Debug = DebugContext;
            }

            var pBuilder = new ProducerBuilder <TOutKey, TOutValue>(pConfig);

            if (OutKeySerializer != null)
            {
                pBuilder.SetKeySerializer(OutKeySerializer);
            }
            if (OutValueSerializer != null)
            {
                pBuilder.SetValueSerializer(OutValueSerializer);
            }
            if (Logger != null)
            {
                pBuilder.SetLogHandler((_, m) =>
                {
                    Logger(m);
                });
            }
            pBuilder.SetErrorHandler((p, e) =>
            {
                if (e.IsFatal)
                {
                    errorCts.Cancel();
                    return;
                }

                if (e.Code == ErrorCode.Local_AllBrokersDown ||
                    e.Code == ErrorCode.Local_Authentication)
                {
                    if (!aMessageHasBeenProcessed)
                    {
                        errorCts.Cancel();
                        return;
                    }
                }

                if (Logger != null)
                {
                    Logger(new LogMessage(p.Name, SyslogLevel.Error, "unknown", e.Reason));
                }
            });

            var partitionState = new Dictionary <TopicPartition, PartitionState>();

            using (var producer = pBuilder.Build())
                using (var consumer = cBuilder.Build())
                {
                    consumer.Subscribe(InputTopic);

                    try
                    {
                        while (true)
                        {
                            ConsumeResult <TInKey, TInValue> cr;
                            try
                            {
                                cr = consumer.Consume(compositeCancellationToken);
                            }
                            catch (ConsumeException ex)
                            {
                                if (ex.Error.Code == ErrorCode.Local_ValueDeserialization)
                                {
                                    // For an in-depth discussion of what to do in the event of deserialization errors, refer to:
                                    // https://www.confluent.io/blog/kafka-connect-deep-dive-error-handling-dead-letter-queues

                                    if (ConsumeErrorTolerance == ErrorTolerance.All)
                                    {
                                        continue;
                                    }

                                    errorCts.Cancel(); // no error tolerance.
                                }

                                Thread.Sleep(TimeSpan.FromSeconds(10)); // ?? if not fail fast, do we want to sleep and why?
                                continue;
                            }

                            if (!partitionState.ContainsKey(cr.TopicPartition))
                            {
                                partitionState.Add(cr.TopicPartition, new PartitionState(this));
                            }
                            partitionState[cr.TopicPartition].HandleConsumedMessage(cr, consumer, producer, funcExecSemaphore, errorCts);

                            aMessageHasBeenProcessed = true;
                        }
                    }
                    catch (OperationCanceledException) { }
                }

            if (errorCts.IsCancellationRequested)
            {
                throw new Exception("error occured, and we're failing fast.");
            }
        }