/// <summary>
/// Creates a ZooKeeper-coordinated Kafka consumer and (optionally) starts it immediately.
/// </summary>
/// <param name="zkConnectionString">ZooKeeper connection string; must not be null or whitespace.</param>
/// <param name="topic">Topic to consume from.</param>
/// <param name="groupId">Consumer group id; must not be null or whitespace.</param>
/// <param name="consumerId">Consumer id; null is normalized to an empty string.</param>
/// <param name="onMessageReceived">Callback invoked for each received message.</param>
/// <param name="consumerConfig">Optional config; falls back to <see cref="ConsumerConfig.DefaultConfig"/>.</param>
/// <param name="start">When true (default), the consumer starts inside the constructor.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="zkConnectionString"/> or <paramref name="groupId"/> is null or whitespace.</exception>
public KafkaConsumer(string zkConnectionString, string topic, string groupId, string consumerId,
                     OnKafkaMessageReceived onMessageReceived,
                     ConsumerConfig consumerConfig = null, bool start = true)
{
    // Validate up front, matching the sibling constructors, so we never
    // partially initialize the instance with bad connection parameters.
    if (string.IsNullOrWhiteSpace(zkConnectionString))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(zkConnectionString));
    }
    if (string.IsNullOrWhiteSpace(groupId))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(groupId));
    }
    _consumerConfig = consumerConfig ?? ConsumerConfig.DefaultConfig;
    ZkConnectionString = zkConnectionString;
    Topic = topic;
    GroupId = groupId;
    ConsumerId = consumerId ?? string.Empty;
    SlidingDoors = new ConcurrentDictionary<int, SlidingDoor>();
    // Auto-commit is disabled: offsets are committed explicitly by the caller.
    ConsumerConfiguration = new ConsumerConfiguration
    {
        BackOffIncrement = _consumerConfig.BackOffIncrement,
        AutoCommit = false,
        GroupId = GroupId,
        ConsumerId = ConsumerId,
        BufferSize = ConsumerConfiguration.DefaultBufferSize,
        MaxFetchBufferLength = ConsumerConfiguration.DefaultMaxFetchBufferLength,
        FetchSize = ConsumerConfiguration.DefaultFetchSize,
        AutoOffsetReset = _consumerConfig.AutoOffsetReset,
        ZooKeeper = KafkaClient.GetZooKeeperConfiguration(zkConnectionString),
        ShutdownTimeout = 100
    };
    _onMessageReceived = onMessageReceived;
    if (start)
    {
        Start();
    }
}
/// <summary>
/// Creates a broker-list Kafka consumer using a string/string configuration dictionary.
/// </summary>
/// <param name="brokerList">Bootstrap broker list; must not be null or whitespace.</param>
/// <param name="topics">Topics to subscribe to (forwarded to the base constructor).</param>
/// <param name="groupId">Consumer group id; must not be null or whitespace.</param>
/// <param name="consumerId">Used as the client id in the consumer configuration.</param>
/// <param name="onMessageReceived">Callback invoked for each received message.</param>
/// <param name="consumerConfig">Optional config (forwarded to the base constructor).</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="brokerList"/> or <paramref name="groupId"/> is null or whitespace.</exception>
public KafkaConsumer(string brokerList, string[] topics, string groupId, string consumerId,
                     OnKafkaMessageReceived<TKey, TValue> onMessageReceived,
                     ConsumerConfig consumerConfig = null)
    : base(topics, groupId, consumerId, consumerConfig)
{
    // Validate BEFORE assigning state so an invalid broker list cannot leave
    // the instance partially initialized (the original assigned first).
    if (string.IsNullOrWhiteSpace(brokerList))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(brokerList));
    }
    if (string.IsNullOrWhiteSpace(groupId))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(groupId));
    }
    BrokerList = brokerList;
    OnMessageReceived = onMessageReceived;
    // Auto-commit is disabled: offsets are committed explicitly by the caller.
    ConsumerConfiguration = new Dictionary<string, string>
    {
        { "group.id", GroupId },
        { "client.id", consumerId },
        { "enable.auto.commit", "false" },
        //{"socket.blocking.max.ms", ConsumerConfig["socket.blocking.max.ms"] ?? 50},
        //{"fetch.error.backoff.ms", ConsumerConfig["fetch.error.backoff.ms"] ?? 50},
        { "socket.nagle.disable", "true" },
        //{"statistics.interval.ms", 60000},
        { "retry.backoff.ms", ConsumerConfig.BackOffIncrement.ToString() },
        { "bootstrap.servers", BrokerList },
        { "auto.offset.reset", ConsumerConfig.AutoOffsetReset }
    };
}
/// <summary>
/// Blocking pull loop: repeatedly fetches messages and dispatches each one to
/// <paramref name="onMessagesReceived"/> until cancellation is requested.
/// </summary>
/// <param name="cancellationTokenSource">Source whose token stops the loop; also polled directly via IsCancellationRequested.</param>
/// <param name="onMessagesReceived">Per-message callback; receives this consumer and the message.</param>
private void ReceiveMessages(CancellationTokenSource cancellationTokenSource, OnKafkaMessageReceived onMessagesReceived)
{
    IEnumerable<KafkaMessages.Message> messages = null;

    #region peek messages that not been consumed since last time
    while (!cancellationTokenSource.IsCancellationRequested)
    {
        try
        {
            //var linkedTimeoutCTS = CancellationTokenSource.CreateLinkedTokenSource(cancellationTokenSource.Token,
            //                                                                       new CancellationTokenSource(3000).Token);
            messages = GetMessages(cancellationTokenSource.Token);
            foreach (var message in messages)
            {
                try
                {
                    // Track the message first so its offset is accounted for,
                    // then hand it to the callback; BlockIfFullLoad throttles
                    // when too many messages are in flight.
                    AddMessage(message);
                    onMessagesReceived(this, message);
                    BlockIfFullLoad();
                }
                catch (OperationCanceledException)
                {
                    return; // cooperative shutdown — exit silently
                }
                catch (ThreadAbortException)
                {
                    return; // thread being torn down — exit silently
                }
                catch (Exception ex)
                {
                    // Callback failed: un-track the message so its offset is not
                    // committed, then log and continue with the next message.
                    // NOTE(review): removal only happens when Payload is non-null —
                    // presumably payload-less messages are never tracked; confirm.
                    if (message.Payload != null)
                    {
                        RemoveMessage(message.PartitionId.Value, message.Offset);
                    }
                    _logger.Error(ex.GetBaseException().Message, ex);
                }
            }
        }
        catch (OperationCanceledException)
        {
            return;
        }
        catch (ThreadAbortException)
        {
            return;
        }
        catch (Exception ex)
        {
            // Fetch failed for a non-cancellation reason: back off briefly so a
            // persistent broker error does not spin the loop, then retry.
            if (!cancellationTokenSource.IsCancellationRequested)
            {
                Thread.Sleep(1000);
                _logger.Error(ex.GetBaseException().Message, ex);
            }
        }
    }
    #endregion
}
/// <summary>
/// Creates a broker-list Kafka consumer using a typed Confluent.Kafka configuration object.
/// </summary>
/// <param name="brokerList">Bootstrap broker list; must not be null or whitespace.</param>
/// <param name="topics">Topics to subscribe to (forwarded to the base constructor).</param>
/// <param name="groupId">Consumer group id; must not be null or whitespace.</param>
/// <param name="consumerId">Used as the client id in the consumer configuration.</param>
/// <param name="onMessageReceived">Callback invoked for each received message.</param>
/// <param name="consumerConfig">Optional config (forwarded to the base constructor).</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="brokerList"/> or <paramref name="groupId"/> is null or whitespace.</exception>
public KafkaConsumer(string brokerList, string[] topics, string groupId, string consumerId,
                     OnKafkaMessageReceived<TKey, TValue> onMessageReceived,
                     ConsumerConfig consumerConfig = null)
    : base(topics, groupId, consumerId, consumerConfig)
{
    // Validate BEFORE assigning state so an invalid broker list cannot leave
    // the instance partially initialized (the original assigned first).
    if (string.IsNullOrWhiteSpace(brokerList))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(brokerList));
    }
    if (string.IsNullOrWhiteSpace(groupId))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(groupId));
    }
    BrokerList = brokerList;
    OnMessageReceived = onMessageReceived;
    // Auto-commit is disabled: offsets are committed explicitly by the caller.
    ConsumerConfiguration = new Confluent.Kafka.ConsumerConfig
    {
        GroupId = GroupId,
        ClientId = consumerId,
        EnableAutoCommit = false,
        //{"socket.blocking.max.ms", ConsumerConfig["socket.blocking.max.ms"] ?? 50},
        //{"fetch.error.backoff.ms", ConsumerConfig["fetch.error.backoff.ms"] ?? 50},
        SocketNagleDisable = true,
        //{"statistics.interval.ms", 60000},
        //{"retry.backoff.ms", ConsumerConfig.BackOffIncrement.ToString()},
        BootstrapServers = BrokerList,
        // Project enum is cast to the Confluent enum; values are assumed to line up.
        AutoOffsetReset = (Confluent.Kafka.AutoOffsetReset)ConsumerConfig.AutoOffsetReset
    };
}
/// <summary>
/// Creates a broker-list Kafka consumer with explicit key/value deserializers.
/// NOTE(review): this overload only validates arguments and stores the config and
/// deserializers — <paramref name="brokerList"/>, <paramref name="topic"/>,
/// <paramref name="consumerId"/>, <paramref name="onMessageReceived"/> and
/// <paramref name="start"/> are never used. It appears to be an incomplete
/// duplicate of the fuller overload elsewhere in this source; confirm intent.
/// </summary>
/// <param name="brokerList">Bootstrap broker list; must not be null or whitespace.</param>
/// <param name="topic">Topic to consume from — currently unused; verify.</param>
/// <param name="groupId">Consumer group id; must not be null or whitespace.</param>
/// <param name="consumerId">Consumer id — currently unused; verify.</param>
/// <param name="onMessageReceived">Message callback — currently unused; verify.</param>
/// <param name="keyDeserializer">Key deserializer; must not be null.</param>
/// <param name="valueDeserializer">Value deserializer; must not be null.</param>
/// <param name="consumerConfig">Optional config; falls back to <see cref="ConsumerConfig.DefaultConfig"/>.</param>
/// <param name="start">Start flag — currently unused; verify.</param>
/// <exception cref="ArgumentNullException">Thrown when a deserializer is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="brokerList"/> or <paramref name="groupId"/> is null or whitespace.</exception>
public KafkaConsumer(string brokerList, string topic, string groupId, string consumerId,
                     OnKafkaMessageReceived<TKey, TValue> onMessageReceived,
                     IDeserializer<TKey> keyDeserializer, IDeserializer<TValue> valueDeserializer,
                     ConsumerConfig consumerConfig = null, bool start = true)
{
    _keyDeserializer = keyDeserializer ?? throw new ArgumentNullException(nameof(keyDeserializer));
    _valueDeserializer = valueDeserializer ?? throw new ArgumentNullException(nameof(valueDeserializer));
    if (string.IsNullOrWhiteSpace(brokerList))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(brokerList));
    }
    if (string.IsNullOrWhiteSpace(groupId))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(groupId));
    }
    _consumerConfig = consumerConfig ?? ConsumerConfig.DefaultConfig;
}
/// <summary>
/// Creates a broker-list Kafka consumer with explicit key/value deserializers and
/// (optionally) starts it immediately.
/// </summary>
/// <param name="brokerList">Bootstrap broker list; must not be null or whitespace.</param>
/// <param name="topic">Topic to consume from.</param>
/// <param name="groupId">Consumer group id; must not be null or whitespace.</param>
/// <param name="consumerId">Consumer id; null is normalized to an empty string.</param>
/// <param name="onMessageReceived">Callback invoked for each received message.</param>
/// <param name="keyDeserializer">Key deserializer; must not be null.</param>
/// <param name="valueDeserializer">Value deserializer; must not be null.</param>
/// <param name="consumerConfig">Optional config; falls back to <see cref="ConsumerConfig.DefaultConfig"/>.</param>
/// <param name="start">When true (default), the consumer starts inside the constructor.</param>
/// <exception cref="ArgumentNullException">Thrown when a deserializer is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="brokerList"/> or <paramref name="groupId"/> is null or whitespace.</exception>
public KafkaConsumer(string brokerList, string topic, string groupId, string consumerId,
                     OnKafkaMessageReceived<TKey, TValue> onMessageReceived,
                     IDeserializer<TKey> keyDeserializer, IDeserializer<TValue> valueDeserializer,
                     ConsumerConfig consumerConfig = null, bool start = true)
{
    _keyDeserializer = keyDeserializer ?? throw new ArgumentNullException(nameof(keyDeserializer));
    _valueDeserializer = valueDeserializer ?? throw new ArgumentNullException(nameof(valueDeserializer));
    if (string.IsNullOrWhiteSpace(brokerList))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(brokerList));
    }
    if (string.IsNullOrWhiteSpace(groupId))
    {
        throw new ArgumentException("Value cannot be null or whitespace.", nameof(groupId));
    }
    _consumerConfig = consumerConfig ?? ConsumerConfig.DefaultConfig;
    BrokerList = brokerList;
    Topic = topic;
    GroupId = groupId;
    ConsumerId = consumerId ?? string.Empty;
    SlidingDoors = new ConcurrentDictionary<int, SlidingDoor>();
    // Auto-commit is disabled: offsets are committed explicitly by the caller.
    ConsumerConfiguration = new Dictionary<string, object>
    {
        { "group.id", GroupId },
        // FIX: use the null-coalesced ConsumerId rather than the raw (possibly
        // null) consumerId argument, so the config never carries a null value.
        { "client.id", ConsumerId },
        { "enable.auto.commit", false },
        { "socket.blocking.max.ms", 10 },
        { "fetch.error.backoff.ms", 10 },
        { "socket.nagle.disable", true },
        //{"statistics.interval.ms", 60000},
        { "retry.backoff.ms", _consumerConfig.BackOffIncrement },
        { "bootstrap.servers", BrokerList },
        { "default.topic.config", new Dictionary<string, object> { { "auto.offset.reset", _consumerConfig.AutoOffsetReset } } }
    };
    _onMessageReceived = onMessageReceived;
    if (start)
    {
        Start();
    }
}
/// <summary>
/// Creates a started Kafka consumer for <paramref name="commandQueue"/> whose handler
/// prints per-message end-to-end latency and commits the offset.
/// </summary>
/// <param name="commandQueue">Topic name; also embedded in the machine-scoped group id.</param>
/// <param name="consumerId">Id assigned to the new consumer.</param>
/// <returns>The started consumer.</returns>
public static KafkaConsumer CreateConsumer(string commandQueue, string consumerId)
{
    OnKafkaMessageReceived onMessageReceived = (kafkaConsumer, kafkaMessage) =>
    {
        // Payload format is assumed to be "<text>@<send-time>" — TODO confirm against producer.
        var message = Encoding.UTF8.GetString(kafkaMessage.Payload);
        var sendTime = DateTime.Parse(message.Split('@')[1]);
        // FIX: read the clock once so the printed timestamp and the computed
        // latency refer to the same instant (the original read DateTime.Now twice).
        // Local time is kept deliberately to match the producer's embedded timestamp.
        var now = DateTime.Now;
        Console.WriteLine(
            $"consumer:{kafkaConsumer.ConsumerId} {now:HH:mm:ss.fff} consume message: {message} cost: {(now - sendTime).TotalMilliseconds}");
        kafkaConsumer.CommitOffset(kafkaMessage.PartitionId.Value, kafkaMessage.Offset);
    };
    var consumer = new KafkaConsumer(zkConnectionString, commandQueue,
                                     $"{Environment.MachineName}.{commandQueue}", consumerId,
                                     onMessageReceived);
    return consumer;
}