Example No. 1
    public async Task ThisIsHowWeParty()
    {
        await _producer.SendMany(_topic, Enumerable.Range(0, 10).Select(n => new ToposMessage($"THIS IS MESSAGE {n}")), partitionKey : "whatever");

        using var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        using var consumer = _consumerBuilder.Build();

        consumer.Subscribe(_topic);

        try
        {
            while (true)
            {
                var result  = consumer.Consume(cancellationTokenSource.Token);
                var message = result.Message;

                var body = Encoding.UTF8.GetString(message.Value);
                Console.WriteLine($"Got message: {body}");
            }
        }
        catch (OperationCanceledException) when(cancellationTokenSource.IsCancellationRequested)
        {
        }
    }
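Example No. 1 seeds its topic through a Topos producer (_producer.SendMany). For readers without Topos, a minimal sketch of an equivalent seeding step with a plain Confluent.Kafka producer could look like the method below; the ProduceTestMessages name and the localhost broker address are placeholders, not part of the example.

    // Hedged sketch: seeding the topic with a plain Confluent.Kafka producer instead of Topos.
    // "localhost:9092" is a placeholder broker address, not taken from the example above.
    private async Task ProduceTestMessages(string topic)
    {
        var producerConfig = new ProducerConfig { BootstrapServers = "localhost:9092" };

        using var producer = new ProducerBuilder<string, string>(producerConfig).Build();

        foreach (var n in Enumerable.Range(0, 10))
        {
            // The message key plays the role of the partitionKey argument in the Topos call.
            await producer.ProduceAsync(topic,
                new Message<string, string> { Key = "whatever", Value = $"THIS IS MESSAGE {n}" });
        }

        producer.Flush(TimeSpan.FromSeconds(10));
    }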
Example No. 2
        static void Main(string[] args)
        {
            var config = new ConsumerConfig();

            config.BootstrapServers = "23.99.218.43:9092";
            config.GroupId          = "Grupo1";

            var builder = new ConsumerBuilder <string, string>(config);

            using (var consumer = builder.Build())
            {
                Console.Write("Ouvindo Kafka");
                consumer.Subscribe("15netkafka");

                while (true)
                {
                    var result = consumer.Consume(TimeSpan.FromSeconds(1));

                    if (result != null && result.Value != null)
                    {
                        PerguntaResposta perguntaResposta = JsonConvert.DeserializeObject <PerguntaResposta> (result.Value);

                        Console.WriteLine("Pergunta: " + perguntaResposta.Pergunta, "Resposta: " + perguntaResposta.Resposta);

                        InsertSQL(perguntaResposta.Pergunta, perguntaResposta.Resposta);
                    }
                }
            }
        }
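The Main above deserializes each record into a PerguntaResposta object and reads its Pergunta and Resposta properties. The class definition is not part of the example; a minimal shape inferred from those accesses would be:

        // Hedged sketch: DTO inferred from the property accesses above; the real class used
        // by the example may carry more members or JSON attributes.
        public class PerguntaResposta
        {
            public string Pergunta { get; set; }
            public string Resposta { get; set; }
        }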
        /// <summary>
        /// Needs to return an <see cref="IConsumer{TKey, TValue}"/> for unit tests.
        /// Unfortunately <see cref="ConsumerBuilder{TKey, TValue}"/> returns a concrete <see cref="Consumer{TKey, TValue}"/>.
        /// </summary>
        protected virtual IConsumer <TKey, TValue> CreateConsumer(
            ConsumerConfig config,
            Action <Consumer <TKey, TValue>, Error> errorHandler,
            Action <IConsumer <TKey, TValue>, List <TopicPartition> > partitionsAssignedHandler,
            Action <IConsumer <TKey, TValue>, List <TopicPartitionOffset> > partitionsRevokedHandler,
            IAsyncDeserializer <TValue> asyncValueDeserializer = null,
            IDeserializer <TValue> valueDeserializer           = null,
            IAsyncDeserializer <TKey> keyDeserializer          = null
            )
        {
            var builder = new ConsumerBuilder <TKey, TValue>(config)
                          .SetErrorHandler(errorHandler)
                          .SetPartitionsAssignedHandler(partitionsAssignedHandler)
                          .SetPartitionsRevokedHandler(partitionsRevokedHandler);

            if (keyDeserializer != null)
            {
                builder.SetKeyDeserializer(keyDeserializer);
            }

            if (asyncValueDeserializer != null)
            {
                builder.SetValueDeserializer(asyncValueDeserializer);
            }
            else if (valueDeserializer != null)
            {
                builder.SetValueDeserializer(valueDeserializer);
            }

            return(builder.Build());
        }
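The method above accepts an IAsyncDeserializer and passes it straight to SetValueDeserializer. The Confluent consumer deserializes synchronously, so async deserializers are normally adapted first; a minimal sketch of that adaptation, assuming the same TKey/TValue type parameters and that the AsSyncOverAsync() extension from Confluent.Kafka.SyncOverAsync is available:

        // Hedged sketch: adapting an async value deserializer for the blocking Consume() path.
        // Any IAsyncDeserializer<TValue> (e.g. a schema-registry based one) could be passed in.
        private static IConsumer<TKey, TValue> BuildWithAsyncDeserializer(
            ConsumerConfig config,
            IAsyncDeserializer<TValue> asyncValueDeserializer)
        {
            return new ConsumerBuilder<TKey, TValue>(config)
                   // AsSyncOverAsync() bridges the async deserializer onto the sync consumer API.
                   .SetValueDeserializer(asyncValueDeserializer.AsSyncOverAsync())
                   .Build();
        }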
        /// <summary>
        /// Initializes a new instance of the <see cref="KafkaReceiver"/> class.
        /// </summary>
        /// <param name="name">The name of the receiver.</param>
        /// <param name="topic">
        /// The topic to subscribe to. A regex can be specified to subscribe to the set of
        /// all matching topics (which is updated as topics are added / removed from the
        /// cluster). A regex must be front anchored to be recognized as a regex. e.g. ^myregex
        /// </param>
        /// <param name="consumerConfig">The configuration used in creation of the Kafka consumer.</param>
        public KafkaReceiver(string name, string topic, ConsumerConfig consumerConfig)
            : base(name)
        {
            if (consumerConfig is null)
            {
                throw new ArgumentNullException(nameof(consumerConfig));
            }

            if (consumerConfig.EnableAutoCommit is false)
            {
                throw new ArgumentOutOfRangeException(nameof(consumerConfig), "The 'EnableAutoCommit' setting must be true.");
            }

            Topic                 = topic ?? throw new ArgumentNullException(nameof(topic));
            GroupId               = consumerConfig.GroupId;
            BootstrapServers      = consumerConfig.BootstrapServers;
            EnableAutoOffsetStore = consumerConfig.EnableAutoOffsetStore;
            AutoOffsetReset       = consumerConfig.AutoOffsetReset;

            var builder = new ConsumerBuilder <string, byte[]>(consumerConfig);

            builder.SetErrorHandler(OnError);

            _consumer = new Lazy <IConsumer <string, byte[]> >(() => builder.Build());

            _pollingThread = new Lazy <Thread>(() => new Thread(PollForMessages)
            {
                IsBackground = true
            });
            _trackingThread = new Lazy <Thread>(() => new Thread(TrackMessageHandling)
            {
                IsBackground = true
            });
        }
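The topic parameter documentation above notes that only front-anchored patterns are treated as regular expressions. A short sketch of what a regex subscription looks like on a bare consumer (broker address, group id, and pattern are placeholder values):

        // Hedged sketch: regex topic subscription. librdkafka treats a subscription that
        // starts with '^' as a regular expression; anything else is a literal topic name.
        var regexConsumer = new ConsumerBuilder<string, byte[]>(new ConsumerConfig
        {
            BootstrapServers = "localhost:9092",   // placeholder
            GroupId          = "regex-demo"        // placeholder
        }).Build();

        regexConsumer.Subscribe("^orders-.*");     // matches orders-created, orders-updated, ...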
Example No. 5
        public void StartConsuming()
        {
            var consumerBuilder = new ConsumerBuilder <string, Payload>(_config);

            consumerBuilder.SetValueDeserializer(new ProtoDeserializer <Payload>());

            using (var consumer = consumerBuilder.Build())
            {
                try
                {
                    consumer.Subscribe(_settings.Topic);

                    while (true)
                    {
                        var result = consumer.Consume(CancellationToken.None);
                        Console.WriteLine("From key {0} value: {1}", result.Key, result.Value);
                        Save(result.Key, result.Value);
                        consumer.Commit(result);
                    }
                }
                finally
                {
                    consumer.Close();
                }
            }
        }
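Example No. 5 plugs a custom ProtoDeserializer&lt;Payload&gt; into the builder. The contract such a class satisfies is Confluent.Kafka's IDeserializer&lt;T&gt;; below is a minimal sketch of an implementation, using JSON instead of the example's Protobuf handling (the class name and the JSON choice are illustrative only).

        // Hedged sketch: a minimal custom value deserializer. The JSON approach is only
        // illustrative; the ProtoDeserializer in the example presumably parses Protobuf.
        public class JsonDeserializer<T> : IDeserializer<T>
        {
            public T Deserialize(ReadOnlySpan<byte> data, bool isNull, SerializationContext context)
            {
                if (isNull)
                {
                    return default;
                }

                // Decode the raw bytes and let Newtonsoft.Json rebuild the object.
                var json = System.Text.Encoding.UTF8.GetString(data);
                return Newtonsoft.Json.JsonConvert.DeserializeObject<T>(json);
            }
        }

It is registered the same way as in the example, e.g. consumerBuilder.SetValueDeserializer(new JsonDeserializer&lt;Payload&gt;()).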
        /// <summary>
        /// Initializes a new instance of the <see cref="KafkaReceiver"/> class.
        /// </summary>
        /// <param name="name">The name of the receiver.</param>
        /// <param name="topic">
        /// The topic to subscribe to. A regex can be specified to subscribe to the set of
        /// all matching topics (which is updated as topics are added / removed from the
        /// cluster). A regex must be front anchored to be recognized as a regex. e.g. ^myregex
        /// </param>
        /// <param name="groupId">
        /// Client group id string. All clients sharing the same group.id belong to the same group.
        /// </param>
        /// <param name="bootstrapServers">
        /// List of brokers as a CSV list of broker host or host:port.
        /// </param>
        /// <param name="enableAutoOffsetStore">
        /// Whether to automatically store offset of last message provided to application.
        /// </param>
        /// <param name="autoOffsetReset">
        /// Action to take when there is no initial offset in offset store or the desired
        /// offset is out of range: 'smallest','earliest' - automatically reset the offset
        /// to the smallest offset, 'largest','latest' - automatically reset the offset to
        /// the largest offset, 'error' - trigger an error which is retrieved by consuming
        /// messages and checking 'message->err'.
        /// </param>
        public KafkaReceiver(string name, string topic, string groupId, string bootstrapServers,
                             bool enableAutoOffsetStore = false, AutoOffsetReset autoOffsetReset = Confluent.Kafka.AutoOffsetReset.Latest)
            : base(name)
        {
            Topic                 = topic ?? throw new ArgumentNullException(nameof(topic));
            GroupId               = groupId ?? throw new ArgumentNullException(nameof(groupId));
            BootstrapServers      = bootstrapServers ?? throw new ArgumentNullException(nameof(bootstrapServers));
            EnableAutoOffsetStore = enableAutoOffsetStore;
            AutoOffsetReset       = autoOffsetReset;

            var config  = GetConsumerConfig(groupId, bootstrapServers, enableAutoOffsetStore, autoOffsetReset);
            var builder = new ConsumerBuilder <string, byte[]>(config);

            builder.SetErrorHandler(OnError);

            _consumer = new Lazy <IConsumer <string, byte[]> >(() => builder.Build());

            _pollingThread = new Lazy <Thread>(() => new Thread(PollForMessages)
            {
                IsBackground = true
            });
            _trackingThread = new Lazy <Thread>(() => new Thread(TrackMessageHandling)
            {
                IsBackground = true
            });
        }
Example No. 7
        protected override async Task ExecuteAsync(CancellationToken stoppingToken)
        {
            using (var scope = _serviceScopeFactory.CreateScope())
            {
                _handler = scope.ServiceProvider.GetRequiredService <IKafkaHandler <TK, TV> >();

                var builder = new ConsumerBuilder <TK, TV>(_config).SetValueDeserializer(new KafkaDeserializer <TV>());

                using (IConsumer <TK, TV> consumer = builder.Build())
                {
                    consumer.Subscribe(_config.Topic);

                    while (!stoppingToken.IsCancellationRequested)
                    {
                        var result = consumer.Consume(TimeSpan.FromMilliseconds(1000));

                        if (result != null)
                        {
                            await _handler.HandleAsync(result.Message.Key, result.Message.Value);

                            consumer.Commit(result);

                            consumer.StoreOffset(result);
                        }
                    }
                }
            }
        }
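Example No. 7 commits and stores offsets by hand after each handled message, which only makes sense when the automatic behavior is switched off in the configuration. A sketch of the config such a loop typically assumes (broker address and group id are placeholders):

            // Hedged sketch: configuration usually paired with manual Commit()/StoreOffset() calls.
            var manualConfig = new ConsumerConfig
            {
                BootstrapServers      = "localhost:9092",     // placeholder
                GroupId               = "manual-commit-demo", // placeholder
                EnableAutoCommit      = false,  // we call Commit(result) ourselves
                EnableAutoOffsetStore = false,  // we call StoreOffset(result) ourselves
                AutoOffsetReset       = AutoOffsetReset.Earliest
            };

In practice one of the two calls is usually enough: either a synchronous Commit(result) per message, or StoreOffset(result) combined with EnableAutoCommit = true so the background committer picks up the stored offsets.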
Example No. 8
        public override Action Connection(IEnumerable <KeyValuePair <string, object> > options)
        {
            string brokerList = "";

            foreach (var option in options)
            {
                if ("BorkerList".Equals(option.Key))
                {
                    brokerList = option.Value.ToString();
                }
            }
            var cConfig = new ConsumerConfig
            {
                BootstrapServers      = brokerList,
                BrokerVersionFallback = "0.10.0.0",
                ApiVersionFallbackMs  = 0,
                //SaslMechanism = SaslMechanism.Plain,
                //SecurityProtocol = SecurityProtocol.SaslSsl,
                //SslCaLocation = "/usr/local/etc/openssl/cert.pem", // suitable configuration for linux, osx.
                // SslCaLocation = "c:\\path\\to\\cacert.pem",     // windows
                //SaslUsername = "******",
                //SaslPassword = "******",
                GroupId         = Guid.NewGuid().ToString(),
                AutoOffsetReset = AutoOffsetReset.Earliest
            };

            return(() =>
            {
                var consumerBuilder = new ConsumerBuilder <string, MessageBase>(cConfig);
                consumerBuilder.SetErrorHandler(OnConnectionException);
                _consumerClient = consumerBuilder.Build();
            });
        }
Example No. 9
        public IConsumer <byte[], byte[]> GetConsumer(ConsumerConfig config, IConsumerRebalanceListener rebalanceListener)
        {
            ConsumerBuilder <byte[], byte[]> builder = builderKafkaHandler.GetConsumerBuilder(config);

            if (rebalanceListener != null)
            {
                builder.SetPartitionsAssignedHandler((c, p) => rebalanceListener.PartitionsAssigned(c, p));
                builder.SetPartitionsRevokedHandler((c, p) => rebalanceListener.PartitionsRevoked(c, p));
                builder.SetLogHandler(loggerAdapter.LogConsume);
                builder.SetErrorHandler(loggerAdapter.ErrorConsume);
                if (exposeLibrdKafka)
                {
                    // TODO : test librdkafka statistics with IntegrationTest (WIP see #82)
                    var consumerStatisticsHandler = new ConsumerStatisticsHandler(
                        config.ClientId,
                        streamConfig.ApplicationId,
                        (config as StreamizConsumerConfig)?.ThreadId);
                    consumerStatisticsHandler.Register(MetricsRegistry);
                    builder.SetStatisticsHandler((c, stat) =>
                    {
                        var statistics = JsonConvert.DeserializeObject <Statistics>(stat);
                        consumerStatisticsHandler.Publish(statistics);
                    });
                }
            }

            return(builder.Build());
        }
Example No. 10
        public IConsumer <Ignore, byte[]> CreateConsumer(
            KafkaQueueConfiguration config,
            Action <IConsumer <Ignore, byte[]>, LogMessage> logHandler = null,
            Action <IConsumer <Ignore, byte[]>, Error> errorHandler    = null)
        {
            config.ThrowIfNull(nameof(config));

            var builder = new ConsumerBuilder <Ignore, byte[]>(new ConsumerConfig
            {
                GroupId                 = config.GroupId,
                BootstrapServers        = $"{config.Server}",
                AutoOffsetReset         = config.AutoOffsetReset,
                SaslKerberosKeytab      = config.KeyTab,
                SaslKerberosPrincipal   = config.User,
                SaslKerberosServiceName = config.ServiceName,
                SecurityProtocol        = config.Protocol,
                SaslMechanism           = config.Mechanism,
                Debug = config.Debug
            });

            if (logHandler != null)
            {
                builder.SetLogHandler(logHandler);
            }
            if (errorHandler != null)
            {
                builder.SetErrorHandler(errorHandler);
            }

            return(builder.Build());
        }
Example No. 11
        public void Subscribe(Action <T> dealMessage)
        {
            var config = new ConsumerConfig
            {
                GroupId          = ConsumerGroup,
                BootstrapServers = "192.168.0.7:9092",
                AutoOffsetReset  = AutoOffsetReset.Latest
            };

            Task.Run(() =>
            {
                var builder = new ConsumerBuilder <string, string>(config);
                using (var consumer = builder.Build())
                {
                    consumer.Subscribe(Topic);
                    while (true)
                    {
                        var result = consumer.Consume();
                        try
                        {
                            var message = JsonConvert.DeserializeObject <T>(result.Message.Value);
                            dealMessage(message);
                        }
                        catch (Exception)
                        {
                            Console.WriteLine($"Topic : {result.Topic}, Message : {result.Message.Value}");
                        }
                    }
                }
            });
        }
Example No. 12
        public IConsumer <string, string> GetConsumer()
        {
            var consumer = _consumerBuilder.Build();

            _consumers.Add(consumer);
            return(consumer);
        }
Example No. 13
        static void Main(string[] args)
        {
            var config = new ConsumerConfig
            {
                BootstrapServers = "192.168.56.1:9093",
                GroupId          = "consumer-group-ssl",
                ClientId         = "consumer-ssl-01",
                SaslMechanism    = SaslMechanism.Plain,
                SecurityProtocol = SecurityProtocol.SaslSsl,
                SaslUsername     = "******",
                SaslPassword     = "******",
                SslEndpointIdentificationAlgorithm = SslEndpointIdentificationAlgorithm.None,
                SslCaLocation          = @"root.crt",
                SslCertificateLocation = @"consumer_client.crt",
                SslKeyLocation         = @"consumer_client.key"
            };

            var builder = new ConsumerBuilder <string, string>(config);

            using (var consumer = builder.Build())
            {
                consumer.Subscribe("test");
                while (true)
                {
                    var record = consumer.Consume(1000);
                    if (record != null)
                    {
                        Console.WriteLine($"Key:{record.Message.Key}|Value:{record.Message.Value}");
                    }
                }
            }
        }
Example No. 14
        // static void Main(string[] args)
        // {
        //     CreateConsumerAndConsume();
        // }

        public void CreateConsumerAndConsume()
        {
            var cb = new ConsumerBuilder <string, string>(consumerConfig);

            Console.WriteLine("Press Ctrl+C to exit");
            Console.CancelKeyPress += new ConsoleCancelEventHandler(OnExit);
            using (var consumer = cb.Build())
            {
                consumer.Subscribe("first_topic");
                try
                {
                    while (!cts.IsCancellationRequested)
                    {
                        var cr     = consumer.Consume(cts.Token);
                        var offset = cr.TopicPartitionOffset;
                        Console.WriteLine($"Message '{cr.Value}' at: '{offset}'.");
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                    consumer.Close();
                }
            }
        }
Example No. 15
        static void Main(string[] args)
        {
            var config = new ConsumerConfig
            {
                GroupId          = "test-app",
                BootstrapServers = "192.168.56.1:9092",
                SecurityProtocol = SecurityProtocol.SaslPlaintext,
                SaslMechanism    = SaslMechanism.Plain,
                SaslUsername     = "******",
                SaslPassword     = "******",
                Debug            = "generic, broker, topic, metadata, consumer"
            };

            var builder = new ConsumerBuilder <string, string>(config);

            using (var consumer = builder.Build())
            {
                var water = consumer.GetWatermarkOffsets(new TopicPartition("test", 0));
                consumer.Assign(new TopicPartition("test", 0));
                //Thread.Sleep(5000);
                consumer.Seek(new TopicPartitionOffset("test", 0, water.Low));
                while (true)
                {
                    var r = consumer.Consume(1000);
                }
            }
        }
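Example No. 15 reads the watermark offsets before anything has been consumed. GetWatermarkOffsets only reflects values the client has cached locally, so right after startup it may still report Offset.Unset; QueryWatermarkOffsets asks the broker directly, which is usually what a seek-to-beginning snippet like this needs. A hedged variant of the relevant lines:

                // Hedged sketch: query the broker for the low/high watermarks instead of
                // relying on locally cached values, then seek to the low watermark.
                var partition  = new TopicPartition("test", 0);
                consumer.Assign(partition);

                var watermarks = consumer.QueryWatermarkOffsets(partition, TimeSpan.FromSeconds(5));
                consumer.Seek(new TopicPartitionOffset(partition, watermarks.Low));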
Example No. 16
        private IConsumer <TKey, TValue> BuildConsumer <TKey, TValue>(ConsumerConfig configuration, Action <ConsumerBuilder <TKey, TValue> > configure)
        {
            var consumerBuilder = new ConsumerBuilder <TKey, TValue>(configuration);

            configure?.Invoke(consumerBuilder);
            return(consumerBuilder.Build());
        }
Example No. 17
        /// <summary>
        /// Initialization
        /// </summary>
        /// <param name="_config_">Server, Topic, username and password</param>
        public KafkaConsumer(KafkaConsumerConfigForCredit _config_, IDeserializer <T> _deserializer_ = null)
        {
            this.Topics    = _config_.Topics;
            consumerConfig = new ConsumerConfig
            {
                BootstrapServers     = _config_.BrokerServers,
                SaslUsername         = _config_.SaslUsername,
                SaslPassword         = _config_.SaslPassword,
                SaslMechanism        = _config_.SaslMechanism,
                SecurityProtocol     = _config_.SecurityProtocol,
                GroupId              = (string.IsNullOrEmpty(_config_.GroupId)) ? Guid.NewGuid().ToString() : _config_.GroupId,
                EnableAutoCommit     = _config_.EnableAutoCommit,
                StatisticsIntervalMs = (_config_.StatisticsIntervalMs == 0) ? 6000 : _config_.StatisticsIntervalMs,
                SessionTimeoutMs     = (_config_.SessionTimeoutMs == 0) ? 6000 : _config_.SessionTimeoutMs,
                AutoOffsetReset      = _config_.AutoOffsetReset,
                EnablePartitionEof   = _config_.EnablePartitionEof
            };
            var consumerBuilder = new ConsumerBuilder <string, T>(consumerConfig);

            if (_deserializer_ != null)
            {
                consumer = consumerBuilder.SetValueDeserializer(_deserializer_).Build();
            }
            else
            {
                consumer = consumerBuilder.Build();
            }
        }
        public IEnumerable <ConsumerResult> CreateConsumerAndConsume(ConsumerConfig consumerConfig, string topic, CancellationToken cts)
        {
            var result = new List <ConsumerResult>();
            var cb     = new ConsumerBuilder <string, string>(consumerConfig);

            using (var consumer = cb.Build())
            {
                consumer.Subscribe(topic);
                try
                {
                    while (!cts.IsCancellationRequested)
                    {
                        var cr     = consumer.Consume(cts);
                        var offset = cr.TopicPartitionOffset;
                        result.Add(new ConsumerResult {
                            Message = cr.Message.Value, TopicOffset = offset
                        });
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                    consumer.Close();
                }
            }

            return(result);
        }
        //For integration test purpose.
        public ConsumerResult CreateConsumerAndConsumeSingleMessage(ConsumerConfig consumerConfig, string topic, CancellationToken cts)
        {
            ConsumerResult result = null;

            var cb = new ConsumerBuilder <string, string>(consumerConfig);

            using (var consumer = cb.Build())
            {
                consumer.Subscribe(topic);
                try
                {
                    var cr     = consumer.Consume(cts);
                    var offset = cr.TopicPartitionOffset;
                    result = new ConsumerResult {
                        Message = cr.Message.Value, TopicOffset = offset
                    };
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                    consumer.Close();
                }
            }

            return(result);
        }
Example No. 20
        private async Task ConsumeTopic(CancellationToken stoppingToken)
        {
            using (var scope = _serviceScopeFactory.CreateScope())
            {
                var handler = scope.ServiceProvider.GetRequiredService <IKafkaHandler <TK, TV> >();

                var builder = new ConsumerBuilder <TK, TV>(_config).SetValueDeserializer(new KafkaDeserializer <TV>());

                using (IConsumer <TK, TV> consumer = builder.Build())
                {
                    consumer.Subscribe(_config.Topic);

                    while (stoppingToken.IsCancellationRequested == false)
                    {
                        try
                        {
                            var result = consumer.Consume(3000);

                            if (result != null)
                            {
                                await handler.HandleAsync(result.Message.Key, result.Message.Value);

                                consumer.Commit(result);
                            }
                        }
                        catch (Exception ex)
                        {
                            Console.Write(ex);
                        }
                    }
                }
            }
        }
Example No. 21
        #pragma warning disable 8618
        public EventConsumer(
            IEventDeserializer eventDeserializer,
            EventConsumerConfig config,
            ILogger <EventConsumer <TAggregate, TAggregateId, TDeserializer> > logger)
        {
            _eventDeserializer = eventDeserializer;
            _logger            = logger;

            var aggregateType = typeof(TAggregate);

            var consumerConfig = new ConsumerConfig
            {
                GroupId            = config.ConsumerGroup,
                BootstrapServers   = config.KafkaConnectionString,
                AutoOffsetReset    = AutoOffsetReset.Earliest,
                EnablePartitionEof = true
            };

            var consumerBuilder        = new ConsumerBuilder <TAggregateId, string>(consumerConfig);
            var keyDeserializerFactory = new KeyDeserializerFactory();

            consumerBuilder.SetKeyDeserializer(keyDeserializerFactory.Create <TDeserializer, TAggregateId>());

            _eventConsumer = consumerBuilder.Build();

            var topicName = $"{config.TopicBaseName}-{aggregateType.Name}";

            _eventConsumer.Subscribe(topicName);
        }
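Example No. 21 enables EnablePartitionEof, which makes Consume return a marker result whenever the consumer reaches the end of a partition. A sketch of how the consuming loop usually checks for it (the cancellation token is a placeholder; the example's own consume loop is not shown):

            // Hedged sketch: handling partition-EOF results, which only appear when
            // EnablePartitionEof = true in the ConsumerConfig.
            while (!cancellationToken.IsCancellationRequested)
            {
                var result = _eventConsumer.Consume(cancellationToken);

                if (result.IsPartitionEOF)
                {
                    // End of the partition reached; nothing to deserialize or handle.
                    continue;
                }

                // result.Message.Key / result.Message.Value hold the deserialized event here.
            }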
Example No. 22
        /// <summary>
        /// Initializes a new instance of the <see cref="KafkaReceiver"/> class.
        /// </summary>
        /// <param name="name">The name of the receiver.</param>
        /// <param name="topic">
        /// The topic to subscribe to. A regex can be specified to subscribe to the set of
        /// all matching topics (which is updated as topics are added / removed from the
        /// cluster). A regex must be front anchored to be recognized as a regex. e.g. ^myregex
        /// </param>
        /// <param name="groupId">
        /// Client group id string. All clients sharing the same group.id belong to the same
        /// group.
        /// </param>
        /// <param name="bootstrapServers">
        /// List of brokers as a CSV list of broker host or host:port.
        /// </param>
        /// <param name="config">
        /// A collection of librdkafka configuration parameters (refer to
        /// https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md) and parameters
        /// specific to this client (refer to: Confluent.Kafka.ConfigPropertyNames).
        /// </param>
        public KafkaReceiver(string name, string topic, string groupId, string bootstrapServers, ConsumerConfig config = null)
            : base(name)
        {
            Topic  = topic ?? throw new ArgumentNullException(nameof(topic));
            Config = config ?? new ConsumerConfig();

            Config.GroupId          = groupId ?? throw new ArgumentNullException(nameof(groupId));
            Config.BootstrapServers = bootstrapServers ?? throw new ArgumentNullException(nameof(bootstrapServers));
            Config.EnableAutoCommit = Config.EnableAutoCommit ?? false;

            var consumerBuilder = new ConsumerBuilder <Ignore, string>(Config);

            consumerBuilder.SetErrorHandler(OnError);

            _consumer = new Lazy <IConsumer <Ignore, string> >(() => consumerBuilder.Build());

            _pollingThread = new Lazy <Thread>(() => new Thread(PollForMessages)
            {
                IsBackground = true
            });
            _trackingThread = new Lazy <Thread>(() => new Thread(TrackMessageHandling)
            {
                IsBackground = true
            });
        }
        protected override async Task ExecuteAsync(CancellationToken cancellationToken)
        {
            var consumer = _consumerBuilder.Build();

            consumer.Subscribe("LogTopic");

            while (!cancellationToken.IsCancellationRequested)
            {
                try
                {
                    var data = consumer.Consume(cancellationToken);

                    if (data.Message != null)
                    {
                        //Message message = JsonConvert.DeserializeObject<Message>(data.Value);
                        System.Console.WriteLine(data.Value);
                    }
                    await Task.Delay(1000, cancellationToken);

                    //Write logic to process message
                }
                catch (System.Exception)
                {
                    throw; // rethrow without resetting the stack trace
                }
            }
        }
Example No. 24
        static void Assign(ConsumerBuilder <string, string> builder, string bootstrap, string groupId, string topic)
        {
            var offsets = new List <TopicPartitionOffset>
            {
                new TopicPartitionOffset(new TopicPartition(topic, 0), 2),
                new TopicPartitionOffset(new TopicPartition(topic, 1), 1),
            };

            // Check that no consumer is registered for this consumer group
            var adminClientConfig = new AdminClientConfig();

            adminClientConfig.BootstrapServers = bootstrap;
            var adminClientBuilder = new AdminClientBuilder(adminClientConfig);

            using (var adminClient = adminClientBuilder.Build())
            {
                var groupInfo = adminClient.ListGroup(groupId, TimeSpan.FromSeconds(10));
                if (groupInfo.Members.Count > 0)
                {
                    Console.WriteLine($"Error consumers already exist in this consumer group {groupId}");
                    foreach (var member in groupInfo.Members)
                    {
                        Console.WriteLine(
                            $"Member {member.MemberId} (client.id:{member.ClientId}#client.host:{member.ClientHost}) =" +
                            $" Assigment {DecodeMemberAssignment(member.MemberAssignment)}");
                    }
                    return;
                }
            }

            using (var consumer = builder.Build())
            {
                consumer.Commit(offsets);
            }
        }
Example No. 25
        public void ConsumeMessage(string topicName)
        {
            String ErrorReason = String.Empty;

            var consumeBldr = new ConsumerBuilder <Ignore, string>(_config.ConsumerConfig());

            // Register the error and log handlers
            consumeBldr.SetErrorHandler(ConsumerErrorHandler);
            consumeBldr.SetLogHandler(LogHandler);
            var consumer = consumeBldr.Build();

            try
            {
                consumer.Subscribe(topicName);
                ReadMessages(consumer);
            }
            catch (Exception ex)
            {
                // An unhandled exception occurred: record the reason and close the consumer
                ErrorReason = $"Error: {ex.Message} Type {ex.GetType().Name} not handled and closing the consumer. {ex.InnerException?.Message} Source: {ex?.Source}. Stack: {ex?.StackTrace}";
                _worker.UnProcessedMessage("Empty Message Found", ErrorReason, SyslogLevel.Error);
                consumer.Close(); //TBD
                consumer?.Dispose();
            }
        }
Example No. 26
        public IConsumer <byte[], byte[]> GetRestoreConsumer(ConsumerConfig config)
        {
            ConsumerBuilder <byte[], byte[]> builder = builderKafkaHandler.GetConsumerBuilder(config);

            builder.SetLogHandler(loggerAdapter.LogConsume);
            builder.SetErrorHandler(loggerAdapter.ErrorConsume);
            return(builder.Build());
        }
Example No. 27
        public IConsumer <string, string> Create()
        {
            var config  = new ConsumerConfig(_configuration.GetConsumerConfiguration());
            var builder = new ConsumerBuilder <string, string>(config);

            builder.SetErrorHandler(OnKafkaError);
            return(builder.Build());
        }
        /// <summary>
        /// Gets a consumer.
        /// </summary>
        protected virtual IConsumer <string, byte[]> GetConsumer(string bootstrapServers,
                                                                 bool enableAutoOffsetStore, AutoOffsetReset autoOffsetReset)
        {
            var config  = GetConsumerConfig(ReplayGroupId, bootstrapServers, enableAutoOffsetStore, autoOffsetReset);
            var builder = new ConsumerBuilder <string, byte[]>(config);

            return(builder.Build());
        }
        protected override IKafkaConsumer <TKey, TValue> CreateKafkaConsumer(ConsumerBuilder <TKey, TValue> consumerBuilder, string kafkaTopic)
        {
            var consumer      = consumerBuilder.Build();
            var kafkaConsumer = new KafkaConsumerWithMetrics <TKey, TValue>(kafkaTopic, consumer, Stats);

            Stats.Metric().CountInstances(kafkaConsumer).GetAwaiter().GetResult();

            return(kafkaConsumer);
        }
Example No. 30
        public IConsumer <byte[], byte[]> GetGlobalConsumer(ConsumerConfig config)
        {
            ConsumerBuilder <byte[], byte[]> builder = new ConsumerBuilder <byte[], byte[]>(config);

            // TODO : Finish
            builder.SetLogHandler(loggerAdapter.LogConsume);
            builder.SetErrorHandler(loggerAdapter.ErrorConsume);
            return(builder.Build());
        }