static void Main(string[] args)
{
    Console.WriteLine("Hello World!");

    // Move config to appsettings.json
    var config = new ConsumerConfig()
    {
        BootstrapServers = "localhost:9092",
        AutoOffsetReset = AutoOffsetReset.Earliest,
        ClientId = "Consumer 2",
        GroupId = "Bet API Sport Consumer",
        IsolationLevel = IsolationLevel.ReadCommitted,
        EnableAutoCommit = false,
        StatisticsIntervalMs = 10000
    };

    // Build the consumer with error, log and statistics handlers attached.
    var consumer = new ConsumerBuilder<string, string>(config)
        .SetErrorHandler(Handle_Error)
        .SetLogHandler(Handle_Log)
        .SetStatisticsHandler(Handle_Stats)
        .Build();

    CancellationTokenSource cts = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        e.Cancel = true;
        cts.Cancel();
    };

    consumer.Subscribe("betapisports");

    try
    {
        var commitCounter = 0;
        while (!cts.Token.IsCancellationRequested)
        {
            commitCounter += 1;
            var consumeResult = consumer.Consume(cts.Token);
            var message = consumeResult.Message.Value;

            Console.WriteLine($"Message Consumed: TPO={consumeResult.Topic}-{consumeResult.Partition}-{consumeResult.Offset}, Value={message}");

            // Commit every 10 messages rather than once per message.
            if (commitCounter >= 10)
            {
                consumer.Commit(consumeResult);
                commitCounter = 0;
            }
        }
    }
    catch (OperationCanceledException)
    {
    }
    finally
    {
        consumer.Commit();
        consumer.Close();
    }
}
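The config block above carries a "Move config to appsettings.json" TODO, and the Avro sample further down already binds ConsumerConfig straight from configuration. A minimal sketch of that approach, assuming an appsettings.json file with a "Kafka" section and the Microsoft.Extensions.Configuration.Json and .Binder packages (the file and section names are illustrative, not taken from the original project):

using Confluent.Kafka;
using Microsoft.Extensions.Configuration;

static ConsumerConfig LoadConsumerConfig()
{
    IConfiguration configuration = new ConfigurationBuilder()
        .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
        .Build();

    // Binds keys such as "Kafka:BootstrapServers" and "Kafka:GroupId" onto ConsumerConfig properties.
    var config = configuration.GetSection("Kafka").Get<ConsumerConfig>();
    config.EnableAutoCommit = false; // keep the manual-commit behaviour of the sample above
    return config;
}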
static void Main(string[] args)
{
    Console.WriteLine("Hello Again Kafkateers!");

    var config = new ConsumerConfig()
    {
        BootstrapServers = "localhost:9092",
        AutoOffsetReset = AutoOffsetReset.Earliest,
        ClientId = "Consumer 1",
        GroupId = "Group 1",
        IsolationLevel = IsolationLevel.ReadCommitted,
        EnableAutoCommit = false,
        StatisticsIntervalMs = 10000
    };

    var consumer = new ConsumerBuilder<string, string>(config)
        .SetErrorHandler(Handle_Error)
        .SetLogHandler(Handle_Log)
        .SetStatisticsHandler(Handle_Stats)
        .Build();

    CancellationTokenSource cts = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        e.Cancel = true;
        cts.Cancel();
    };

    consumer.Subscribe("TESTS");

    try
    {
        var commitCounter = 0;
        while (!cts.Token.IsCancellationRequested)
        {
            commitCounter += 1;
            var consumeResult = consumer.Consume(cts.Token);
            var message = JObject.Parse(consumeResult.Message.Value);

            Console.WriteLine($"Message Consumed: TPO={consumeResult.Topic}-{consumeResult.Partition}-{consumeResult.Offset}, Value={message.ToString()}");

            if (commitCounter >= 10)
            {
                consumer.Commit(consumeResult);
                commitCounter = 0;
            }
        }
    }
    catch (OperationCanceledException)
    {
        // Time to exit swiftly
    }
    finally
    {
        consumer.Commit();
        consumer.Close();
    }
}
public Task StartAsync(CancellationToken cancellationToken)
{
    string servidorKafka = _configuration.GetSection("Kafka").Value;

    var conf = new ConsumerConfig
    {
        GroupId = "Consumidor-EGF.Processos",
        BootstrapServers = servidorKafka,
        AutoOffsetReset = AutoOffsetReset.Earliest
    };

    using (var c = new ConsumerBuilder<Ignore, string>(conf).Build())
    {
        c.Subscribe("EGF.Processos");

        try
        {
            while (true)
            {
                // Use the host's cancellation token so the loop can actually be stopped.
                var message = c.Consume(cancellationToken);

                try
                {
                    var processo = JsonSerializer.Deserialize<EntidadeDeProcesso>(message.Message.Value);
                    var tipo = processo.Tipo;

                    if (_executores.TryGetValue(tipo, out Type tipoExecutor))
                    {
                        using var escopo = _services.CreateScope();

                        var gerenciadorDeLicenca = escopo.ServiceProvider.GetService<IGerenciadorDeLicenca>();
                        var licenca = gerenciadorDeLicenca.ObterLicencaDoHash(processo.Licenca);

                        var fabricaDeConexao = escopo.ServiceProvider.GetService<IFabricaDeConexao>();
                        fabricaDeConexao.DefinirLicenca(licenca);

                        var executor = escopo.ServiceProvider.GetRequiredService(tipoExecutor);

                        // Re-deserialize the payload into the concrete process type and invoke its executor.
                        var assembly = Assembly.Load(processo.Assembly);
                        var type = assembly.GetType(tipo);
                        var processoConvertido = JsonSerializer.Deserialize(message.Message.Value, type);
                        tipoExecutor.GetMethod("Executar").Invoke(executor, new object[] { processoConvertido });

                        c.Commit(message);
                    }
                }
                catch (Exception)
                {
                    // Commit even on failure so a bad message does not block the partition.
                    c.Commit(message);
                }
            }
        }
        catch (OperationCanceledException)
        {
            c.Close();
        }
    }

    return Task.CompletedTask;
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    // At this point the message is already in the topic, and we can handle the data however we please.
    // The message can be deserialized from JSON and pushed to Couchbase, SQL or SignalR at will, without slowing things down.
    // We can have n consumers listening for topics, e.g. live in-play: fixtureSnapshotsTopic & updateTopic.
    // The same producer can push to 2 different topics, i.e. processSnapshot & processSnapshotUpdate.
    // We will create a new consumer group, which can contain a SnapshotConsumer & UpdateMarketConsumer (odds, suspensions etc.).
    // One of these consumers can send data to SignalR and the other to Couchbase.

    // Build the consumer with error, log and statistics handlers attached.
    var consumer = new ConsumerBuilder<string, string>(this._config)
        .SetErrorHandler(Handle_Error)
        .SetLogHandler(Handle_Log)
        .SetStatisticsHandler(Handle_Stats)
        .Build();

    // Topic name we are listening to for data streams from Kafka.
    consumer.Subscribe("betapisports");

    try
    {
        var commitCounter = 0;
        while (!stoppingToken.IsCancellationRequested)
        {
            commitCounter += 1;
            var consumeResult = consumer.Consume(stoppingToken);
            var message = consumeResult.Message.Value;
            var serializedObject = JsonConvert.DeserializeObject<SyXSport>(message);

            // Store Sport JSON in Redis.
            this._connection.SaveKeyValueToDB(serializedObject.Id.ToString(), message, 0, 480);

            // Broadcast Sport JSON using SignalR so the UI can detect the change.
            await this._hub.SendMessage(message);

            Console.WriteLine($"Message Consumed: TPO={consumeResult.Topic}-{consumeResult.Partition}-{consumeResult.Offset}, Value={message}");

            // Commit every 10 messages rather than once per message.
            if (commitCounter >= 10)
            {
                consumer.Commit(consumeResult);
                commitCounter = 0;
            }
        }
    }
    catch (OperationCanceledException)
    {
    }
    finally
    {
        consumer.Commit();
        consumer.Close();
    }
}
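The comment block in the worker above talks about listening on more than one topic (the in-play fixtureSnapshotsTopic and updateTopic). One way to do that with a single consumer is to pass several topic names to Subscribe and branch on the topic of each result. The sketch below is only an illustration: it assumes the same <string, string> consumer and uses the topic names from that comment.

using System.Threading;
using Confluent.Kafka;

static void ConsumeSnapshotAndUpdateTopics(ConsumerConfig config, CancellationToken stoppingToken)
{
    using var consumer = new ConsumerBuilder<string, string>(config).Build();
    consumer.Subscribe(new[] { "fixtureSnapshotsTopic", "updateTopic" });

    try
    {
        while (true)
        {
            var result = consumer.Consume(stoppingToken);
            switch (result.Topic)
            {
                case "fixtureSnapshotsTopic":
                    // e.g. persist the full snapshot (Redis / Couchbase)
                    break;
                case "updateTopic":
                    // e.g. broadcast the market update (odds, suspensions) over SignalR
                    break;
            }
            consumer.Commit(result);
        }
    }
    catch (OperationCanceledException)
    {
        // host is shutting down
    }
    finally
    {
        consumer.Close();
    }
}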
public static void Start()
{
    try
    {
        var conf = new ConsumerConfig
        {
            BootstrapServers = bootstrapServers,
            GroupId = "kaisen",
            AutoOffsetReset = AutoOffsetReset.Earliest,
            EnableAutoCommit = false
        };

        using (var c = new ConsumerBuilder<Ignore, string>(conf).Build())
        {
            c.Subscribe(pollTopic);

            CancellationTokenSource cts = new CancellationTokenSource();
            while (true)
            {
                var cr = c.Consume(cts.Token);
                if (cr == null)
                {
                    Thread.Sleep(1000);
                    continue;
                }

                string xmlStr = cr.Value;
                c.Commit();
                Console.WriteLine($"Consumer:{xmlStr}");
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Consumer<Error>:{ex}");
    }
}
public ConsumeResult<TKey, TValue> Consume(string Topic)
{
    ConsumeResult<TKey, TValue> consumeResult = null;

    using (var consumer = new ConsumerBuilder<TKey, TValue>(_config.Value.ConsumerConfig)
        .SetErrorHandler((_, e) => _logger.LogError($"Error: {e.Reason}"))
        //.SetStatisticsHandler((_, json) => _logger.LogInformation($"Statistics: {json}"))
        .Build())
    {
        consumer.Subscribe(Topic);

        try
        {
            try
            {
                consumeResult = consumer.Consume();
                consumer.Commit(consumeResult);
            }
            catch (ConsumeException e)
            {
                _logger.LogError($"Error occurred: {e.Error.Reason}");
            }
        }
        catch (OperationCanceledException)
        {
            consumer.Close();
        }
    }

    return consumeResult;
}
public async Task StartAsync(CancellationToken cancellationToken)
{
    Console.WriteLine("Hello World!");

    var config = new List<KeyValuePair<string, string>>
    {
        new KeyValuePair<string, string>("group.id", "1"),
        new KeyValuePair<string, string>("bootstrap.servers", "localhost:9092"),
        new KeyValuePair<string, string>("enable.auto.commit", "true"),
        new KeyValuePair<string, string>("auto.offset.reset", "latest")
    };

    using (var consumer = new ConsumerBuilder<string, string>(config).Build())
    {
        consumer.Subscribe("workTopic");

        try
        {
            while (true)
            {
                // Consume the next event from Kafka.
                var message = consumer.Consume();
                consumer.Commit();
                Console.WriteLine(message.Value);
            }
        }
        catch (Exception)
        {
            consumer.Close();
        }
    }
}
static void Main(string[] args)
{
    try
    {
        var config = new ConsumerConfig
        {
            GroupId = "product-consumer",
            BootstrapServers = "localhost:9092"
        };

        using (var consumer = new ConsumerBuilder<string, string>(config).Build())
        {
            consumer.Subscribe("127.0.0.1.dbo.Product");

            while (true)
            {
                ConsumeResult<string, string> consumeResult = consumer.Consume();
                Console.WriteLine($"Mesaj {consumeResult.TopicPartitionOffset} isimli topic'ten alındı :{consumeResult.Value}");
                consumer.Commit();
            }
        }
    }
    catch (System.Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
public void Subscribe(Type type)
{
    using (var consumer = new ConsumerBuilder<string, string>(_consumerConfig).Build())
    {
        consumer.Subscribe(_topics);

        while (true)
        {
            var consumerResult = consumer.Consume();
            if (consumerResult.IsPartitionEOF)
            {
                continue;
            }

            var @event = JsonConvert.DeserializeObject(consumerResult.Message.Value, type) as IEvent;

            using (var scope = _serviceFactory.CreateScope())
            {
                var eventBus = scope.ServiceProvider.GetService<IEventBus>();
                eventBus.PublishLocal(@event);
                consumer.Commit();
            }
        }
    }
}
public void Consumir(string topico)
{
    ConsumerConfig.GroupId = ConsumerGroup;

    // This token is used to hook CTRL+C and cancel consumption from the broker. Its use is optional.
    CancellationTokenSource cts = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        e.Cancel = true;
        cts.Cancel();
    };

    using (var consumer = new ConsumerBuilder<string, string>(ConsumerConfig).Build())
    {
        consumer.Subscribe(topico);

        try
        {
            Console.WriteLine("Preparando para consumir... Novos registros chegarão imediatamente.\nPressione CTRL+C para encerrar");

            while (true)
            {
                var cr = consumer.Consume(cts.Token);
                Console.WriteLine($"Consumido registro da partição {cr.Partition}, offset {cr.Offset}, chave '{cr.Message.Key}' e valor '{cr.Message.Value}'");
                consumer.Commit(cr);
            }
        }
        catch (OperationCanceledException)
        {
            // Ctrl-C was pressed.
        }
        finally
        {
            consumer.Close();
        }
    }
}
public object GetStashedMessage()
{
    var conf = new ConsumerConfig
    {
        GroupId = this._topic,
        BootstrapServers = this._bootstrapServers,
        EnableAutoCommit = true,
        StatisticsIntervalMs = 5000,
        SessionTimeoutMs = 6000,
        AutoOffsetReset = AutoOffsetReset.Earliest,
        EnablePartitionEof = true
    };

    using var consumer = new ConsumerBuilder<string, string>(conf).Build();
    consumer.Subscribe(this._topic);

    CancellationTokenSource cts = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        e.Cancel = true;
        cts.Cancel();
    };

    var result = consumer.Consume(cts.Token);
    consumer.Commit();
    consumer.Close();

    return JsonConvert.DeserializeObject(result.Message.Value, new JsonSerializerSettings
    {
        TypeNameHandling = TypeNameHandling.All
    });
}
public async Task IniciarConsumo(CancellationTokenSource cts)
{
    // Auto-commit is disabled so that, when an error occurs, no commit is made and the message is not lost.
    _brokerHelper.ConsumerConfig.EnableAutoCommit = false;
    _brokerHelper.ConsumerConfig.AutoOffsetReset = AutoOffsetReset.Earliest;
    _brokerHelper.ConsumerConfig.GroupId = _consumerGroup;

    using (var consumer = new ConsumerBuilder<string, string>(_brokerHelper.ConsumerConfig).Build())
    {
        consumer.Subscribe(_topico);

        while (!cts.IsCancellationRequested)
        {
            try
            {
                var cr = consumer.Consume(cts.Token);
                _messageWriter.Write($"Consumiu (key : message): {cr.Message.Key} : {cr.Message.Value} no offset {cr.Offset}", MessageType.Output);

                if (SimularProcessamentoBemSucedido())
                {
                    _messageWriter.Write($"Processou com sucesso e realizou commit do offset {cr.Offset}", MessageType.Output);
                    consumer.Commit(cr);
                }
                else
                {
                    _messageWriter.Write($"Erro no processamento pós consumo. Não realizou commit do offset {cr.Offset}", MessageType.Output);
                    consumer.Subscribe(_topico); // force reading to restart from the last committed record
                }

                await Task.Delay(1000);
            }
            catch (ConsumeException e)
            {
                _messageWriter.Write($"Falha no consumo: {e.Error.Reason}", MessageType.Input);
            }
        }
    }
}
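When processing fails, the sample above deliberately skips the commit and calls Subscribe again so the consumer falls back to the last committed offset. An alternative is to seek the partition back to the record that failed, so the next Consume returns it again without waiting for a rebalance. The sketch below is only an illustration and assumes the same <string, string> consumer; the method and parameter names are hypothetical.

using Confluent.Kafka;

static void ReprocessarMensagem(IConsumer<string, string> consumer, ConsumeResult<string, string> falha)
{
    // Rewind the partition's fetch position to the failed record; since its offset was not
    // committed, the next Consume on this partition delivers the same message again.
    consumer.Seek(falha.TopicPartitionOffset);
}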
public async Task IniciarConsumo(CancellationTokenSource cts)
{
    _brokerHelper.ConsumerConfig.GroupId = _consumerGroup;

    using (var consumer = new ConsumerBuilder<string, string>(_brokerHelper.ConsumerConfig).Build())
    {
        consumer.Subscribe(_topico);

        while (!cts.IsCancellationRequested)
        {
            try
            {
                var cr = consumer.Consume(cts.Token);

                // Always commit the consumed message, regardless of whether the rest of the processing fails.
                consumer.Commit(cr);

                _messageWriter.Write($"Consumiu (key : message): {cr.Message.Key} : {cr.Message.Value} no offset {cr.Offset}", MessageType.Output);

                if (SimularProcessamentoBemSucedido())
                {
                    _messageWriter.Write($"Processou com sucesso e realizou commit do offset {cr.Offset}", MessageType.Output);
                }
                else
                {
                    _messageWriter.Write($"Erro no processamento pós consumo. Realizou commit do offset {cr.Offset} mesmo assim.", MessageType.Output);
                }

                await Task.Delay(1000);
            }
            catch (ConsumeException e)
            {
                _messageWriter.Write($"Falha no consumo: {e.Error.Reason}", MessageType.Input);
            }
        }
    }
}
public Task Consume(string topic, CancellationToken token)
{
    var task = Task.Run(async () =>
    {
        var consumer = new ConsumerBuilder<TKey, TValue>(_consumerConfig)
            .SetValueDeserializer(new JsonDeserializer<TValue>())
            .Build();

        consumer.Subscribe(topic);

        while (!token.IsCancellationRequested)
        {
            ConsumeResult<TKey, TValue> consumeResult = consumer.Consume(TimeSpan.FromMilliseconds(50));
            if (consumeResult != null && OnMessageConsumed != null)
            {
                await OnMessageConsumed.Invoke(consumeResult.Message);

                if (!_commitOnConsume)
                {
                    consumer.Commit(consumeResult);
                }
            }
        }
    }, token);

    return task;
}
static void Main(string[] args)
{
    //var consumergroup = Environment.GetEnvironmentVariable("CONSUMER_GROUP");
    var topicName = Environment.GetEnvironmentVariable("TOPIC_NAME");
    var brokerList = Environment.GetEnvironmentVariable("KAFKA_URL");

    var config = new ConsumerConfig
    {
        GroupId = "KafkaConsumer",
        BootstrapServers = "localhost:9092"
    };

    // SetRebalanceHandler is the older pre-1.0 client API; current Confluent.Kafka releases expose
    // SetPartitionsAssignedHandler / SetPartitionsRevokedHandler instead.
    using (var consumer = new ConsumerBuilder<string, string>(config)
        .SetRebalanceHandler((obj, e) =>
        {
            if (e.IsAssignment)
            {
                Console.WriteLine($"Assigned partitions: [{string.Join(", ", e.Partitions)}]");
            }
            else
            {
                Console.WriteLine($"Revoked partitions: [{string.Join(", ", e.Partitions)}]");
            }
        }).Build())
    {
        consumer.Subscribe("ToDoList");

        while (true)
        {
            ConsumeResult<string, string> consumeResult = consumer.Consume();
            Console.WriteLine($"Received message at {consumeResult.TopicPartitionOffset}: {consumeResult.Value}");
            consumer.Commit();
        }
    }
}
public static string ConsumeMessage(string serverUri, string topic, string groupId)
{
    var config = new ConsumerConfig
    {
        GroupId = groupId,
        BootstrapServers = serverUri,
        EnableAutoCommit = false,
        StatisticsIntervalMs = 5000,
        SessionTimeoutMs = 6000,
        AutoOffsetReset = AutoOffsetReset.Earliest,
        EnablePartitionEof = true
    };

    using (var c = new ConsumerBuilder<Ignore, string>(config).Build())
    {
        c.Subscribe(topic);

        var cts = new CancellationTokenSource();
        Console.CancelKeyPress += (_, e) =>
        {
            e.Cancel = true; // prevent the process from terminating.
            cts.Cancel();
        };

        var cr = c.Consume(cts.Token);
        c.Commit(cr);
        return cr.Message.Value;
    }
}
private void Apply_Click(object sender, RoutedEventArgs _)
{
    try
    {
        var config = new ConsumerConfig
        {
            GroupId = dataContext.GroupId,
            BootstrapServers = dataContext.EndPoint,
            EnableAutoCommit = false,
        };

        var consumer = new ConsumerBuilder<string, string>(config).Build();

        var topicPartitionOffset = new List<TopicPartitionOffset>();
        foreach (var po in dataContext.TopicPartionList)
        {
            var offset = new Offset(po.Offset);
            if (offset != Offset.Unset)
            {
                topicPartitionOffset.Add(new TopicPartitionOffset(dataContext.Topic, po.Partition, offset));
            }
        }

        consumer.Commit(topicPartitionOffset);
        MessageBox.Show($"Modify Result: success");
    }
    catch (Exception e)
    {
        MessageBox.Show($"Modify Failed, Exception:{e.Message}");
    }
}
public void OAuthBearerToken_PublishConsume(string bootstrapServers)
{
    LogToFileStartTest();

    if (string.IsNullOrEmpty(bootstrapServers))
    {
        // skip test if oauth enabled broker is not specified.
        return;
    }

    const string principal = "Tester";
    var issuedAt = DateTimeOffset.UtcNow;
    var expiresAt = issuedAt.AddMinutes(5);
    var token = Util.GetUnsecuredJwt(principal, "requiredScope", issuedAt, expiresAt);

    void Callback(IClient client, string cfg)
    {
        client.OAuthBearerSetToken(token, expiresAt.ToUnixTimeMilliseconds(), principal);
    }

    var message = new Message<string, string>
    {
        Key = $"{Guid.NewGuid()}",
        Value = $"{DateTimeOffset.UtcNow:T}"
    };

    var config = new ClientConfig
    {
        BootstrapServers = bootstrapServers,
        SecurityProtocol = SecurityProtocol.SaslPlaintext,
        SaslMechanism = SaslMechanism.OAuthBearer
    };
    var producerConfig = new ProducerConfig(config);
    var consumerConfig = new ConsumerConfig(config)
    {
        GroupId = $"{Guid.NewGuid()}",
        AutoOffsetReset = AutoOffsetReset.Earliest
    };

    var producer = new ProducerBuilder<string, string>(producerConfig)
        .SetOAuthBearerTokenRefreshHandler(Callback)
        .Build();
    var consumer = new ConsumerBuilder<string, string>(consumerConfig)
        .SetOAuthBearerTokenRefreshHandler(Callback)
        .Build();

    consumer.Subscribe(partitionedTopic);

    producer.Produce(partitionedTopic, message);
    producer.Flush(TimeSpan.FromSeconds(30));

    var received = consumer.Consume(TimeSpan.FromSeconds(30));
    Assert.NotNull(received);

    consumer.Commit(received);

    Assert.Equal(message.Key, received.Message.Key);
    Assert.Equal(message.Value, received.Message.Value);

    LogToFileEndTest();
}
public void Consume()
{
    string serverAddress = configurationRoot.GetSection("Kafka:ServerAddress").Get<string>();
    string topic = configurationRoot.GetSection("Kafka:Topic").Get<string>();
    string port = configurationRoot.GetSection("Kafka:Port").Get<string>();

    var config = new ConsumerConfig
    {
        BootstrapServers = serverAddress + ":" + port.Trim(),
        AutoOffsetReset = AutoOffsetReset.Earliest,
        GroupId = "my-consumer-group"
    };

    using (var c = new ConsumerBuilder<Null, string>(config).Build())
    {
        c.Subscribe(topic);

        CancellationTokenSource cts = new CancellationTokenSource();
        Console.CancelKeyPress += (_, e) =>
        {
            e.Cancel = true;
            cts.Cancel();
        };

        try
        {
            while (true)
            {
                try
                {
                    var cr = c.Consume(cts.Token);
                    City city = JsonConvert.DeserializeObject<City>(cr.Value);
                    string deliveryMessage = string.Format("Delivery succeeded for City is {0} at {1}.", city.Name, cr.TopicPartitionOffset);

                    // Make a request to the OpenWeather API to get the weather conditions.
                    WeatherInfo weatherInfo = weatherService.GetWeatherInfo(city);
                    string weatherMessage = $"The weather is {weatherInfo.weather[0].description.ToUpper()} at {city.Name.ToUpper()}";
                    Console.WriteLine(deliveryMessage + " " + weatherMessage);

                    // Commit the offset once the message has been processed.
                    c.Commit(cr);
                }
                catch (ConsumeException ex)
                {
                    Console.WriteLine($"Error occurred: {ex.Error.Reason}");
                }
                catch (Exception ex)
                {
                    Console.WriteLine($"Error occurred: {ex.ToString()}");
                }
            }
        }
        catch (OperationCanceledException ex)
        {
            Console.WriteLine($"Error: {ex.Message}");
        }
    }
}
public ConsumeResult<TKey, TValue> Consume(string Topic, int CommitPeriod)
{
    //_consumerConfig.EnableAutoCommit = false;
    _consumerConfig.EnablePartitionEof = true;

    ConsumeResult<TKey, TValue> consumeResult = null;

    using (var consumer = new ConsumerBuilder<TKey, TValue>(_consumerConfig)
        .SetErrorHandler((_, e) => _logger.LogError($"Error: {e.Reason}"))
        //.SetStatisticsHandler((_, json) => _logger.LogInformation($"Statistics: {json}"))
        .Build())
    {
        consumer.Subscribe(Topic);

        try
        {
            try
            {
                consumeResult = consumer.Consume();

                if (consumeResult.IsPartitionEOF)
                {
                    // EOF results carry no message, so do not read Value or commit them.
                    _logger.LogInformation(
                        $"Reached end of topic {consumeResult.Topic}, partition {consumeResult.Partition}, offset {consumeResult.Offset}.");
                }
                else
                {
                    _logger.LogInformation($"Received message at {consumeResult.TopicPartitionOffset}: {consumeResult.Value}");

                    if (consumeResult.Offset % CommitPeriod == 0)
                    {
                        try
                        {
                            if (_consumerConfig.EnableAutoCommit.HasValue && !_consumerConfig.EnableAutoCommit.Value)
                            {
                                consumer.Commit(consumeResult);
                            }
                        }
                        catch (KafkaException e)
                        {
                            _logger.LogError($"Commit error: {e.Error.Reason}");
                        }
                    }
                }
            }
            catch (ConsumeException e)
            {
                _logger.LogError($"Consume error: {e.Error.Reason}");
            }
        }
        catch (OperationCanceledException)
        {
            _logger.LogError("Closing consumer.");
            consumer.Close();
        }
    }

    return consumeResult;
}
private void ConsumeAsync(string topic, bool commitOnError, CancellationToken cancellationToken)
{
    using (var consumer = new ConsumerBuilder<Ignore, string>(_consumerConfig).Build())
    {
        consumer.Subscribe(topic);

        try
        {
            while (!cancellationToken.IsCancellationRequested)
            {
                ConsumeResult<Ignore, string> result = null;
                try
                {
                    result = consumer.Consume(cancellationToken);
                }
                catch (ConsumeException e)
                {
                    _logger.LogError(e, e.Message);
                }

                if (result == null)
                {
                    continue;
                }

                try
                {
                    var theInstance = (IMessagingEvent)Activator.CreateInstance(_messagingEvent.subscriptions[topic], _serviceProvider);
                    theInstance.Process(result.Value);
                }
                catch (OperationCanceledException)
                {
                    throw;
                }
                catch (Exception)
                {
                    // Rethrow unless the caller asked to commit even when processing fails.
                    if (!commitOnError)
                    {
                        throw;
                    }
                }

                consumer.Commit(result);
            }
        }
        catch (OperationCanceledException e)
        {
            _logger.LogWarning(e, $"Stopped consuming topic \"{topic}\".");
        }
        finally
        {
            consumer.Close();
        }
    }
}
public async Task ProcessPayload([TimerTrigger("*/5 * * * * *")] TimerInfo timerInfo, ILogger logger)
{
    try
    {
        var consumerConfig = new ConsumerConfig
        {
            BootstrapServers = _kafkaSettings.KafkaServer,
            SaslMechanism = SaslMechanism.ScramSha256,
            SecurityProtocol = SecurityProtocol.SaslSsl,
            SaslUsername = _kafkaSettings.UserName,
            SaslPassword = _kafkaSettings.Password,
            GroupId = _kafkaSettings.KafkaConsumerGroup,
            AutoOffsetReset = AutoOffsetReset.Earliest,
            EnableAutoCommit = false,
            EnablePartitionEof = true,
            EnableSslCertificateVerification = false
        };

        using (var consumer = new ConsumerBuilder<Ignore, string>(consumerConfig).Build())
        {
            consumer.Subscribe(_kafkaSettings.KafkaTopic);
            logger.LogDebug("subscribed on {topic}", _kafkaSettings.KafkaTopic);

            try
            {
                while (true)
                {
                    var payload = consumer.Consume();
                    if (payload.IsPartitionEOF)
                    {
                        logger.LogInformation("no record found");
                        break;
                    }

                    var parsedPayload = ParsePayload(payload.Message.Value);
                    await SaveData(
                        parsedPayload.WatchRequest,
                        parsedPayload.WeatherInfo,
                        parsedPayload.LocationInfo,
                        parsedPayload.ExchangeRateInfo);

                    consumer.Commit();
                    logger.LogInformation("id: {id}, request: {@WatchRequest}", payload.Offset.Value, parsedPayload.WatchRequest);
                }
            }
            catch (Exception ex)
            {
                logger.LogWarning(ex, "Processing exception");
            }
        }
    }
    catch (Exception ex)
    {
        logger.LogWarning(ex, "Consumer exception");
    }
}
public void Receber(Mensagem objMensagem)
{
    var config = new ConsumerConfig
    {
        BootstrapServers = string.Join(',', bootstrapServers),
        GroupId = grupo,
        AutoOffsetReset = AutoOffsetReset.Latest,
        EnableAutoCommit = false
    };

    CancellationTokenSource cts = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        e.Cancel = true;
        cts.Cancel();
    };

    try
    {
        using (var consumer = new ConsumerBuilder<Ignore, string>(config).Build())
        {
            consumer.Subscribe(nomeTopico);

            try
            {
                while (true)
                {
                    var cr = consumer.Consume(cts.Token);
                    Console.WriteLine($"Mensagem consumida: {cr.Message.Value} particao: {cr.TopicPartitionOffset}");

                    Mensagem msgRec = JsonSerializer.Deserialize<Mensagem>(cr.Message.Value);
                    if (objMensagem.id == msgRec.id)
                    {
                        consumer.Commit();
                        Console.WriteLine($"Mensagem recebida: {cr.Message.Value} particao: {cr.TopicPartitionOffset}");
                        consumer.Close();
                        break;
                    }
                }
            }
            catch (OperationCanceledException)
            {
                consumer.Close();
                Console.WriteLine("Cancelada a execução do Consumer...");
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Exceção: {ex.GetType().FullName} | " +
                          $"Mensagem: {ex.Message}");
    }
}
public void Receber()
{
    var config = new ConsumerConfig
    {
        BootstrapServers = string.Join(',', bootstrapServers),
        GroupId = grupo,
        AutoOffsetReset = AutoOffsetReset.Earliest
    };

    CancellationTokenSource cts = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        e.Cancel = true;
        cts.Cancel();
    };

    try
    {
        using (var consumer = new ConsumerBuilder<Ignore, string>(config).Build())
        {
            consumer.Subscribe(nomeTopico);

            try
            {
                while (true)
                {
                    var cr = consumer.Consume(cts.Token);
                    consumer.Commit();
                    Console.WriteLine($"Mensagem recebida: {cr.Message.Value} particao: {cr.TopicPartitionOffset}");

                    Mensagem msgRec = JsonSerializer.Deserialize<Mensagem>(cr.Message.Value);
                    if (Program.arrMensagem.Keys.FirstOrDefault(m => m == msgRec.id) == null)
                    {
                        // The message is processed here; it can be passed to other layers and then
                        // handed to the list that will produce the reply.
                        msgRec.status = "OK";
                        msgRec.dataRetorno = DateTime.Now;
                        Program.arrMensagem.Add(msgRec.id, msgRec);
                    }
                }
            }
            catch (OperationCanceledException)
            {
                consumer.Close();
                Console.WriteLine("Cancelada a execução do Consumer...");
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Exceção: {ex.GetType().FullName} | " +
                          $"Mensagem: {ex.Message}");
    }
}
static async Task Main(string[] args)
{
    try
    {
        var config = new ConsumerConfig
        {
            // BootstrapServers = "127.0.0.1:9092",
            // BootstrapServers = "192.168.16.133:9093",
            BootstrapServers = "39.99.144.108:8807",
            // Disable auto-committing of offsets.
            EnableAutoCommit = false,
            GroupId = "consumer-group-1"
        };

        using (var consumer = new ConsumerBuilder<Ignore, string>(config).Build())
        {
            CancellationTokenSource cancellationToken = new CancellationTokenSource();
            consumer.Subscribe("topic_messages");

            Console.CancelKeyPress += (_, e) =>
            {
                e.Cancel = true;
                cancellationToken.Cancel();
            };

            while (true)
            {
                try
                {
                    var consumeResult = consumer.Consume();
                    //var consumeResult = consumer.Consume(cancellationToken.Token);
                    Console.WriteLine($"Consumed message '{consumeResult.Message.Value}' at: '{consumeResult.TopicPartitionOffset}'.");

                    // Commit the offset manually.
                    consumer.Commit(new List<TopicPartitionOffset>() { consumeResult.TopicPartitionOffset });
                }
                catch (ConsumeException ce)
                {
                    Console.WriteLine($"consumer error: {ce.Error.Reason}");
                }
            }

            consumer.Close();
        }
    }
    catch (System.Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
}
static void runConsumerManual()
{
    var config = new ConsumerConfig
    {
        BootstrapServers = brokerList,
        GroupId = groupId,
        EnableAutoCommit = false
    };

    bool cancelled = false;
    int noRecordsCount = 0;

    using (var consumer = new ConsumerBuilder<Ignore, string>(config).Build())
    {
        consumer.Subscribe(topicName);
        var cancelToken = new CancellationTokenSource();

        ConsumeResult<Ignore, string> consumeResult = null;
        while (!cancelled)
        {
            consumeResult = consumer.Consume(cancelToken.Token);
            noRecordsCount++;

            // handle message
            Console.WriteLine($"Consumer Record:(Key: {consumeResult.Message.Key}, Value: {consumeResult.Message.Value} Partition: {consumeResult.TopicPartition.Partition} Offset: {consumeResult.TopicPartitionOffset.Offset})");

            if (consumeResult.Offset % 50 == 0)
            {
                consumer.Commit(consumeResult);
            }
        }

        // commit the rest
        consumer.Commit(consumeResult);
        consumer.Close();
    }
}
public static void Consume(string groupId, string consumerId)
{
    var conf = new ConsumerConfig
    {
        GroupId = groupId,
        BootstrapServers = _bootstrapServers,
        // Note: The AutoOffsetReset property determines the start offset in the event
        // there are not yet any committed offsets for the consumer group for the
        // topic/partitions of interest. By default, offsets are committed
        // automatically, so in this example, consumption will only start from the
        // earliest message in the topic 'my-topic' the first time you run the program.
        AutoOffsetReset = AutoOffsetReset.Earliest,
        EnableAutoCommit = false
    };

    using (var c = new ConsumerBuilder<Ignore, string>(conf).Build())
    {
        c.Subscribe("users");

        CancellationTokenSource cts = new CancellationTokenSource();
        Console.CancelKeyPress += (_, e) =>
        {
            e.Cancel = true; // prevent the process from terminating.
            cts.Cancel();
        };

        try
        {
            while (true)
            {
                try
                {
                    var cr = c.Consume(cts.Token);
                    Console.WriteLine($"{consumerId}:Consumed message '{cr.Value}' at: '{cr.TopicPartitionOffset}'.");

                    var result = c.Commit();
                    //Console.WriteLine($"{consumerId}:Commited message '{result.}' at: '{cr.TopicPartitionOffset}'.");
                }
                catch (ConsumeException e)
                {
                    Console.WriteLine($"{consumerId}:Error occurred: {e.Error.Reason}");
                }
            }
        }
        catch (OperationCanceledException)
        {
            // Ensure the consumer leaves the group cleanly and final offsets are committed.
            c.Close();
        }
    }
}
private static void Main(string[] args)
{
    Console.WriteLine("TimeOff Results Terminal\n");

    var configuration = new ConfigurationBuilder()
        .AddJsonFile("appsettings.json", true, true)
        .Build();

    var schemaRegistryConfig = configuration.GetSection(nameof(SchemaRegistryConfig)).Get<SchemaRegistryConfig>();
    var consumerConfig = configuration.GetSection(nameof(ConsumerConfig)).Get<ConsumerConfig>();

    // Read messages from start if no commit exists.
    consumerConfig.AutoOffsetReset = AutoOffsetReset.Earliest;

    using var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig);
    using var consumer = new ConsumerBuilder<string, LeaveApplicationProcessed>(consumerConfig)
        .SetKeyDeserializer(new AvroDeserializer<string>(schemaRegistry).AsSyncOverAsync())
        .SetValueDeserializer(new AvroDeserializer<LeaveApplicationProcessed>(schemaRegistry).AsSyncOverAsync())
        .SetErrorHandler((_, e) => Console.WriteLine($"Error: {e.Reason}"))
        .Build();

    try
    {
        Console.WriteLine("");
        consumer.Subscribe(ApplicationConstants.LeaveApplicationResultsTopicName);

        while (true)
        {
            var result = consumer.Consume();
            var leaveRequest = result.Message.Value;

            Console.WriteLine(
                $"Received message: {result.Message.Key} Value: {JsonSerializer.Serialize(leaveRequest)}");

            consumer.Commit(result);
            consumer.StoreOffset(result);

            Console.WriteLine("\nOffset committed");
            Console.WriteLine("----------\n\n");
        }
    }
    catch (ConsumeException e)
    {
        Console.WriteLine($"Consume error: {e.Error.Reason}");
    }
    finally
    {
        consumer.Close();
    }
}
public virtual async Task Run()
{
    using (var consumer = new ConsumerBuilder<TKey, string>(_consumerConfig).Build())
    {
        Logger.LogInfo("Subscribing to topic");
        consumer.Subscribe(_topic);
        Logger.LogInfo("Subscribed");

        Console.CancelKeyPress += (_, e) =>
        {
            e.Cancel = true; // prevent the process from terminating.
            CTS.Cancel();
            Logger.LogInfo("Stopping consumer...");
        };

        try
        {
            while (!CTS.IsCancellationRequested)
            {
                try
                {
                    var cr = consumer.Consume(CTS.Token);
                    Logger.LogInfo($"Consumed message '{cr.Key}' at: '{cr.TopicPartitionOffset}'.");

                    var value = JsonConvert.DeserializeObject<TValue>(cr.Value);
                    await ProcessRecord(cr.Key, value);

                    if (_consumerConfig.EnableAutoCommit == false)
                    {
                        consumer.Commit();
                    }
                }
                catch (ConsumeException e)
                {
                    Logger.LogError($"Error occurred: {e.Error.Reason}");
                }
            }
        }
        catch (OperationCanceledException)
        {
            // Ensure the consumer leaves the group cleanly and final offsets are committed.
            consumer.Close();
        }
    }
}
private static void CreateConsumer(string bootstrapServers, string groupId, string topic)
{
    using var consumer = new ConsumerBuilder<Ignore, Ignore>(new ConsumerConfig
    {
        BootstrapServers = bootstrapServers,
        GroupId = groupId,
        EnableAutoCommit = false,
        AutoOffsetReset = AutoOffsetReset.Earliest
    }).Build();

    consumer.Subscribe(topic);

    // Consume returns null if nothing arrives within the timeout, so guard the commit.
    var cr = consumer.Consume(TimeSpan.FromSeconds(10));
    if (cr != null)
    {
        consumer.Commit(cr);
    }

    consumer.Close();
}