// Consumes encrypted notification payloads from the "notifications-9" topic,
// forwards each to its registered event receiver's URL, then archives the
// message to table storage.
//
// Expected message layout: "<hash> <base64-nonce> <encrypted>" (space-separated).
// NOTE(review): assumes exactly one EventReceiver row has PartitionKey == hash —
// Single() throws on zero or multiple matches; confirm upstream guarantees this.
public async Task Kafka(
    [KafkaTrigger(
        "notification.services.tonlabs.io:29092",
        "notifications-9",
        ConsumerGroup = "$Default",
        Protocol = BrokerProtocol.SaslPlaintext,
        AuthenticationMode = BrokerAuthenticationMode.ScramSha512,
        Username = "******",
        Password = "******")] KafkaEventData<string> kafkaEvent,
    ILogger log)
{
    // Declared outside the try so the catch block can log whatever was parsed
    // before the failure.
    var hash = string.Empty;
    var nonce = string.Empty;
    var encrypted = string.Empty;
    try
    {
        // Split the raw message into its three space-separated parts.
        var input = kafkaEvent.Value.Split(' ');
        hash = input[0];
        nonce = input[1].Base64ToHex();
        encrypted = input[2];

        // Look up the single receiver registered under this hash.
        var eventReceiver = _eventReceivers
            .Query<EventReceiver>($"PartitionKey eq '{hash}'")
            .Single();

        // Unverified receivers are skipped — except the hard-coded freeton.org URL,
        // which is always delivered to.
        if (eventReceiver.Url != "https://freeton.org" && !eventReceiver.IsVerified)
        {
            return;
        }

        // The Polly execution context carries the logger into the retry policy
        // configured on the named "pollyClient".
        var context = new Context().WithLogger(log);
        var client = _httpClientFactory.CreateClient("pollyClient");
        var request = new HttpRequestMessage(HttpMethod.Post, eventReceiver.Url);
        request.Content = new StringContent(encrypted, Encoding.UTF8, "text/plain");
        request.SetPolicyExecutionContext(context);
        var response = await client.SendAsync(request);

        // NOTE(review): nameof(Kafka) is used as the message template, so the
        // anonymous-object args are never rendered — presumably captured by a
        // structured-logging sink; confirm the sink actually records them.
        log.LogInformation(nameof(Kafka), new[] { new { hash, statusCode = response.StatusCode } });

        // Archive the processed message; PartitionKey == RowKey == hash.
        await _kafkaMessages.AddEntityAsync(new KafkaMessage
        {
            PartitionKey = hash,
            RowKey = hash,
            Nonce = nonce,
            Encrypted = encrypted
        });
    }
    catch (Exception ex)
    {
        log.LogError(
            ex,
            nameof(Kafka),
            new[] { new { hash, nonce, encrypted } });
        // Rethrow (stack trace preserved) so the runtime records the failure.
        throw;
    }
}
// Verifies that binding data built from a single Kafka event exposes every
// event property under both its Pascal-case and lower-case key.
public void SingleDispatch_GetBindingData_Should_Create_Data_From_Kafka_Event()
{
    // Arrange: one fully populated event.
    var expectedTimestamp = new DateTime(2019, 1, 10, 9, 21, 0, DateTimeKind.Utc);
    var kafkaEventData = new KafkaEventData<string, string>()
    {
        Key = "1",
        Offset = 100,
        Partition = 2,
        Timestamp = expectedTimestamp,
        Topic = "myTopic",
        Value = "Nothing",
    };

    // Act
    var strategy = new KafkaTriggerBindingStrategy<string, string>();
    var binding = strategy.GetBindingData(KafkaTriggerInput.New(kafkaEventData));

    // Assert: each property is reachable through both key casings.
    foreach (var name in new[] { "Key", "key" })
    {
        Assert.Equal("1", binding[name]);
    }
    foreach (var name in new[] { "Offset", "offset" })
    {
        Assert.Equal(100L, binding[name]);
    }
    foreach (var name in new[] { "Partition", "partition" })
    {
        Assert.Equal(2, binding[name]);
    }
    foreach (var name in new[] { "Timestamp", "timestamp" })
    {
        Assert.Equal(expectedTimestamp, binding[name]);
    }
    foreach (var name in new[] { "Topic", "topic" })
    {
        Assert.Equal("myTopic", binding[name]);
    }
}
// Timer-triggered producer: once a minute, fetches a random TibiaData news
// article and publishes the raw JSON body to the "tibiaTopic" Kafka topic.
public static async Task Run(
    [TimerTrigger("0 */1 * * * *")] TimerInfo myTimer,
    [Kafka("LocalBroker", "tibiaTopic")] IAsyncCollector<KafkaEventData<string>> events,
    ILogger log)
{
    log.LogInformation($"C# Timer trigger function executed at: {DateTime.Now}");

    // Pick a random news id in the range the API currently serves.
    var random = new Random();
    int randomNumber = random.Next(3300, 3600);

    // NOTE(review): a new HttpClient per invocation risks socket exhaustion
    // under load — prefer a shared static instance or IHttpClientFactory.
    var client = new HttpClient();
    var response = await client.GetAsync($"https://api.tibiadata.com/v2/news/{randomNumber}.json");
    var content = await response.Content.ReadAsStringAsync();

    try
    {
        var kafkaEvent = new KafkaEventData<string>()
        {
            Value = content,
        };
        await events.AddAsync(kafkaEvent);
    }
    catch (Exception)
    {
        // Fix: was "throw ex;", which resets the stack trace; a bare "throw;"
        // rethrows with the original trace intact.
        throw;
    }

    log.LogInformation($"CONTENT RETRIEVED: {content}");
}
// Writes each custom-deserialised car message from the "cars.reply" topic
// to the console.
public static void RunKafka(
    [KafkaTrigger("my-confluent-oss-cp-kafka-headless.default.svc:9092", "cars.reply", ConsumerGroup = "cars-saver")] KafkaEventData<string, string> kafkaEvent)
{
    var message = $"Custom deserialised car from batch: { kafkaEvent.Value }";
    Console.WriteLine(message);
}
// Publishes one ProtoUser per content item to the given topic, pairing each
// value with the next key in sequence and cycling through a fixed colour palette.
//
// NOTE(review): assumes keys has at least as many elements as content — if keys
// runs out, MoveNext() returns false and Current is stale/default; confirm callers.
public static async Task SendProtobufWithStringKeyTopic(
    string topic,
    IEnumerable<string> keys,
    IEnumerable<string> content,
    [Kafka(BrokerList = "LocalBroker", KeyType = typeof(string), ValueType = typeof(ProtoUser))] IAsyncCollector<KafkaEventData> output)
{
    var colors = new[] { "red", "blue", "green" };

    // Fix: IEnumerator<T> is IDisposable and was never disposed.
    using (var keysEnumerator = keys.GetEnumerator())
    {
        var i = 0;
        foreach (var c in content)
        {
            keysEnumerator.MoveNext();
            var message = new KafkaEventData()
            {
                Key = keysEnumerator.Current,
                Topic = topic,
                Value = new ProtoUser()
                {
                    Name = c,
                    FavoriteColor = colors[i % colors.Length],
                    FavoriteNumber = i,
                },
            };
            await output.AddAsync(message);
            i++;
        }
    }
}
// Publishes one MyAvroRecord per content item to the given topic, pairing each
// value with the next key in sequence.
//
// NOTE(review): assumes keys has at least as many elements as content — if keys
// runs out, MoveNext() returns false and Current is stale/default; confirm callers.
public static async Task SendAvroWithStringKeyTopic(
    string topic,
    IEnumerable<string> keys,
    IEnumerable<string> content,
    [Kafka(BrokerList = "LocalBroker", KeyType = typeof(string), ValueType = typeof(MyAvroRecord))] IAsyncCollector<KafkaEventData> output)
{
    // Fix: IEnumerator<T> is IDisposable and was never disposed.
    using (var keysEnumerator = keys.GetEnumerator())
    {
        foreach (var c in content)
        {
            keysEnumerator.MoveNext();
            var message = new KafkaEventData()
            {
                Key = keysEnumerator.Current,
                Topic = topic,
                Value = new MyAvroRecord()
                {
                    ID = c,
                    Ticks = DateTime.UtcNow.Ticks,
                },
            };
            await output.AddAsync(message);
        }
    }
}
// Parses each Confluent Cloud "materials" event as JSON and hands it to the
// material service; malformed JSON is logged and the event is dropped.
public void ConfluentCloudStringTrigger(
    [KafkaTrigger(
        "KafkaBrokerUrl",
        "materials",
        ConsumerGroup = "cg-01",
        Protocol = BrokerProtocol.SaslSsl,
        AuthenticationMode = BrokerAuthenticationMode.Plain,
        Username = "******",
        Password = "******",
        SslCaLocation = "confluent_cloud_cacert.pem")] KafkaEventData<string> kafkaEvent,
    ILogger logger)
{
    string kafkaEventValue = kafkaEvent.Value.ToString();
    try
    {
        JObject obj = JObject.Parse(kafkaEventValue);
        // NOTE(review): AddAsync is not awaited in this void method, so failures
        // are unobserved (fire-and-forget) — consider an async Task signature.
        _materialService.AddAsync(obj);
    }
    catch (JsonReaderException ex)
    {
        //TODO: add retries and handle the commit of the Kafka event
        // Fix: the template has a {kafkaEventValue} placeholder but the argument
        // was never supplied, so the raw value was never logged.
        logger.LogError(ex, "The Kafka event value [{kafkaEventValue}] is not valid", kafkaEventValue);
    }
}
// Deserialises each "topic-acoes" event into an Acao, validates it, and saves
// valid records through the static repository.
public static void Run([KafkaTrigger(
    "KafkaConnection",
    "topic-acoes",
    ConsumerGroup = "topic-acoes-mongo")] KafkaEventData<string> kafkaEvent,
    ILogger log)
{
    string dados = kafkaEvent.Value.ToString();
    log.LogInformation($"KafkaAcoesTopicTrigger - Dados: {dados}");

    // Case-insensitive property matching so payload casing doesn't matter.
    var opcoes = new JsonSerializerOptions() { PropertyNameCaseInsensitive = true };
    var acao = JsonSerializer.Deserialize<Acao>(dados, opcoes);

    // Guard clause: bail out early on invalid data.
    var validationResult = new AcaoValidator().Validate(acao);
    if (!validationResult.IsValid)
    {
        log.LogInformation("KafkaAcoesTopicTrigger - Dados inválidos para a Ação");
        return;
    }

    log.LogInformation($"KafkaAcoesTopicTrigger - Dados pós formatação: {JsonSerializer.Serialize(acao)}");
    AcoesRepository.Save(acao);
    log.LogInformation("KafkaAcoesTopicTrigger - Ação registrada com sucesso!");
}
// Publishes one string-keyed MyAvroRecord per content item to the given topic.
//
// NOTE(review): assumes keys has at least as many elements as content — if keys
// runs out, MoveNext() returns false and Current is stale/default; confirm callers.
public static async Task Produce_AsyncCollector_Avro_With_String_key(
    string topic,
    IEnumerable<string> keys,
    IEnumerable<string> content,
    [Kafka(BrokerList = "LocalBroker")] IAsyncCollector<KafkaEventData<string, MyAvroRecord>> output)
{
    // Fix: IEnumerator<T> is IDisposable and was never disposed.
    using (var keysEnumerator = keys.GetEnumerator())
    {
        foreach (var c in content)
        {
            keysEnumerator.MoveNext();
            var message = new KafkaEventData<string, MyAvroRecord>()
            {
                Key = keysEnumerator.Current,
                Topic = topic,
                Value = new MyAvroRecord()
                {
                    ID = c,
                    Ticks = DateTime.UtcNow.Ticks,
                },
            };
            await output.AddAsync(message);
        }
    }
}
// Round-trips events carrying headers through Kafka and checks that the headers
// survive and come back read-only.
public async Task Produce_And_Consume_With_Headers()
{
    // Fix: the query was deferred, so "input" was enumerated twice (once by
    // ProduceAndConsumeAsync, once by the verification loop), building two
    // distinct sets of event objects. Materialise it once up front.
    var input = Enumerable.Range(1, 10)
        .Select(x =>
        {
            var eventData = new KafkaEventData<string>
            {
                Value = x.ToString()
            };
            // Event with value x carries x headers under the same key.
            for (var i = 0; i < x; i++)
            {
                eventData.Headers.Add("testHeader", Encoding.UTF8.GetBytes("testValue" + i));
            }
            return eventData;
        })
        .ToList();

    var output = await ProduceAndConsumeAsync(input);

    foreach (var inputEvent in input)
    {
        // Match produced and consumed events by their (unique) value.
        var outputEvent = output.SingleOrDefault(x => x.Value == inputEvent.Value);
        Assert.NotNull(outputEvent);
        Assert.Equal(inputEvent.Headers.Count, outputEvent.Headers.Count);
        Assert.Equal("testValue0", Encoding.UTF8.GetString(outputEvent.Headers.GetFirst("testHeader")));
        // Consumed headers must be read-only.
        Assert.Throws<NotSupportedException>(() => outputEvent.Headers.Remove("testHeader"));
    }
}
// Logs each message received from the "topic-kafka" topic.
public static void Run([KafkaTrigger(
    "BrokerKafka",
    "topic-kafka",
    ConsumerGroup = "topic-kafka-group0")] KafkaEventData<string> kafkaEvent,
    ILogger log)
{
    var mensagem = kafkaEvent.Value.ToString();
    log.LogInformation($"Dados recebidos via Topic do Apache Kafka: {mensagem}");
}
// Logs each message received from the "topic-azure-functions" topic.
public static void Run([KafkaTrigger(
    "BrokerKafka",
    "topic-azure-functions",
    ConsumerGroup = "topic-azure-functions-group")] KafkaEventData<string> kafkaEvent,
    ILogger log)
{
    var mensagemRecebida = kafkaEvent.Value.ToString();
    log.LogInformation($"Azure Functions + Apache Kafka | Mensagem recebida: {mensagemRecebida}");
}
// A builder expecting an int key must reject an event keyed with a string.
public void When_Event_Has_Key_Of_Wrong_Type_Should_Fail()
{
    // Arrange: string-keyed event, int-keyed builder.
    var eventData = new KafkaEventData<string, string>("test", "test");
    var sut = new KafkaMessageBuilder<int, string>();

    // Act + Assert
    var exception = Assert.Throws<ArgumentException>(() => sut.BuildFrom(eventData));
    Assert.StartsWith("Key value is not of the expected type", exception.Message);
}
// Wraps a single event in a trigger input whose selector points at it
// (index 0 of a one-element array).
public static KafkaTriggerInput New(KafkaEventData eventData)
{
    var input = new KafkaTriggerInput
    {
        Events = new[] { eventData },
        _selector = 0,
    };
    return input;
}
// Copy-constructing from another event must preserve whether that event's
// headers were writable or read-only.
public void When_KafkaEventData_Is_Created_From_IKafkaEventData_The_Headers_Keep_The_Writeability()
{
    // Arrange: one source with default (writable) headers, one with the shared
    // read-only empty header set.
    var writableCopy = new KafkaEventData<string>(new KafkaEventData<string>());
    var readOnlyCopy = new KafkaEventData<string>(new KafkaEventData<string>(KafkaEventDataHeaders.EmptyReadOnly));

    // Act + Assert: the writable copy accepts a header; the read-only one throws.
    var addResult = Record.Exception(() => writableCopy.Headers.Add("test", null));
    Assert.Null(addResult);
    Assert.Throws<NotSupportedException>(() => readOnlyCopy.Headers.Add("test", null));
}
// An event built with the parameterless constructor must expose writable headers.
public void When_KeyedKafkaEventData_Is_Created_With_Empty_Constructor_The_Headers_Can_Be_Set()
{
    var headerValue = "testValue";
    var eventData = new KafkaEventData<string, string>();
    eventData.Headers.Add("testKey", Encoding.UTF8.GetBytes(headerValue));

    Assert.Equal(headerValue, Encoding.UTF8.GetString(eventData.Headers.GetFirst("testKey")));
}
// Consumes order messages from "devboost.delivery.pedido" and forwards each to
// the order service.
// Fix: was a void method blocking on RealizarPedido(...).Wait() — sync-over-async
// risks thread-pool starvation and wraps failures in AggregateException. The
// Functions runtime awaits Task-returning functions, so go async all the way.
public async Task SampleConsumer([KafkaTrigger(
    "omv.serveblog.net:29092",
    "devboost.delivery.pedido",
    ConsumerGroup = "CriarPedido",
    Protocol = BrokerProtocol.Plaintext)] KafkaEventData<string> kafkaEvent,
    ILogger logger)
{
    var valuePedido = kafkaEvent.Value.ToString();
    logger.LogInformation(valuePedido);
    await _pedidoService.RealizarPedido(Token, valuePedido);
}
// Consumes payment-status messages and forwards each to the payment service.
// Fix: was a void method blocking on AtualizaStatusPedido(...).Wait() —
// sync-over-async risks thread-pool starvation and wraps failures in
// AggregateException. The Functions runtime awaits Task-returning functions.
public async Task PagamentoConsumer([KafkaTrigger(
    "omv.serveblog.net:29092",
    "devboost.delivery.pagamento.kafka",
    ConsumerGroup = "AtualziarPagamento",
    Protocol = BrokerProtocol.Plaintext)] KafkaEventData<string> kafkaEvent,
    ILogger logger)
{
    var valueStatusPagamento = kafkaEvent.Value.ToString();
    logger.LogInformation(valueStatusPagamento);
    await _pagamentoService.AtualizaStatusPedido(Token, valueStatusPagamento);
}
// Building a message from a headerless event must leave the message's
// Headers collection null rather than allocating an empty one.
public void When_Event_Has_No_Headers_No_Headers_Are_Added_To_Message()
{
    // Arrange: an event carrying no headers at all.
    var sourceEvent = new KafkaEventData<string> { };
    var sut = new KafkaMessageBuilder<string, string>();

    // Act
    var builtMessage = sut.BuildFrom(sourceEvent);

    // Assert
    Assert.NotNull(builtMessage);
    Assert.Null(builtMessage.Headers);
}
// Logs each message received from the Event Hubs-backed "topic-testes" topic
// over SASL/SSL.
public static void Run([KafkaTrigger(
    "BrokerKafka",
    "topic-testes",
    ConsumerGroup = "azfunctions0",
    Protocol = BrokerProtocol.SaslSsl,
    AuthenticationMode = BrokerAuthenticationMode.Plain,
    Username = "******",
    Password = "******"
    )] KafkaEventData<string> kafkaEvent,
    ILogger log)
{
    var mensagemRecebida = kafkaEvent.Value.ToString();
    log.LogInformation($"Azure Functions + Apache Kafka + Azure Event Hubs | Mensagem recebida: {mensagemRecebida}");
}
// Logs each message received from the topic named by the %EHTOPIC% app setting.
public void SampleConsumer(
    [KafkaTrigger(
        "LocalBroker",
        "%EHTOPIC%",
        ConsumerGroup = "$Default",
        Username = "******",
        Password = "******",
        Protocol = BrokerProtocol.SaslSsl,
        AuthenticationMode = BrokerAuthenticationMode.Plain)] KafkaEventData<string> kafkaEvent,
    ILogger logger)
{
    var payload = kafkaEvent.Value.ToString();
    logger.LogInformation(payload);
}
// An event built from a ConsumeResult without headers must reuse the shared
// read-only empty header instance (no per-event allocation) and reject writes.
public void When_KeyedKafkaEventData_Is_Created_From_ConsumeResult_Without_Headers_The_Headers_Are_Static()
{
    // Arrange: a consume result whose message carries no headers.
    var consumeResult = new ConsumeResult<string, string>
    {
        Message = new Message<string, string>()
    };

    // Act
    var eventData = new KafkaEventData<string, string>(consumeResult);

    // Assert: same singleton instance, and it is immutable.
    Assert.Same(KafkaEventDataHeaders.EmptyReadOnly, eventData.Headers);
    Assert.Throws<NotSupportedException>(() => eventData.Headers.Add("test", null));
}
// Consumes order-created events, logs the raw payload, and dispatches the
// deserialised event to the order command handler.
public async Task PedidoConsumerTrigger(
    [KafkaTrigger(
        "%BootstrapServers%",
        "%Topic%",
        ConsumerGroup = "%ConsumerGroup%")] KafkaEventData<string> kafkaEvent,
    ILogger logger)
{
    var payload = kafkaEvent.Value;
    logger.LogInformation(payload.ToString());

    var pedidoCriado = JsonConvert.DeserializeObject<PedidoCriadoEvent>(payload);
    await _pedidoCommand.PedidoAsync(pedidoCriado);
}
// Logs each message received from the Confluent Cloud "users" topic.
public static void ConfluentCloudStringTrigger(
    [KafkaTrigger(
        "BootstrapServer",
        "users",
        ConsumerGroup = "<ConsumerGroup>",
        Protocol = BrokerProtocol.SaslSsl,
        AuthenticationMode = BrokerAuthenticationMode.Plain,
        Username = "******",
        Password = "******",
        SslCaLocation = "confluent_cloud_cacert.pem")] KafkaEventData<string> kafkaEvent,
    ILogger logger)
{
    var payload = kafkaEvent.Value.ToString();
    logger.LogInformation(payload);
}
// Deserialises each "topic-acoes" event into an Acao, validates it, and saves
// valid records; validation errors are logged individually.
public void Run([KafkaTrigger(
    "BrokerKafka",
    "topic-acoes",
    ConsumerGroup = "processar_acoes-sql",
    Protocol = BrokerProtocol.SaslSsl,
    AuthenticationMode = BrokerAuthenticationMode.Plain,
    Username = "******",
    Password = "******"
    )] KafkaEventData<string> kafkaEvent,
    ILogger log)
{
    string dados = kafkaEvent.Value.ToString();
    log.LogInformation($"ProcessarAcoesSql - Dados: {dados}");

    Acao acao = null;
    try
    {
        // Case-insensitive property matching so payload casing doesn't matter.
        acao = JsonSerializer.Deserialize<Acao>(dados,
            new JsonSerializerOptions() { PropertyNameCaseInsensitive = true });
    }
    catch (Exception ex)
    {
        // Fix: the bare catch swallowed the exception details; pass the exception
        // to the logger so the cause and stack trace are recorded.
        log.LogError(ex, "ProcessarAcoesSql - Erro durante a deserializacao!");
    }

    // null means deserialisation failed (or literal "null" payload) — skip.
    if (acao != null)
    {
        var validationResult = new AcaoValidator().Validate(acao);
        if (validationResult.IsValid)
        {
            log.LogInformation($"ProcessarAcoesSql - Dados pos formatacao: {JsonSerializer.Serialize(acao)}");
            _repository.Save(acao);
            log.LogInformation("ProcessarAcoesSql - Acao registrada com sucesso!");
        }
        else
        {
            log.LogError("ProcessarAcoesSql - Dados invalidos para a Acao");
            foreach (var error in validationResult.Errors)
            {
                log.LogError($"ProcessarAcoesSql - {error.ErrorMessage}");
            }
        }
    }
}
// Headers copied from a ConsumeResult must be visible on the event but frozen
// against further writes.
public void When_KeyedKafkaEventData_Is_Created_From_ConsumeResult_The_Headers_Can_Not_Be_Set()
{
    // Arrange: a consume result whose message carries a single header.
    var message = new Message<string, string>
    {
        Headers = new Headers()
    };
    message.Headers.Add("testKey", null);
    var consumeResult = new ConsumeResult<string, string>
    {
        Message = message
    };

    // Act
    var eventData = new KafkaEventData<string, string>(consumeResult);

    // Assert: header came through, and the collection is read-only.
    Assert.Equal(1, eventData.Headers.Count);
    Assert.Throws<NotSupportedException>(() => eventData.Headers.Add("test", null));
}
// Publishes one keyless string event per content item to the given topic.
public static async Task Produce_AsyncCollector_String_Without_Key(
    string topic,
    IEnumerable<string> content,
    [Kafka(BrokerList = "LocalBroker")] IAsyncCollector<KafkaEventData<string>> output)
{
    foreach (var item in content)
    {
        await output.AddAsync(new KafkaEventData<string>()
        {
            Topic = topic,
            Value = item,
        });
    }
}
// Publishes one keyless event per content item to the given topic using the
// non-generic event type.
public static async Task SendToStringTopic(
    string topic,
    IEnumerable<string> content,
    [Kafka(BrokerList = "LocalBroker")] IAsyncCollector<KafkaEventData> output)
{
    foreach (var item in content)
    {
        await output.AddAsync(new KafkaEventData()
        {
            Topic = topic,
            Value = item,
        });
    }
}
// Deserialises each "topic-acoes" event into an Acao, validates it, and saves
// valid records; validation errors are logged individually.
public void Run([KafkaTrigger(
    "ApacheKafkaConnection",
    "topic-acoes",
    ConsumerGroup = "processador-acoes-mongo")] KafkaEventData<string> kafkaEvent,
    ILogger log)
{
    string dados = kafkaEvent.Value.ToString();
    log.LogInformation($"ProcessarAcoes - Dados: {dados}");

    Acao acao = null;
    try
    {
        // Case-insensitive property matching so payload casing doesn't matter.
        acao = JsonSerializer.Deserialize<Acao>(dados,
            new JsonSerializerOptions() { PropertyNameCaseInsensitive = true });
    }
    catch (Exception ex)
    {
        // Fix: the bare catch swallowed the exception details; pass the exception
        // to the logger so the cause and stack trace are recorded.
        log.LogError(ex, "ProcessarAcoes - Erro durante a deserializacao!");
    }

    // null means deserialisation failed (or literal "null" payload) — skip.
    if (acao != null)
    {
        var validationResult = new AcaoValidator().Validate(acao);
        if (validationResult.IsValid)
        {
            log.LogInformation($"ProcessarAcoes - Dados pos formatacao: {JsonSerializer.Serialize(acao)}");
            _repository.Save(acao);
            log.LogInformation("ProcessarAcoes - Acao registrada com sucesso!");
        }
        else
        {
            log.LogError("ProcessarAcoes - Dados invalidos para a Acao");
            foreach (var error in validationResult.Errors)
            {
                log.LogError($"ProcessarAcoes - {error.ErrorMessage}");
            }
        }
    }
}
// Consumes order-updated events and forwards each to the delivery service;
// failures are logged and the event is dropped (no retry/rethrow).
public async Task PedidoStatusTrigger(
    [KafkaTrigger(
        "%BootstrapServers%",
        "%TopicPedidoAtualizado%",
        ConsumerGroup = "%ConsumerGroup%")] KafkaEventData<string> kafkaEvent,
    ILogger logger)
{
    logger.LogInformation(kafkaEvent.Value.ToString());

    try
    {
        var @event = JsonConvert.DeserializeObject<PedidoAtualizadoEvent>(kafkaEvent.Value);
        await _pedidoEntregaService.AtualizarPedidoAsync(@event);
    }
    catch (Exception ex)
    {
        // Fix: only ex.Message was logged, discarding the exception type and
        // stack trace; pass the exception itself to the logger.
        logger.LogError(ex, ex.Message);
    }
}