/// <summary>
/// Kafka-triggered function: for each event in the batch, deserializes the payload
/// and POSTs every contained accident to the backend HTTP endpoint, tracing
/// progress and failures through Application Insights.
/// </summary>
/// <param name="events">Batch of Kafka events delivered by the trigger binding.</param>
public async Task MultiItemTriggerTenPartitions(
    [KafkaTrigger("%BROKER_LIST%", "%TOPIC%", ConsumerGroup = "%CONSUMER_GROUP%")] KafkaEventData<string>[] events
    /* Use this when running on Azure Functions:
     * ,ILogger log */
    )
{
    _telemetryClient.TrackTrace("function start", SeverityLevel.Information);

    // Loop-invariant: read the backend URL once, not once per accident.
    var backendUrl = _configuration.GetValue<string>("BackEnd_URL");

    foreach (var kafkaEvent in events)
    {
        var topicData = JsonConvert.DeserializeObject<RootObject>(kafkaEvent.Value);
        var now = DateTime.UtcNow;

        // NOTE(review): consumerResult is built but not persisted anywhere in this
        // method (table output is disabled); kept for parity with the sibling function.
        var consumerResult = new ConsumerResult()
        {
            PartitionKey = GetInstanceName(),
            consumeTime = now.ToString("yyyy-MM-dd-HH:mm:ss.fff"),
            partition = kafkaEvent.Partition,
            topic = kafkaEvent.Topic,
            topicTime = kafkaEvent.Timestamp.ToString("yyyy-MM-dd-HH:mm:ss.fff"),
            offset = kafkaEvent.Offset.ToString()
        };

        foreach (var accident in topicData.Accidents)
        {
            try
            {
                var jsonstr = JsonConvert.SerializeObject(accident);
                _telemetryClient.TrackTrace(jsonstr, SeverityLevel.Information);
                var content = new StringContent(jsonstr, Encoding.UTF8, "application/json");
                var response = await client.PostAsync(backendUrl, content);

                // Surface non-success responses instead of silently discarding them
                // (the original ignored the PostAsync result entirely).
                if (!response.IsSuccessStatusCode)
                {
                    _telemetryClient.TrackTrace(
                        $"Backend POST failed: {(int)response.StatusCode} {response.ReasonPhrase}",
                        SeverityLevel.Error);
                }
            }
            catch (Exception ex)
            {
                // Log the full exception (type + stack trace), not just the message.
                _telemetryClient.TrackTrace(ex.ToString(), SeverityLevel.Error);
            }
        }
    }

    // Best-effort host identification used as the partition key.
    string GetInstanceName()
    {
        var hostname = Environment.GetEnvironmentVariable("COMPUTERNAME") ?? string.Empty;
        if (string.IsNullOrEmpty(hostname))
        {
            hostname = Environment.MachineName;
        }
        return hostname;
    }
}
/// <summary>
/// Integration-test helper: builds a consumer from the given config, subscribes to
/// <paramref name="topic"/>, consumes exactly one message, and returns it wrapped
/// in a <see cref="ConsumerResult"/>. Returns null when consumption fails.
/// </summary>
public ConsumerResult CreateConsumerAndConsumeSingleMessage(ConsumerConfig consumerConfig, string topic, CancellationToken cts)
{
    ConsumerResult result = null;
    var builder = new ConsumerBuilder<string, string>(consumerConfig);
    using (var consumer = builder.Build())
    {
        consumer.Subscribe(topic);
        try
        {
            var cr = consumer.Consume(cts);
            result = new ConsumerResult
            {
                Message = cr.Message.Value,
                TopicOffset = cr.TopicPartitionOffset
            };
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
        finally
        {
            // Close on every path — the original only closed on failure. Closing
            // before Dispose lets the consumer leave the group cleanly and commit
            // its final offsets.
            consumer.Close();
        }
    }
    return result;
}
/// <summary>
/// Kafka-triggered function: logs a ConsumerResult summary (partition, offset,
/// timestamps, latency vs. OccurrenceDate) plus a payload preview for each event.
/// </summary>
/// <param name="events">Batch of Kafka events delivered by the trigger binding.</param>
/// <param name="log">Logger restored from the commented-out parameter list — the
/// body calls <c>log.Info(...)</c>, so the method did not compile without it.</param>
public static void MultiItemTriggerTenPartitions(
    [KafkaTrigger(Broker, Topic, ConsumerGroup = "myConsumerGroup")] KafkaEventData<string>[] events,
    //[Table("kafkaExtensionTable02")] ICollector<ConsumerResult> outputTable,
    Logger log
    )
{
    foreach (var kafkaEvent in events)
    {
        var topicData = JsonConvert.DeserializeObject<Wng>(kafkaEvent.Value);
        var now = DateTime.UtcNow;
        var consumerResult = new ConsumerResult()
        {
            PartitionKey = GetInstanceName(),
            RowKey = topicData.TransactionId.ToString().PadLeft(8, '0'),
            consumeTime = now.ToString("yyyy-MM-dd-HH:mm:ss.fff"),
            timespan = (now - topicData.OccurrenceDate),
            partition = kafkaEvent.Partition,
            topic = kafkaEvent.Topic,
            topicTime = kafkaEvent.Timestamp.ToString("yyyy-MM-dd-HH:mm:ss.fff"),
            offset = kafkaEvent.Offset.ToString()
        };

        log.Info(JsonConvert.SerializeObject(consumerResult));

        // Guard: Substring(0, 100) throws ArgumentOutOfRangeException for payloads
        // shorter than 100 characters; log the whole value in that case.
        var preview = kafkaEvent.Value.Length > 100
            ? kafkaEvent.Value.Substring(0, 100)
            : kafkaEvent.Value;
        log.Info(preview);
    }

    // Best-effort host identification used as the partition key.
    string GetInstanceName()
    {
        var hostname = Environment.GetEnvironmentVariable("COMPUTERNAME") ?? string.Empty;
        if (string.IsNullOrEmpty(hostname))
        {
            hostname = Environment.MachineName;
        }
        return hostname;
    }
}
/// <summary>
/// Drains the IMPORTED_FILE_AVRO topic: consumes messages in a loop, deserializes
/// each into an <c>AvroMessage</c> and prints it, stopping when an empty (or null)
/// result signals there is nothing left to consume.
/// </summary>
protected void consumirMensagem()
{
    consumerService.Subscribe("IMPORTED_FILE_AVRO", "IMPORTED_FILE_AVRO_GROUP");
    while (true)
    {
        ConsumerResult result = consumerService.ConsumeMessage();

        // Guard against a null result as well — the consumer helper can return
        // null when consumption fails (see CreateConsumerAndConsumeSingleMessage).
        if (result == null || String.IsNullOrEmpty(result.Message))
        {
            return;
        }

        var response = JsonConvert.DeserializeObject<AvroMessage>(result.Message);

        // Bug fix: the original passed `response` as a format argument to the
        // string "Response: " which contains no {0} placeholder, so the payload
        // was never printed.
        Console.WriteLine("Response: {0}", response);
    }
}