/// <summary>
/// Processes a batch of EventHub messages: captures each event's raw UTF-8 payload,
/// user properties, and selected system properties into an <see cref="EventRaw"/>,
/// appends its JSON form to the shared events buffer, then checkpoints the partition.
/// </summary>
/// <param name="context">Partition context used to checkpoint after the batch is processed.</param>
/// <param name="messages">Events received from the EventHub partition.</param>
/// <returns>The checkpoint task for this partition.</returns>
public Task ProcessEventsAsync(PartitionContext context, IEnumerable<EventData> messages)
{
    foreach (var eventData in messages)
    {
        var data = Encoding.UTF8.GetString(eventData.Body.Array, eventData.Body.Offset, eventData.Body.Count);

        EventRaw er = new EventRaw
        {
            Raw = data
        };

        // Copy user-defined properties.
        // Fix: the original condition was "Count >= 0", which is always true;
        // "> 0" expresses the actual intent (skip the loop when there are none).
        if (eventData.Properties != null && eventData.Properties.Count > 0)
        {
            foreach (string key in eventData.Properties.Keys)
            {
                // Fix: a null property value would throw NullReferenceException on
                // ToString(); the null-conditional stores null instead of crashing.
                er.Properties.Add(key, eventData.Properties[key]?.ToString());
            }
        }

        // System properties use the AMQP annotation names expected downstream.
        er.SystemProperties.Add("x-opt-sequence-number", eventData.SystemProperties.SequenceNumber.ToString());
        er.SystemProperties.Add("x-opt-offset", eventData.SystemProperties.Offset.ToString());
        er.SystemProperties.Add("x-opt-enqueued-time", eventData.SystemProperties.EnqueuedTimeUtc.ToString());

        er.Json = JsonConvert.SerializeObject(er);

        // NOTE(review): string "+=" accumulation is O(n^2) over a large batch;
        // acceptable for sampling-sized batches, revisit if volumes grow.
        _eventsData.EventsJson += er.Json + "\r\n";
        _eventsData.Events.Add(er);
    }

    return context.CheckpointAsync();
}
/// <summary>
/// Gets Sample Events
/// </summary>
/// <param name="seconds">seconds for which the sample data is fetched</param>
/// <returns>Returns EventsData object</returns>
public async Task<EventsData> GetSampleEvents(int seconds)
{
    const int numberOfDocumentsToRead = 500;
    var eventsData = new EventsData();

    foreach (var batchInput in _batchInputs)
    {
        // Secrets may be keyvault references; resolve both before use.
        var connection = Helper.GetSecretFromKeyvaultIfNeeded(batchInput.Properties.Connection);
        var wasbPath = Helper.GetSecretFromKeyvaultIfNeeded(batchInput.Properties.Path);

        if (!Uri.TryCreate(wasbPath, UriKind.Absolute, out var uri))
        {
            throw new ArgumentException($"Malformed Uri for the blob path:'{wasbPath}'. The blob path should be a wasbs url. e.g. wasbs://[email protected]/mypath");
        }

        // In a wasbs url the container name rides in the user-info segment of the Uri.
        var containerName = uri.UserInfo;
        var blobPath = uri.Host + "/" + uri.UserInfo + uri.LocalPath;
        var pathPattern = BlobHelper.GenerateRegexPatternFromPath(blobPath);
        var prefix = BlobHelper.ParsePrefix(wasbPath);

        var contents = await BlobHelper.GetLastModifiedBlobContentsInBlobPath(connection, containerName, prefix, pathPattern, numberOfDocumentsToRead).ConfigureAwait(false);

        // Wrap each blob's content as a raw event and accumulate its JSON form.
        foreach (var content in contents)
        {
            var length = content.Length.ToString();
            var er = new EventRaw
            {
                Raw = content,
                Properties = new Dictionary<string, string>() { { "Length", length } },
                SystemProperties = new Dictionary<string, string>() { { "Length", length } }
            };

            er.Json = JsonConvert.SerializeObject(er);
            eventsData.EventsJson += er.Json + "\r\n";
            eventsData.Events.Add(er);
        }
    }

    return eventsData;
}
/// <summary>
/// Gets Sample Events from Kafka by consuming messages until the sampling
/// window elapses (signaled by <see cref="StartTimer"/> setting the timeout flag).
/// </summary>
/// <param name="seconds">seconds for which the sample data is fetched</param>
/// <returns>Returns EventsData object containing the consumed events</returns>
public async Task<EventsData> GetSampleEvents(int seconds)
{
    var config = new ConsumerConfig
    {
        BootstrapServers = _brokerList,
        GroupId = _consumerGroup,
        EnableAutoCommit = false,
        SessionTimeoutMs = 6000,
        AutoOffsetReset = AutoOffsetReset.Latest
    };

    // Set the authentication for EventHub Kafka
    if (_inputType == Constants.InputType_KafkaEventHub)
    {
        config.SecurityProtocol = SecurityProtocol.SaslSsl;
        config.SaslMechanism = SaslMechanism.Plain;
        config.SaslUsername = "******";
        config.SaslPassword = _connectionString;
        config.SslCaLocation = _cacertLocation;
    }

    StartTimer(seconds);

    const int commitPeriod = 5;

    using (var consumer = new ConsumerBuilder<Ignore, string>(config).Build())
    {
        EventsData eventsData = new EventsData();
        consumer.Subscribe(_topics);

        try
        {
            while (true)
            {
                try
                {
                    var consumeResult = consumer.Consume(new TimeSpan(0, 0, seconds));

                    if (_timeout)
                    {
                        _logger.LogInformation($"Closing consumer");
                        consumer.Close();
                        return eventsData;
                    }

                    // Fix: Consume(TimeSpan) returns null when the wait elapses with no
                    // message; the original dereferenced it unconditionally and would
                    // throw NullReferenceException if the timer had not yet fired.
                    if (consumeResult == null)
                    {
                        continue;
                    }

                    if (consumeResult.IsPartitionEOF)
                    {
                        _logger.LogInformation($"Reached end of topic {consumeResult.Topic}, partition {consumeResult.Partition}, offset {consumeResult.Offset}.");
                        continue;
                    }

                    _logger.LogInformation($"Received message at {consumeResult.TopicPartitionOffset}: {consumeResult.Value}");

                    // Get raw data
                    EventRaw er = new EventRaw
                    {
                        Raw = consumeResult.Value,
                        Properties = new Dictionary<string, string>() { { "HeadersCount", consumeResult.Headers.Count.ToString() } },
                    };

                    // Set properties (using the Headers)
                    if (consumeResult.Headers != null && consumeResult.Headers.Count > 0)
                    {
                        for (int i = 0; i < consumeResult.Headers.Count; i++)
                        {
                            string key = consumeResult.Headers[i].Key;
                            string val = System.Text.Encoding.UTF8.GetString(consumeResult.Headers[i].GetValueBytes());
                            er.Properties.Add(key, val);
                        }
                    }

                    // Set the SystemProperties
                    er.SystemProperties.Add("Topic", consumeResult.Topic);
                    er.SystemProperties.Add("Partition", consumeResult.Partition.Value.ToString());
                    er.SystemProperties.Add("Offset", consumeResult.Offset.Value.ToString());
                    er.SystemProperties.Add("UtcDateTime", consumeResult.Timestamp.UtcDateTime.ToString());
                    er.SystemProperties.Add("UnixTimestampMs", consumeResult.Timestamp.UnixTimestampMs.ToString());

                    er.Json = JsonConvert.SerializeObject(er);
                    eventsData.EventsJson += er.Json + "\r\n";
                    eventsData.Events.Add(er);

                    // Auto-commit is off; commit manually every commitPeriod offsets.
                    if (consumeResult.Offset % commitPeriod == 0)
                    {
                        try
                        {
                            consumer.Commit(consumeResult);
                        }
                        catch (KafkaException e)
                        {
                            _logger.LogError($"Commit error: {e.Error.Reason}\n{e.ToString()}");
                        }
                    }
                }
                catch (ConsumeException e)
                {
                    _logger.LogError($"Consume error: {e.Error.Reason}\n{e.ToString()}");
                }
            }
        }
        catch (OperationCanceledException e)
        {
            _logger.LogInformation($"Closing consumer");
            consumer.Close();
            return eventsData;
        }
    }
}