/// <summary>
/// Starts the bulk Kafka consumer for the Datafordeler topic: buffers incoming JSON
/// object messages per topic and flushes them to PostgreSQL in batches. Also starts
/// a timer that periodically invokes <see cref="CheckBulkStatus"/>.
/// </summary>
public void SubscribeBulk()
{
    // Number of buffered messages that triggers a flush to PostgreSQL.
    const int BulkBatchSize = 10000;

    _logger.LogInformation("This is the topic {Topic}", _kafkaSetting.DatafordelereTopic);

    // Timer used to detect when the bulk stream has gone idle (handled in CheckBulkStatus).
    _bulkInsertTimer = new Timer();
    _bulkInsertTimer.Elapsed += CheckBulkStatus;
    _bulkInsertTimer.Interval = 1000;
    _bulkInsertTimer.Start();

    _consumer = Configure
        .Consumer(_kafkaSetting.DatafordelereTopic, c => c.UseKafka(_kafkaSetting.Server))
        .Serialization(s => s.DatafordelerEventDeserializer())
        .Topics(t => t.Subscribe(_kafkaSetting.DatafordelereTopic))
        .Positions(p => p.StoreInFileSystem(_kafkaSetting.PositionFilePath))
        .Handle(async (messages, context, token) =>
        {
            foreach (var message in messages)
            {
                _lastMessageReceivedBulk = DateTime.UtcNow;

                // Only JSON object payloads are buffered; any other body type is skipped.
                if (message.Body is not JsonObject body)
                {
                    continue;
                }

                // Buffer the message under its topic, creating the list on first use.
                // TryGetValue avoids the ContainsKey + indexer double lookup.
                if (!_topicList.TryGetValue(_kafkaSetting.DatafordelereTopic, out var buffered))
                {
                    buffered = new List<JsonObject>();
                    _topicList.Add(_kafkaSetting.DatafordelereTopic, buffered);
                }

                buffered.Add(body);

                // Flush once the batch threshold is reached.
                if (buffered.Count >= BulkBatchSize)
                {
                    foreach (var obj in _databaseSetting.Values)
                    {
                        var tableName = obj.Key;
                        var columns = obj.Value.Split(",");

                        // CheckObjectType filters the buffered objects down to those
                        // belonging to this table; null means nothing to write.
                        var batch = CheckObjectType(buffered, tableName);
                        if (batch != null)
                        {
                            _postgresWriter.AddToPSQL(batch, tableName, columns, _databaseSetting.ConnectionString);
                        }
                    }

                    buffered.Clear();
                }
            }
        })
        .Start();
}
/// <summary>
/// Writes a batch of JSON objects to the PostgreSQL table named after <paramref name="topic"/>.
/// </summary>
/// <param name="list">The objects to persist.</param>
/// <param name="topic">The Kafka topic, used as the destination table name.</param>
/// <param name="columns">The column names to write into.</param>
/// <returns>A completed task; the underlying write is synchronous.</returns>
private Task HandleMessages(List<JObject> list, string topic, List<string> columns)
{
    // AddToPSQL is a synchronous call; returning Task.CompletedTask keeps the
    // Task-returning signature without the overhead of an async state machine (CS1998).
    _postgresWriter.AddToPSQL(list, topic, columns);
    return Task.CompletedTask;
}