/// <summary>
/// Drains the event queue and publishes each dequeued item as JSON to the
/// system-metrics channel until <paramref name="cancellationToken"/> is signalled.
/// </summary>
/// <param name="cancellationToken">Stops the pump loop when cancelled.</param>
public async Task LogAsync(CancellationToken cancellationToken)
{
    try
    {
        while (!cancellationToken.IsCancellationRequested)
        {
            // Bound each dequeue attempt to 5 seconds, and link the caller's
            // token so cancellation interrupts a blocked Dequeue immediately
            // (the original created a standalone CTS that ignored the caller).
            using (var dequeueCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
            {
                dequeueCts.CancelAfter(TimeSpan.FromSeconds(5));

                var item = _eventQueue.Dequeue(dequeueCts.Token);
                if (item == null)
                {
                    // Queue was empty: back off before polling again. Passing
                    // the token makes the delay cancellable (the original
                    // delay could not be interrupted).
                    await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken);
                }
                else
                {
                    try
                    {
                        var json = JsonConvert.SerializeObject(item);
                        var message = new MessageBrokerMessage("message-key-1", json);
                        await _messageBrokerProducer.ProduceAsync(
                            _systemMetricsOptions.SystemMetricsChannelName, message);
                    }
                    catch (Exception)
                    {
                        // Best-effort delivery: a failed metric post must not
                        // kill the pump loop. NOTE(review): no logger field is
                        // visible on this class to record the failure —
                        // consider injecting one.
                    }
                }
            }
        }
    }
    catch (OperationCanceledException)
    {
        // Graceful shutdown: cancellation observed inside Delay simply ends
        // the loop, matching the while-condition exit path.
    }
}
/// <summary>
/// Debug/file-sink producer: logs the payload and appends it to a local file
/// instead of publishing to a real broker. The <paramref name="topic"/>
/// argument is ignored; everything goes to the single configured file.
/// </summary>
public Task ProduceAsync(string topic, MessageBrokerMessage message)
{
    // NOTE(review): this counter is not interlocked; fine for a single-threaded
    // debug sink, racy if producers run concurrently — confirm usage.
    _postEncountered++;

    // UtcNow instead of Now: machine-readable log timestamps should not depend
    // on the host's local timezone.
    _logger.LogInformation("Post saved. TS: {timestamp}, post {post}",
        DateTime.UtcNow.ToString("s"), message.JsonPayloadPayload);

    // Serialize file access across concurrent callers. The per-call FileInfo
    // indirection of the original added nothing — write to the path directly.
    lock (_lock)
    {
        using (var writer = new StreamWriter(_filePath, append: true))
        {
            writer.WriteLine(message.JsonPayloadPayload);
        }
    }

    return Task.CompletedTask;
}
/// <summary>
/// No-op producer for local runs: only logs what would have been published.
/// </summary>
public Task ProduceAsync(string topic, MessageBrokerMessage message)
{
    // Named placeholders: empty "{}" is not a valid structured-logging
    // template for Microsoft.Extensions.Logging and loses the property names.
    _logger.LogInformation("Topic {topic}, Message {message}",
        topic, message.JsonPayloadPayload);
    return Task.CompletedTask;
}
/// <summary>
/// Maps the broker-agnostic message onto the Kafka-specific envelope,
/// carrying over the key and the JSON payload unchanged.
/// </summary>
public static KafkaMessage FromMessageBrokerMessage(MessageBrokerMessage registrationRequest)
{
    var kafkaMessage = new KafkaMessage
    {
        Key = registrationRequest.Key,
        Value = registrationRequest.JsonPayloadPayload
    };
    return kafkaMessage;
}
/// <summary>
/// Serializes the notification and publishes it on the component's
/// configuration channel under the fixed "job-config-notification" key.
/// </summary>
public async Task NotifyComponentAsync(string componentConfigChannelName, object notification)
{
    var payload = JsonConvert.SerializeObject(notification);
    _logger.LogInformation("Sending message {messageJson}", payload);

    var brokerMessage = new MessageBrokerMessage("job-config-notification", payload);
    await _messageBrokerProducer.ProduceAsync(componentConfigChannelName, brokerMessage);
}
/// <summary>
/// Publishes the registration request on the configured registration channel
/// under the fixed "registration-key" key.
/// </summary>
public async Task Register(RegistrationRequest registrationRequest)
{
    var payload = JsonConvert.SerializeObject(registrationRequest);
    var brokerMessage = new MessageBrokerMessage("registration-key", payload);

    await _messageBrokerProducer.ProduceAsync(_registrationChannelName, brokerMessage);
}
/// <summary>
/// Records the fan-out in statistics, then sends the same message to every
/// output channel sequentially, awaiting each acknowledgement in turn.
/// </summary>
private async Task SendRecordToOutputs(string[] outputChannels, MessageBrokerMessage messageBrokerMessage)
{
    _logger.TrackStatistics("SendingData", new { channels = outputChannels });

    foreach (var channel in outputChannels)
    {
        await _producer.ProduceAsync(channel, messageBrokerMessage);
    }
}
/// <summary>
/// Re-sends every stored job config of the component identified by
/// <paramref name="channelModel"/> to its update channel, one message per job.
/// </summary>
private async Task ReplayComponentsJobConfigs(MessageBrokerChannelModel channelModel)
{
    var jobConfigs = await _componentRegistry.GetAllComponentJobConfigsAsync(
        channelModel.ComponentId);

    foreach (var jobConfig in jobConfigs)
    {
        var payload = JsonConvert.SerializeObject(jobConfig);
        await _producer.ProduceAsync(
            channelModel.UpdateChannelName,
            new MessageBrokerMessage("key", payload));
    }
}
/// <summary>
/// Publishes a message to the given Kafka channel, retrying indefinitely on
/// <see cref="KafkaException"/> until delivery is acknowledged or the
/// operation is cancelled.
/// </summary>
public async Task ProduceAsync(string channelName, MessageBrokerMessage message)
{
    var config = new ProducerConfig { BootstrapServers = _serverAddress };
    var producerBuilder = new ProducerBuilder<string, string>(config)
        .SetErrorHandler((producer, error) => _logger.LogWarning(error.Reason));

    try
    {
        // Build the producer ONCE. The original rebuilt it on every retry
        // iteration, tearing down and re-establishing the broker connection
        // each time (same anti-pattern as new HttpClient() per request).
        using (var producer = producerBuilder.Build())
        {
            while (true)
            {
                try
                {
                    // Awaiting blocks until the broker acknowledges delivery
                    // (at-least-once semantics at the expense of throughput).
                    await producer.ProduceAsync(
                        channelName, KafkaMessage.FromMessageBrokerMessage(message));
                    return;
                }
                catch (KafkaException e)
                {
                    _logger.LogError($"failed to deliver message: {e.Message} [{e.Error.Code}]");

                    // Back off briefly so a persistent broker outage does not
                    // turn the retry loop into a hot spin.
                    await Task.Delay(TimeSpan.FromSeconds(1));
                }
            }
        }
    }
    catch (OperationCanceledException)
    {
        // Cancelled (typically during shutdown) — drop the message.
    }
}
/// <summary>
/// Publishes each command to <paramref name="topic"/> in order, waiting
/// <paramref name="commandDelay"/> after every send.
/// </summary>
public async Task SendCommands(
    string topic,
    IEnumerable<DataAcquirerCommand> commands,
    TimeSpan commandDelay)
{
    foreach (var command in commands)
    {
        var payload = JsonConvert.SerializeObject(command);
        await _kafkaProducer.ProduceAsync(topic, new MessageBrokerMessage("some_key", payload));

        _logger.LogInformation("Command sent to the topic '{topic}'. Command: {cmd}",
            topic, command);

        // Pace the commands so downstream consumers are not flooded.
        await Task.Delay(commandDelay);
    }
}
/// <summary>
/// Debug producer: logs every message and, for the "s1" topic, warns when a
/// post id has been seen before. Nothing is actually published.
/// </summary>
public Task ProduceAsync(string topic, MessageBrokerMessage message)
{
    // Named placeholders: empty "{}" is not a valid structured-logging
    // template for Microsoft.Extensions.Logging.
    _logger.LogInformation("Topic {topic}, Message {message}",
        topic, message.JsonPayloadPayload);

    // Duplicate detection only applies to the "s1" stream. (The original
    // gated this on a local flag that was hard-coded to true, and held an
    // empty lock containing only commented-out code — both removed.)
    if (topic == "s1")
    {
        var uniPost = JsonConvert.DeserializeObject<Post>(message.JsonPayloadPayload);
        if (_encounteredIds.Contains(uniPost.PostId))
        {
            _logger.LogWarning("Post with id {id} has already been processed", uniPost.PostId);
        }
        else
        {
            _encounteredIds.Add(uniPost.PostId);
        }
    }

    return Task.CompletedTask;
}
/// <summary>
/// Consumes a batch of acquired posts: optionally translates non-English text
/// to English, builds a UniPost for each item, and forwards it to all
/// configured output channels.
/// </summary>
/// <param name="jobConfig">Job configuration, including output channels.</param>
/// <param name="dataAcquirerInputModel">Supplies the JobId stamped on each post.</param>
/// <param name="batch">Async stream of raw posts to process.</param>
/// <param name="translate">Whether non-English posts should be translated.</param>
private async Task ProcessBatch(
    DataAcquirerJobConfig jobConfig,
    DataAcquirerInputModel dataAcquirerInputModel,
    IAsyncEnumerable<DataAcquirerPost> batch,
    bool translate)
{
    var processed = 0;
    await foreach (var dataPost in batch)
    {
        LogProgress(jobConfig, processed);
        processed++;

        var postId = CalculatePostId(jobConfig, dataPost);
        var text = ClearText(dataPost.Text);
        string originalText = null;

        var needsTranslation = translate
            && dataPost.Language != null
            && dataPost.Language != "en";
        if (needsTranslation)
        {
            try
            {
                var translated = await _translationService
                    .TranslateToEnglishAsync(dataPost.Language, text);
                originalText = text;
                text = translated;
            }
            catch (DataAcquirerException ex)
            {
                // Translation failures are non-fatal: keep the untranslated text.
                _logger.TrackWarning("TranslationError", "Could not translate",
                    new
                    {
                        jobId = dataAcquirerInputModel.JobId,
                        exception = ex,
                        text
                    });
            }
        }

        var uniPost = UniPostModel.FromValues(
            postId,
            dataPost.OriginalPostId,
            text,
            originalText,
            dataPost.Language,
            dataPost.Source,
            dataPost.UserId,
            dataPost.DateTime,
            dataAcquirerInputModel.JobId,
            dataPost.Query);

        var serialized = JsonConvert.SerializeObject(uniPost);
        await SendRecordToOutputs(
            jobConfig.OutputMessageBrokerChannels,
            new MessageBrokerMessage("acquired-data-post", serialized));
    }
}