/// <summary>
/// Creates the offset manager for the assigned partitions, starts the configured
/// number of consumer workers, and finally initializes the distribution strategy
/// over the started workers.
/// </summary>
/// <param name="consumer">Consumer whose configuration drives worker count and commit interval.</param>
/// <param name="partitions">Partitions assigned to this pool.</param>
/// <param name="stopCancellationToken">Token that stops the workers.</param>
public async Task StartAsync(
    IKafkaConsumer consumer,
    IEnumerable<TopicPartition> partitions,
    CancellationToken stopCancellationToken = default)
{
    var committer = new OffsetCommitter(
        consumer,
        consumer.Configuration.AutoCommitInterval,
        this.logHandler);

    this.offsetManager = new OffsetManager(committer, partitions);

    // Create, register, and start one worker per configured slot.
    var startTasks = new List<Task>(consumer.Configuration.WorkerCount);

    for (var workerId = 0; workerId < consumer.Configuration.WorkerCount; workerId++)
    {
        var worker = new ConsumerWorker(
            consumer,
            workerId,
            this.offsetManager,
            this.logHandler,
            this.middlewareExecutor);

        this.workers.Add(worker);
        startTasks.Add(worker.StartAsync(stopCancellationToken));
    }

    await Task.WhenAll(startTasks).ConfigureAwait(false);

    // The strategy sees the full, already-started worker list.
    this.distributionStrategy = this.distributionStrategyFactory(this.dependencyResolver);
    this.distributionStrategy.Init(this.workers.AsReadOnly());
}
/// <summary>
/// Builds consumer settings for the customer-service topic, selects the Kafka
/// driver implementation, and starts the consume loop on a background task.
/// </summary>
/// <param name="handler">Message handler invoked by the consumer for each message.</param>
public static void InitializeKafkaConsumer(IHandler handler)
{
    // NOTE(review): this CancellationTokenSource is never stored or cancelled, so the
    // consumer cannot be shut down gracefully — consider keeping it in a field.
    CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();

    // (Removed a redundant `settings = null;` that was immediately overwritten.)
    ConsumerSettings settings = new ConsumerSettings()
    {
        GroupName = "CustomerServiceAutomation",
        TopicName = CustomerServiceConfig.CustomerServiceTopic,
        KafkaUri = KafkaUri,
        AutoCommit = true,
        BatchRead = false,
        ReadAsync = true,
        ReadFromEnd = false,
        MaxQueueSize = 50000,
        MaxBatchSize = 2000
    };

    // Pick the consumer implementation for the configured driver; Java is the fallback.
    if (KafkaDriver.Equals(RPL))
    {
        CommonConsumer = new RPLConsumer();
    }
    else if (KafkaDriver.Equals(MISAKAI))
    {
        CommonConsumer = new MisakaiConsumer();
    }
    else
    {
        CommonConsumer = new JavaConsumer();
    }

    LogResult.Report(Log, "log_ForInfo",
        "Kafka Consumer Initialized For " + "Topic: " + CustomerServiceConfig.CustomerServiceTopic);

    // LongRunning hints the scheduler to use a dedicated thread instead of occupying
    // a thread-pool thread for the lifetime of the consume loop.
    KafkaConsumer = Task.Factory.StartNew(
        () => CommonConsumer.StartConsuming(handler, settings, cancellationTokenSource.Token),
        cancellationTokenSource.Token,
        TaskCreationOptions.LongRunning,
        TaskScheduler.Default);
}
/// <summary>
/// Initializes a new instance of the <see cref="PayDayConnector"/> class.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
public PayDayConnector(
    ILogger<PayDayConnector> logger,
    IKafkaConsumer consumer,
    IKafkaConfiguration configuration,
    IMessageHandler<IOutputManager> outputHandler)
{
    // Fail fast on DI misconfiguration instead of a later NullReferenceException.
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _consumer = consumer ?? throw new ArgumentNullException(nameof(consumer));
    _configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
    _outputHandler = outputHandler ?? throw new ArgumentNullException(nameof(outputHandler));
}
/// <summary>
/// Handles a newly added consumer: instantiates a list-item prefab under the
/// scroll view content and configures it with the consumer's config.
/// </summary>
private void OnConsumerAdded(IKafkaConsumer kafkaConsumer)
{
    Debug.Log("Picked Up");

    var itemObject = Instantiate(_consumerListItemPrefab, _scrollViewContent.transform);
    var listItem = itemObject.GetComponent<ConsumerListItem>();
    listItem.Configure(kafkaConsumer.Config);
}
/// <summary>
/// Initializes the receiver from configured options; the receive timeout is read
/// once (milliseconds) and cached as a <see cref="TimeSpan"/>.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when a dependency is null.</exception>
// NOTE(review): the logger parameter is ILogger<FileSender>, not ILogger<MessageReceiver> —
// looks like a copy-paste from FileSender; confirm before changing the DI contract.
public MessageReceiver(
    IOptions<MessageReceiverOptions> messageReceiverOptions,
    IKafkaConsumer<K, V> kafkaConsumer,
    ILogger<FileSender> logger)
{
    if (messageReceiverOptions == null)
    {
        // Fail fast: .Value below would otherwise throw an opaque NullReferenceException.
        throw new ArgumentNullException(nameof(messageReceiverOptions));
    }

    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _kafkaConsumer = kafkaConsumer ?? throw new ArgumentNullException(nameof(kafkaConsumer));
    _messageReceiverOptions = messageReceiverOptions.Value;
    _receiveTimeout = TimeSpan.FromMilliseconds(_messageReceiverOptions.ReceiveTimeout);
}
/// <summary>
/// Initializes the mirror processor; the environment config is resolved eagerly
/// at construction time (as before), so provider failures surface immediately.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when a dependency is null.</exception>
public MirrorProcessor(
    IKafkaConsumer kafkaConsumer,
    IKafkaProducer kafkaProducer,
    IMirrorTopicHandler mirrorTopicHandler,
    IEnvironmentConfigProvider environmentConfigProvider)
{
    if (environmentConfigProvider == null)
    {
        throw new ArgumentNullException(nameof(environmentConfigProvider));
    }

    _kafkaConsumer = kafkaConsumer ?? throw new ArgumentNullException(nameof(kafkaConsumer));
    _kafkaProducer = kafkaProducer ?? throw new ArgumentNullException(nameof(kafkaProducer));
    _mirrorTopicHandler = mirrorTopicHandler ?? throw new ArgumentNullException(nameof(mirrorTopicHandler));
    _config = environmentConfigProvider.GetEnvironmentConfig();
}
/// <summary>
/// Base pipeline wiring: forwards the service provider to the base class, stores
/// the logger, and creates the consumer from the supplied factory.
/// </summary>
protected AbstractKafkaPipeline(
    ILogger<AbstractKafkaPipeline<TConsumerKey, TConsumerValue, TPipe>> logger,
    IConsumerFactory<TConsumerKey, TConsumerValue> consumerFactory,
    IServiceProvider serviceProvider)
    : base(serviceProvider)
{
    Logger = logger;
    Consumer = consumerFactory.CreateConsumer();
}
/// <summary>
/// Initializes the file receiver from configured options; the receive timeout is
/// read once (milliseconds) and cached as a <see cref="TimeSpan"/>.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when a dependency is null.</exception>
// NOTE(review): the logger parameter is ILogger<FileSender>, not ILogger<FileReceiver> —
// looks like a copy-paste from FileSender; confirm before changing the DI contract.
public FileReceiver(
    IOptions<FileReceiverOptions> fileReceiverOptions,
    IKafkaConsumer<string, byte[]> kafkaConsumer,
    ILogger<FileSender> logger)
{
    if (fileReceiverOptions == null)
    {
        // Fail fast: .Value below would otherwise throw an opaque NullReferenceException.
        throw new ArgumentNullException(nameof(fileReceiverOptions));
    }

    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _kafkaConsumer = kafkaConsumer ?? throw new ArgumentNullException(nameof(kafkaConsumer));
    _fileReceiverOptions = fileReceiverOptions.Value;
    _receiveTimeout = TimeSpan.FromMilliseconds(_fileReceiverOptions.ReceiveTimeout);
}
/// <summary>
/// Wires the message consumer facade to its underlying Kafka consumer,
/// worker pool, and log handler.
/// </summary>
public MessageConsumer(
    IKafkaConsumer consumer,
    IConsumerWorkerPool workerPool,
    ILogHandler logHandler)
{
    this.consumer = consumer;
    this.workerPool = workerPool;
    this.logHandler = logHandler;
}
/// <summary>
/// Initializes the connector with allocation and meta output handlers, then
/// registers the handlers once all dependencies are stored.
/// </summary>
public PayDayConnector(
    ILogger<PayDayConnector> logger,
    IKafkaConsumer consumer,
    IKafkaConfiguration configuration,
    IMessageHandler<IAllocationManager> outputAllocationHandler,
    IMessageHandler<IMetaManager> outputMetaHandler)
{
    _logger = logger;
    _consumer = consumer;
    _configuration = configuration;
    _outputAllocationHandler = outputAllocationHandler;
    _outputMetaHandler = outputMetaHandler;

    // Must run after all fields above are assigned.
    setHandlers();
}
/// <summary>
/// Initializes a new instance of the <see cref="VotingDatabase"/> class.
/// </summary>
/// <param name="context">The stateless service context, forwarded to the base class.</param>
/// <param name="votingDatabaseMessageHandler">Handler invoked for database messages.</param>
/// <param name="votingDatabaseParameters">The database consumer parameters.</param>
/// <param name="kafkaConsumer">The Kafka consumer used to read messages.</param>
public VotingDatabase(
    StatelessServiceContext context,
    IVotingDatabaseMessageHandler votingDatabaseMessageHandler,
    VotingDatabaseParameters votingDatabaseParameters,
    IKafkaConsumer<string, string> kafkaConsumer)
    : base(context)
{
    this.votingDatabaseParameters = votingDatabaseParameters;
    this.kafkaConsumer = kafkaConsumer;
    this.votingDatabaseMessageHandler = votingDatabaseMessageHandler;
}
/// <summary>
/// Captures everything a message context needs: the consumer, the offset manager,
/// the raw consume result, and the token that signals worker shutdown.
/// </summary>
public MessageContextConsumer(
    IKafkaConsumer consumer,
    IOffsetManager offsetManager,
    ConsumeResult<byte[], byte[]> kafkaResult,
    CancellationToken workerStopped)
{
    this.WorkerStopped = workerStopped;
    this.consumer = consumer;
    this.offsetManager = offsetManager;
    this.kafkaResult = kafkaResult;
}
/// <summary>
/// Initializes a new instance of the <see cref="InitiateKycService"/> class.
/// All dependencies are null-checked via <c>Guard</c>; the "kyc" named HTTP
/// client is resolved once and cached.
/// </summary>
public InitiateKycService(
    IKafkaConsumer<string, InitiateKycResponseModel> consumer,
    IIdentityServerClient identityServerClient,
    IHttpClientFactory httpClientFactory,
    ILogger<InitiateKycService> logger)
{
    _consumer = Guard.IsNotNull(consumer, nameof(consumer));
    _identityServerClient = Guard.IsNotNull(identityServerClient, nameof(identityServerClient));

    Guard.IsNotNull(httpClientFactory, nameof(httpClientFactory));
    _httpClient = httpClientFactory.CreateClient("kyc");

    _logger = Guard.IsNotNull(logger, nameof(logger));
}
/// <summary>
/// Wires the async message-queue service to its producer pool, serializer,
/// consumer, and service provider.
/// </summary>
public KafkaAsyncMessageQueueService(
    IKafkaProducerPool connectionPool,
    ILogger<KafkaAsyncMessageQueueService> logger,
    IBinarySerializer serializer,
    IKafkaConsumer consumer,
    IServiceProvider serviceProvider)
{
    _connectionPool = connectionPool;
    _logger = logger;
    _serializer = serializer;
    _consumer = consumer;
    _serviceProvider = serviceProvider;
}
/// <summary>
/// Initializes a new instance of the <see cref="VerifyIdentityService"/> class.
/// Dependencies are null-checked via <c>Guard</c>; the "identity" named HTTP
/// client is resolved once and cached.
/// </summary>
public VerifyIdentityService(
    IKafkaConsumer<string, VerificationResponseModel> consumer,
    IIdentityServerClient identityServerClient,
    IHttpClientFactory httpClientFactory,
    IMapper mapper,
    ILogger<VerifyIdentityService> logger)
{
    _consumer = Guard.IsNotNull(consumer, nameof(consumer));
    _identityServerClient = Guard.IsNotNull(identityServerClient, nameof(identityServerClient));

    Guard.IsNotNull(httpClientFactory, nameof(httpClientFactory));
    _httpClient = httpClientFactory.CreateClient("identity");

    _mapper = Guard.IsNotNull(mapper, nameof(mapper));
    _logger = Guard.IsNotNull(logger, nameof(logger));
}
/// <summary>
/// Creates a worker with a bounded message buffer sized from the consumer
/// configuration; messages beyond the buffer size apply back-pressure.
/// </summary>
public ConsumerWorker(
    IKafkaConsumer consumer,
    int workerId,
    IOffsetManager offsetManager,
    ILogHandler logHandler,
    IMiddlewareExecutor middlewareExecutor)
{
    this.Id = workerId;
    this.consumer = consumer;
    this.offsetManager = offsetManager;
    this.logHandler = logHandler;
    this.middlewareExecutor = middlewareExecutor;

    // Bounded channel caps in-flight messages per worker at the configured buffer size.
    this.messagesBuffer =
        Channel.CreateBounded<ConsumeResult<byte[], byte[]>>(consumer.Configuration.BufferSize);
}
/// <summary>
/// Initializes a new instance of the <see cref="CheckMrzStatusService"/> class.
/// Dependencies are null-checked via <c>Guard</c>; the "kyc" named HTTP client
/// is resolved once and cached.
/// </summary>
public CheckMrzStatusService(
    IKafkaConsumer<string, CheckMrzStatusResponseModel> consumer,
    IIdentityServerClient identityServerClient,
    IHttpClientFactory httpClientFactory,
    IMapper mapper,
    ILogger<CheckMrzStatusService> logger)
{
    _consumer = Guard.IsNotNull(consumer, nameof(consumer));
    _identityServerClient = Guard.IsNotNull(identityServerClient, nameof(identityServerClient));

    Guard.IsNotNull(httpClientFactory, nameof(httpClientFactory));
    _httpClient = httpClientFactory.CreateClient("kyc");

    _mapper = Guard.IsNotNull(mapper, nameof(mapper));
    _logger = Guard.IsNotNull(logger, nameof(logger));
}
/// <summary>
/// Initializes the UKRLP provider worker and registers the topic message handler.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when a dependency is null.</exception>
public Worker(
    IKafkaConsumer<string, Provider> ukrlpConsumer,
    IFeProviderManager providerManager,
    IOptions<DataServicesPlatformConfiguration> options,
    IMapper mapper,
    ILogger<Worker> logger)
{
    if (options == null)
    {
        // Fail fast: options.Value below would otherwise throw an opaque NullReferenceException.
        throw new ArgumentNullException(nameof(options));
    }

    _ukrlpConsumer = ukrlpConsumer ?? throw new ArgumentNullException(nameof(ukrlpConsumer));
    _providerManager = providerManager ?? throw new ArgumentNullException(nameof(providerManager));
    _mapper = mapper ?? throw new ArgumentNullException(nameof(mapper));
    _configuration = options.Value;
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));

    // Register the handler last, once all dependencies are in place.
    _ukrlpConsumer.SetMessageHandler(ProcessMessageFromTopic);
}
/// <summary>
/// Initializes the SLD learner worker and registers the topic message handler.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when a dependency is null.</exception>
public Worker(
    IKafkaConsumer<string, Sld.Learner> sldConsumer,
    ILearnerManager learnerManager,
    ILearningDeliveryManager learningDeliveryManager,
    IOptions<DataServicesPlatformConfiguration> options,
    IMapper mapper,
    ILogger<Worker> logger)
{
    if (options == null)
    {
        // Fail fast: options.Value below would otherwise throw an opaque NullReferenceException.
        throw new ArgumentNullException(nameof(options));
    }

    _sldConsumer = sldConsumer ?? throw new ArgumentNullException(nameof(sldConsumer));
    _learnerManager = learnerManager ?? throw new ArgumentNullException(nameof(learnerManager));
    _learningDeliveryManager = learningDeliveryManager ?? throw new ArgumentNullException(nameof(learningDeliveryManager));
    _mapper = mapper ?? throw new ArgumentNullException(nameof(mapper));
    _configuration = options.Value;
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));

    // Register the handler last, once all dependencies are in place.
    _sldConsumer.SetMessageHandler(ProcessMessageFromTopic);
}
/// <summary>
/// Producer API controller. Reads the target topic name from configuration key
/// "ConfigProperties:Kafka:TopicName" at construction time.
/// </summary>
public KafkaProducerController(
    IConfiguration configuration,
    ILogger<KafkaProducerController> logger,
    IKafkaProducer producer,
    ISynchronzationUtil synchronzationUtil,
    IKafkaConsumer consumer,
    IMapper mapper)
{
    // (Removed dead, commented-out consumer-thread wiring that was never enabled.)
    this._configuration = configuration;
    this._logger = logger;
    this._producer = producer;
    this._synchronzationUtil = synchronzationUtil;
    this._consumer = consumer;
    this._mapper = mapper;

    topicName = _configuration["ConfigProperties:Kafka:TopicName"];
    _logger.LogInformation("Constructor called");
}
/// <summary>
/// Runs the consume loop for the given consumer's topic until cancellation or
/// a fatal error, then closes the underlying Confluent consumer.
/// Fix: the <see cref="ConsumeException"/> handler previously wrapped the whole
/// loop, so a single bad message terminated consumption; it is now inside the
/// loop and consumption continues. Cancellation is also no longer logged as an error.
/// </summary>
/// <param name="kafkaConsumer">Project consumer supplying the topic and the message handler.</param>
/// <param name="cancellationToken">Token that stops the loop.</param>
private async Task ConsumeInternalAsync(IKafkaConsumer kafkaConsumer, CancellationToken cancellationToken)
{
    var builder = new ConsumerBuilder<string, string>(_configOptions.ConsumerConfig());
    using IConsumer<string, string> consumer = builder.Build();

    try
    {
        consumer.Subscribe(kafkaConsumer.Topic);

        while (!_cancelled && !cancellationToken.IsCancellationRequested)
        {
            try
            {
                ConsumeResult<string, string> consumed = consumer.Consume(cancellationToken);
                _logger.LogInformation($"{kafkaConsumer.GetType().Name}: consumed");
                await kafkaConsumer.ConsumeAsync(consumed.Message.Value);
            }
            catch (ConsumeException e)
            {
                // Log and keep consuming — one poison message must not stop the loop.
                _logger.LogError(e, kafkaConsumer.Topic +
                    $"\r\nAn exception during consuming\r\n" +
                    $"Reason: {e.Error.Reason}\r\n" +
                    $"Consumer is being closed");
            }
        }
    }
    catch (OperationCanceledException)
    {
        // Normal shutdown: Consume throws when the token is cancelled — not an error.
    }
    catch (Exception exception)
    {
        _logger.LogError(
            exception,
            $"Topic: {kafkaConsumer.Topic}\r\n" +
            $"An exception during consuming\r\n");
    }
    finally
    {
        _logger.LogInformation($"{kafkaConsumer.Topic}. Consumer is being closed");
        consumer.Close();
    }
}
/// <summary>
/// Test fixture setup: builds a host with a Kafka producer and consumer bound to
/// the same topic, starts it, and resolves the services used by the tests.
/// </summary>
public async Task InitializeAsync()
{
    // Both producer and consumer are bound to the same topic.
    const string topic = "new-mock-message-topic";

    _host = Host.CreateDefaultBuilder()
        .ConfigureServices((ctx, services) => services
            .Configure<KafkaProviderOptions>(
                ctx.Configuration.GetSection(KafkaProviderOptions.DefaultSectionName))
            .AddActivityContextAccessor()
            .AddKafkaProducer<string, MockMessage>(options => options
                .FromConfiguration(ctx.Configuration)
                .WithJson()
                .WithTopic(topic))
            .AddKafkaConsumer<string, MockMessage>(options => options
                .FromConfiguration(ctx.Configuration)
                .WithJson()
                .WithTopic(topic)))
        .Build();

    await _host.StartAsync();

    _kafkaProducer = _host.Services.GetRequiredService<IKafkaProducer<string, MockMessage>>();
    _kafkaConsumer = _host.Services.GetRequiredService<IKafkaConsumer<string, MockMessage>>();
    _activityContextAccessor = _host.Services.GetRequiredService<IActivityContextAccessor>();
    _semaphore = new SemaphoreSlim(0, 1);
}
/// <summary>
/// Starts two background Kafka consume loops (selection count and selection
/// synchronization) that forward deserialized messages to connected sockets.
/// Fix: the two near-identical loop bodies are deduplicated into
/// <see cref="StartConsumerLoop{TPayload}"/>, and the loop now observes the
/// cancellation token instead of `while (true)`.
/// </summary>
public Task StartAsync(CancellationToken cancellationToken)
{
    StartConsumerLoop<SelectionSummaryKafkaMessage>(
        "consumer-selection-count",
        "selection_count_calculated",
        "1. ",
        m => m.UserId,
        cancellationToken);

    StartConsumerLoop<SelectionSummaryKafkaMessage[]>(
        "consumer-selection-synchronization",
        "selection_synchronization_calculated",
        "2. ",
        m => m.First().UserId,
        cancellationToken);

    return Task.CompletedTask;
}

/// <summary>
/// Runs one consume loop on a background task: subscribes the named consumer,
/// deserializes each message to <typeparamref name="TPayload"/>, and forwards a
/// <see cref="SocketMessage"/> with <paramref name="messageCode"/> to the user
/// identified by <paramref name="userIdSelector"/>.
/// </summary>
/// <param name="consumerName">Name passed to the consumer provider.</param>
/// <param name="messageCode">Code placed on the outgoing socket message.</param>
/// <param name="logPrefix">Prefix for the per-message information log line.</param>
/// <param name="userIdSelector">Extracts the target user id from the payload.</param>
/// <param name="cancellationToken">Token that stops the loop.</param>
private void StartConsumerLoop<TPayload>(
    string consumerName,
    string messageCode,
    string logPrefix,
    Func<TPayload, string> userIdSelector,
    CancellationToken cancellationToken)
    where TPayload : class
{
    Task.Run(() =>
    {
        using (IKafkaConsumer<Ignore, string> c =
            this._kafkaConsumerProvider.GetKafkaConsumer<Ignore, string>(consumerName))
        {
            c.Subscribe();
            try
            {
                while (!cancellationToken.IsCancellationRequested)
                {
                    try
                    {
                        var cr = c.Consume(cancellationToken);
                        _logger.LogInformation(
                            $"{logPrefix}Consumed message '{cr.Message.Value}' at: '{cr.TopicPartitionOffset}'.");

                        TPayload kafkaSocketMessage = null;
                        try
                        {
                            kafkaSocketMessage = cr.Message?.Value?.FromJSON<TPayload>();
                        }
                        catch (Exception ex)
                        {
                            _logger.LogError($"Exception occured in KafkaSocketMessage deserialization: {ex.Message}");
                        }

                        if (kafkaSocketMessage != null)
                        {
                            SocketMessage calculationResultMessage = new SocketMessage()
                            {
                                Code = messageCode,
                                Data = kafkaSocketMessage
                            };

                            // NOTE(review): fire-and-forget send, as in the original —
                            // failures are unobserved.
                            Task.Run(async () =>
                            {
                                await _sampleSocketMessageHandler.SendMessageAsync(
                                    userIdSelector(kafkaSocketMessage),
                                    calculationResultMessage.ToJSON());
                            });
                        }
                    }
                    catch (ConsumeException e)
                    {
                        _logger.LogError($"Error occured: {e.Error.Reason}");
                    }
                }
            }
            catch (OperationCanceledException e)
            {
                _logger.LogError($"OperationCanceledException occured: {e.Message}");
                c.Close();
            }
            catch (Exception ex)
            {
                _logger.LogError($"Exception occured: {ex.Message}");
            }
        }
    }, cancellationToken);
}
// NOTE(review): this method looks broken as written:
//  - the declared return value is never produced (the while(true) loop never exits);
//  - each consumeResult is discarded, so consumed messages are dropped;
//  - the CancellationToken is constructed locally and can never be cancelled.
// Intent is unclear (drain loop? warm-up?); confirm with the author before changing.
private static IKafkaConsumer <TKey, TValue> InitializeConsumer <TKey, TValue>(IKafkaConsumer <TKey, TValue> consumer)
{
    // Token created inline — there is no CancellationTokenSource, so this never cancels.
    var cancelToken = new CancellationToken();
    while (true)
    {
        // Result is ignored; only the blocking Consume call's side effects remain.
        var consumeResult = consumer.Consumer.Consume(cancelToken);
    }
}
/// <summary>
/// Initializes the hosted service with its Kafka producer and consumer.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when a dependency is null.</exception>
public BackgroundHostedService(IKafkaProducer producer, IKafkaConsumer consumer)
{
    // Fail fast on DI misconfiguration instead of a later NullReferenceException.
    _producer = producer ?? throw new ArgumentNullException(nameof(producer));
    _consumer = consumer ?? throw new ArgumentNullException(nameof(consumer));
}
// NOTE(review): this only drops the field reference; if the consumer itself is
// IDisposable it is never disposed here — confirm base.Dispose() (or the DI
// container) owns its lifetime before relying on this.
public override void Dispose()
{
    _consumer = null;
    base.Dispose();
}
//TODO:: messagehandler type should represent the topic
/// <summary>
/// Initializes the connector with the calculation-engine message handler.
/// </summary>
public PayDayConnector(
    ILogger<PayDayConnector> logger,
    IKafkaConsumer consumer,
    IMessageHandler<ICalculationEngineService> calcEngineHandler)
{
    _logger = logger;
    _consumer = consumer;
    _calcEngineHandler = calcEngineHandler;
}
/// <summary>
/// Wires the consumer service to its Kafka consumer, logger, and the service
/// that creates persons from consumed messages.
/// </summary>
public ConsumerService(
    IKafkaConsumer<string, CreatePerson> kafkaConsumer,
    ILogger<ConsumerService> logger,
    ICreatePersonService createPersonService)
{
    _kafkaConsumer = kafkaConsumer;
    _logger = logger;
    _createPersonService = createPersonService;
}
/// <summary>
/// Subscribes <paramref name="handler"/> to the topic derived from the message
/// type <typeparamref name="T"/> via its topic definition.
/// </summary>
public static IMessageSubscription Subscribe<T>(
    this IKafkaConsumer consumer,
    Func<IMessage<T>, Task> handler)
    where T : class
{
    // Topic name comes from the message type's declared topic definition.
    var topic = TopicDefinition.FromType<T>().Topic;
    return consumer.Subscribe(topic, handler);
}
/// <summary>
/// Pumps messages from the Kafka consumer into the channel writer; both the
/// consumer and its id are required.
/// </summary>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="kafkaConsumer"/> or <paramref name="consumerId"/> is null.
/// </exception>
public KafkaMessagePump(
    ChannelWriter<Message<TValue>> channelWriter,
    ILogger<KafkaMessagePump<TKey, TValue>> logger,
    IKafkaConsumer<TKey, TValue> kafkaConsumer,
    string consumerId)
    : base(channelWriter, logger)
{
    if (kafkaConsumer is null)
    {
        throw new ArgumentNullException(nameof(kafkaConsumer));
    }

    if (consumerId is null)
    {
        throw new ArgumentNullException(nameof(consumerId));
    }

    _kafkaConsumer = kafkaConsumer;
    _consumerId = consumerId;
}