/// <summary>
/// Verifies that the Kafka integration is disabled by default in the supplied config file.
/// </summary>
/// <param name="configFile">Path of the configuration file under test.</param>
public void Kafka_disabled_by_default(string configFile)
{
    // Arrange: load configuration from disk.
    ConfigProvider provider = GetConfigProviderFromFile(configFile);

    // Act: resolve the Kafka-specific config section.
    IKafkaConfig config = provider.GetConfig<IKafkaConfig>();

    // Assert: Kafka must be off unless explicitly enabled.
    Assert.AreEqual(false, config.Enabled, nameof(config.Enabled));
}
/// <summary>
/// Creates a producer of string payloads produced by <paramref name="serializer"/>.
/// </summary>
/// <param name="config">Kafka connection/producer settings.</param>
/// <param name="serializer">Serializer used to turn messages into strings.</param>
public KafkaProducer(IKafkaConfig config, K serializer)
{
    _config = config;
    _serializer = serializer;

    // Build the underlying Confluent producer and wire up the error callback
    // before materializing the producer instance.
    _producerBldr = new ProducerBuilder<Null, string>(config.ProducerConfig());
    _producerBldr.SetErrorHandler(ErrorHandler);
    _producer = _producerBldr.Build();
}
/// <summary>
/// Captures the API handle and resolves the Kafka config and logger for this plugin.
/// </summary>
/// <param name="api">The Nethermind API facade supplied at plugin startup.</param>
/// <returns>A completed task — initialization is synchronous.</returns>
public Task Init(INethermindApi api)
{
    _api = api;

    var (apiForInit, _) = _api.ForInit;
    _kafkaConfig = apiForInit.Config<IKafkaConfig>();
    _logger = apiForInit.LogManager.GetClassLogger();

    return Task.CompletedTask;
}
/// <summary>
/// Creates a consumer that hands deserialized messages to <paramref name="worker"/>.
/// </summary>
/// <param name="config">Kafka connection/consumer settings.</param>
/// <param name="deserializer">Deserializer for incoming message payloads.</param>
/// <param name="worker">Processor invoked for each deserialized message.</param>
public KafkaConsumer(IKafkaConfig config, T deserializer, IKafkaProcessor<T> worker)
{
    _config = config;
    // NOTE(review): the field name "_desrializer" is misspelled; rename it
    // class-wide in a follow-up (it cannot be changed from this ctor alone).
    _desrializer = deserializer;
    _worker = worker;

    // Seed empty consume state; real values arrive with the first poll.
    consumeResult = new ConsumeResult<Ignore, string>();
    partition = new Partition();
    CurrentOffset = new Offset();
}
/// <summary>
/// Creates a producer that publishes models mapped via the pub/sub and Avro mappers.
/// </summary>
/// <param name="kafkaConfig">Kafka connection/producer settings.</param>
/// <param name="modelMapper">Maps domain objects to pub/sub models.</param>
/// <param name="avroMapper">Maps domain objects to Avro records.</param>
/// <param name="logManager">Source of the class logger.</param>
public KafkaProducer(IKafkaConfig kafkaConfig, IPubSubModelMapper modelMapper, IAvroMapper avroMapper, ILogManager logManager)
{
    _logger = logManager.GetClassLogger();
    _modelMapper = modelMapper;
    _avroMapper = avroMapper;
    _kafkaConfig = kafkaConfig;
    // Resolve topics last: GetTopics may depend on the config assigned above.
    _topics = GetTopics();
}
/// <summary>
/// Creates an admin client wrapper configured for SASL/PLAIN over SSL.
/// </summary>
/// <param name="logger">Logger for admin operations.</param>
/// <param name="config">Source of broker address and credentials.</param>
public KafkaAdmin(ILogger<KafkaAdmin> logger, IKafkaConfig config)
{
    this.logger = logger;

    // librdkafka settings keyed by their native names.
    this.config = new Dictionary<string, string>
    {
        ["bootstrap.servers"] = config.BootstrapServers,
        ["sasl.mechanisms"] = "PLAIN",
        ["security.protocol"] = "SASL_SSL",
        ["sasl.username"] = config.UserName,
        ["sasl.password"] = config.Password,
    };
}
/// <summary>
/// Creates a producer configured for SASL/PLAIN authentication over SSL.
/// </summary>
/// <param name="config">Source of broker address and credentials.</param>
/// <param name="logger">Logger for producer operations.</param>
public Producer(IKafkaConfig config, ILogger<Producer> logger)
{
    this.logger = logger;

    this.config = new ProducerConfig
    {
        BootstrapServers = config.BootstrapServers,
        SaslMechanism = SaslMechanism.Plain,
        SecurityProtocol = SecurityProtocol.SaslSsl,
        SaslUsername = config.UserName,
        SaslPassword = config.Password,
    };
}
/// <summary>
/// Wires up the Mongo-backed passenger collection and, when configured, a Kafka producer.
/// </summary>
/// <param name="settings">Mongo connection, database, and collection names.</param>
/// <param name="kafkaConfig">Optional Kafka settings; may be null to disable publishing.</param>
public PassengerService(IPassengersDatabaseSettings settings, IKafkaConfig kafkaConfig)
{
    var mongoClient = new MongoClient(settings.ConnectionString);
    var mongoDatabase = mongoClient.GetDatabase(settings.DatabaseName);
    _passengers = mongoDatabase.GetCollection<Passenger>(settings.PassengersCollectionName);

    // Kafka wiring is optional: without config, _producer/_kafkaTopics stay null.
    if (kafkaConfig == null)
    {
        return;
    }

    _producer = new ProducerWrapper(kafkaConfig);
    _kafkaTopics = kafkaConfig.KafkaTopics;
}
/// <summary>
/// Builds a Kafka producer wired to the given block tree and initializes it.
/// Initialization failures are logged and swallowed; the (possibly uninitialized)
/// producer is still returned, matching the previous best-effort contract.
/// </summary>
/// <param name="blockTree">Block tree the Avro mapper reads from.</param>
/// <param name="kafkaConfig">Kafka connection/producer settings.</param>
/// <returns>The constructed producer, initialized when no error occurred.</returns>
private async Task<IProducer> PrepareKafkaProducer(IBlockTree blockTree, IKafkaConfig kafkaConfig)
{
    PubSubModelMapper pubSubModelMapper = new PubSubModelMapper();
    AvroMapper avroMapper = new AvroMapper(blockTree);
    KafkaProducer kafkaProducer = new KafkaProducer(kafkaConfig, pubSubModelMapper, avroMapper, _context.LogManager);

    try
    {
        await kafkaProducer.InitAsync();
    }
    catch (Exception e)
    {
        // FIX: the original used task.ContinueWith(...) to observe the fault, which
        // logged a wrapped AggregateException and is a known antipattern after await.
        // A try/catch preserves the swallow-and-log behavior with the real exception.
        if (_logger.IsError) _logger.Error("Error during Kafka initialization", e);
    }

    return kafkaProducer;
}
/// <summary>
/// Registers a Kafka producer with the context when Kafka is enabled in config.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the block tree is not ready.</exception>
public async Task Execute()
{
    if (_context.BlockTree == null)
    {
        throw new InvalidOperationException("Kafka producer initialization started before the block tree is ready.");
    }

    IKafkaConfig kafkaConfig = _context.Config<IKafkaConfig>();

    // Nothing to do when the feature is switched off.
    if (!kafkaConfig.Enabled)
    {
        return;
    }

    IProducer producer = await PrepareKafkaProducer(_context.BlockTree, kafkaConfig);
    _context.Producers.Add(producer);
}
/// <summary>
/// Creates a consumer configured for SASL/PLAIN over SSL that persists via the repository.
/// </summary>
/// <param name="config">Source of broker address and credentials.</param>
/// <param name="repository">Storage target for consumed messages.</param>
/// <param name="logger">Logger for consumer operations.</param>
public Consumer(IKafkaConfig config, IRepository repository, ILogger<Consumer> logger)
{
    this.repository = repository;
    this.logger = logger;

    this.config = new ConsumerConfig
    {
        BootstrapServers = config.BootstrapServers,
        GroupId = "webapi-integration",
        SaslMechanism = SaslMechanism.Plain,
        SecurityProtocol = SecurityProtocol.SaslSsl,
        SaslUsername = config.UserName,
        SaslPassword = config.Password,
        // Start from the oldest available record when no committed offset exists.
        AutoOffsetReset = AutoOffsetReset.Earliest,
    };
}
/// <summary>
/// Builds the message-processing pipeline from the config's pipeline definition.
/// </summary>
/// <param name="config">Configuration that knows how to populate the pipeline builder.</param>
/// <exception cref="Exception">Rethrown when pipeline construction fails.</exception>
public void Initialise(IKafkaConfig<TKey, TValue> config)
{
    _logger.LogTrace(LoggingEvents.Initialise, "Initialising provider");

    try
    {
        IPipelineBuilder builder = _serviceProvider.GetRequiredService<IPipelineBuilder>();
        config.ConfigurePipeline(builder);
        _pipeline = builder.Build();
    }
    catch (Exception ex)
    {
        // Pipeline construction failed — most often a missing IoC registration.
        _logger.LogError(LoggingEvents.InitialiseError, ex, "Initialisation error");
        // No recovery is possible; surface the failure to the caller.
        throw;
    }
}
/// <summary>
/// Stores the Kafka configuration for later consumer operations.
/// </summary>
/// <param name="config">Kafka connection/consumer settings.</param>
public KafkaConsumerHelper(IKafkaConfig config) => _config = config;
/// <summary>
/// Creates a transport bound to the given Kafka configuration.
/// </summary>
/// <param name="kafkaConfig">Kafka connection settings exposed via <c>KafkaConfig</c>.</param>
public KafkaTransport(IKafkaConfig kafkaConfig) => KafkaConfig = kafkaConfig;
/// <summary>
/// Logs the startup parameters of a KafkaConsumerService at information level.
/// </summary>
/// <param name="logger">Target logger; must not be null.</param>
/// <param name="eventId">Event id to tag the log entry with.</param>
/// <param name="config">Source of broker URLs, group id, and topics to report.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="logger"/> is null.</exception>
public static void LogStart<TKey, TValue>(this ILogger logger, EventId eventId, IKafkaConfig<TKey, TValue> config)
{
    if (logger is null)
    {
        throw new ArgumentNullException(nameof(logger));
    }

    logger.LogInformation(
        eventId,
        "Starting KafkaConsumerService for BrokerUrls {BrokerUrls}, ConsumerGroupId {ConsumerGroupId}, Topics {Topics}",
        config.BrokerUrls,
        config.ConsumerGroupId,
        config.Topics);
}
/// <summary>
/// Creates the underlying typed producer from the configured producer settings.
/// </summary>
/// <param name="kafkaConfig">Source of the producer configuration.</param>
public KafkaProducer(IKafkaConfig kafkaConfig) =>
    Producer = new Producer<TKey, TValue>(kafkaConfig.ProducerConfig);
/// <summary>
/// Builds a real producer only when Kafka is enabled; otherwise stays inert (null producer).
/// </summary>
/// <param name="config">Kafka settings, including the enable flag.</param>
public ProducerWrapper(IKafkaConfig config)
{
    _producer = config.Enabled
        ? new ProducerBuilder<Null, string>(config.ProducerConfig).Build()
        : null;
}
/// <summary>
/// Creates the underlying typed consumer from the configured consumer settings.
/// </summary>
/// <param name="kafkaConfig">Source of the consumer configuration.</param>
public KafkaConsumer(IKafkaConfig kafkaConfig) =>
    Consumer = new Consumer<TKey, TValue>(kafkaConfig.ConsumerConfig);