/// <summary>
/// Creates a publisher that sends <typeparamref name="TEvent"/> messages to a fixed Kafka topic,
/// deriving each message key via <paramref name="keyProvider"/>.
/// </summary>
/// <param name="topic">Destination topic name.</param>
/// <param name="settings">Connection settings; <c>BootstrapServers</c> is joined into a comma-separated list.</param>
/// <param name="keySerializer">Optional key serializer; when null, Confluent's default is used.</param>
/// <param name="valueSerializer">Serializer for the event payload (always required).</param>
/// <param name="keyProvider">Maps an event to its partitioning key.</param>
public KafkaEventPublisher(
    string topic,
    KafkaSettings settings,
    ISerializer<TKey> keySerializer,
    ISerializer<TEvent> valueSerializer,
    Func<TEvent, TKey> keyProvider)
{
    _topic = topic;
    _keyProvider = keyProvider;

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = string.Join(",", settings.BootstrapServers),
        // Consistent partitioner: equal keys always map to the same partition.
        Partitioner = Partitioner.Consistent
    };

    var builder = new ProducerBuilder<TKey, TEvent>(producerConfig)
        .SetValueSerializer(valueSerializer);

    // The key serializer is optional; only attach it when the caller supplied one.
    if (keySerializer is not null)
    {
        builder.SetKeySerializer(keySerializer);
    }

    _producer = builder.Build();
}
/// <summary>
/// Round-trips three different object types through serialization/deserialization
/// and verifies key properties survive the round trip.
/// Fix: xUnit's <c>Assert.Equal(expected, actual)</c> takes the expected value FIRST;
/// the original passed the deserialized (actual) value first, producing misleading
/// "Expected/Actual" labels in failure messages.
/// </summary>
public void TestSerializerObject()
{
    // Arrange / act / assert: Order round trip.
    var order = new Order()
    {
        EventID = "TestEventID4",
        OrderId = "TestOrderId4",
        OrderSubmitTime = DateTime.Now,
        Username = Guid.NewGuid().ToString(),
        ValueA = "TestValueA4",
        ValueB = "TestValueB4"
    };
    var finalResult = TestSerializedDeserliazedData(order);
    Assert.Equal(order.OrderId, finalResult.OrderId);
    Assert.Equal(order.EventID, finalResult.EventID);

    // KafkaSettings round trip.
    var kafkaSettings = new KafkaSettings()
    {
        BrokerList = "TestBrokerList1",
        ConsumerGroupId = "TestConsumerGroupId",
        ProducerGroupId = "TestProducerGroupId",
        LogTopic = "TestTopic"
    };
    var finalResult2 = TestSerializedDeserliazedData(kafkaSettings);
    Assert.Equal(kafkaSettings.ProducerGroupId, finalResult2.ProducerGroupId);
    Assert.Equal(kafkaSettings.BrokerList, finalResult2.BrokerList);

    // LogItem round trip.
    var logItem = new LogItem()
    {
        Identifier = "TestIdentifier",
        Message = "TestMessage",
        Exception = "TestException",
        Type = "TestType"
    };
    var finalResult3 = TestSerializedDeserliazedData(logItem);
    Assert.Equal(logItem.Identifier, finalResult3.Identifier);
    Assert.Equal(logItem.Message, finalResult3.Message);
}
/// <summary>
/// Loads configuration from the app-settings file, binds the Kafka and Graylog
/// sections, builds the DI container, and resolves the core service singletons.
/// </summary>
/// <returns>True when startup succeeded; false if any step threw.</returns>
private static bool StartUpService()
{
    try
    {
        var builder = new ConfigurationBuilder()
            .AddJsonFile(Constant.AppSettingsFile);
        _configuration = builder.Build();

        _kafkaSettings = new KafkaSettings();
        _configuration.GetSection("KafkaSettings").Bind(_kafkaSettings);

        var graylogSettings = new GraylogSettings();
        _configuration.GetSection("GraylogSettings").Bind(graylogSettings);

        var serviceProvider = new ServiceCollection()
            .AddSingleton<IPubSubProvider, KafkaProvider>()
            .AddSingleton<ILogProvider, LogProvider>()
            .AddSingleton<ILogServiceManager, LogServiceManager>()
            .Configure<KafkaSettings>(_configuration.GetSection("KafkaSettings"))
            .Configure<GraylogSettings>(_configuration.GetSection("GraylogSettings"))
            .AddOptions()
            .BuildServiceProvider();

        _pubSubProvider = serviceProvider.GetService<IPubSubProvider>();
        _logProvider = serviceProvider.GetService<ILogProvider>();
        _logServiceManager = serviceProvider.GetService<ILogServiceManager>();

        return true;
    }
    catch (Exception)
    {
        // Fix: the caught exception variable was never used. The best-effort
        // "report failure via return value" contract is preserved.
        // NOTE(review): consider logging the exception here so startup failures
        // are diagnosable — confirm a logging sink is available this early.
        return false;
    }
}
/// <summary>
/// Creates a producer for publishing dead-letter messages (string payloads, no key)
/// to the broker configured in <see cref="KafkaSettings"/>.
/// </summary>
/// <param name="configuration">Options wrapper around the Kafka connection settings.</param>
public DeadLetterMessagesProducer(IOptions<KafkaSettings> configuration)
{
    _configuration = configuration.Value;

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = _configuration.ConnectionString
    };

    _producer = new ProducerBuilder<Null, string>(producerConfig).Build();
}
/// <summary>
/// Wires up the booking processor's collaborators: the message bus, the booking
/// store, the Kafka proxy, and the unwrapped Kafka settings.
/// </summary>
public BookingProcessor(
    IBusControl busControl,
    IBookingRepository bookingRepository,
    IKafkaProxy kafkaProxy,
    IOptions<KafkaSettings> kafkaSettings)
{
    // Pure constructor injection — unwrap the options value once, up front.
    _kafkaSettings = kafkaSettings.Value;
    _busControl = busControl;
    _bookingRepository = bookingRepository;
    _kafkaProxy = kafkaProxy;
}
/// <summary>
/// Creates a typed Kafka producer for <typeparamref name="T"/>. Messages are unkeyed
/// (<see cref="Null"/> key) and the topic name is derived from the payload type's
/// full CLR name.
/// </summary>
/// <param name="configuration">Options wrapper holding the broker connection string.</param>
/// <param name="serializer">Serializer used for the message value.</param>
public KafkaProducer(IOptions<KafkaSettings> configuration, IMessageSerializer<T> serializer)
{
    _configuration = configuration.Value;

    // Topic-per-type convention: typeof(T).FullName names the destination topic.
    _topicName = typeof(T).FullName;

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = _configuration.ConnectionString
    };

    _producer = new ProducerBuilder<Null, T>(producerConfig)
        .SetValueSerializer(serializer)
        .Build();
}
/// <summary>
/// Creates a per-partition background consumer and assigns it a unique,
/// monotonically increasing consumer id.
/// </summary>
/// <param name="configuration">Kafka connection settings.</param>
/// <param name="consumerFactory">Factory used to build the underlying consumers.</param>
/// <param name="deadLetterMessagesProducer">Sink for messages that fail processing.</param>
/// <param name="subscribedTopics">Topics this consumer subscribes to.</param>
public BackgroundPerPartitionConsumer(
    KafkaSettings configuration,
    IKafkaMessageConsumerFactory consumerFactory,
    IDeadLetterMessagesProducer deadLetterMessagesProducer,
    IReadOnlyList<string> subscribedTopics)
{
    _configuration = configuration;
    _consumerFactory = consumerFactory;
    _deadLetterMessagesProducer = deadLetterMessagesProducer;
    _subscribedTopics = subscribedTopics;

    // Fix: the original `_consumerCount++; _consumerId = _consumerCount;` is a
    // non-atomic read-modify-write on a shared counter — two consumers constructed
    // concurrently could end up with the same id. Interlocked.Increment makes the
    // increment-and-read atomic, guaranteeing unique ids.
    _consumerId = System.Threading.Interlocked.Increment(ref _consumerCount);
}
/// <summary>
/// Registers a singleton <see cref="IEventPublisher{TTopicEventBase}"/> backed by a
/// <see cref="KafkaEventPublisher{TKey, TTopicEventBase}"/> bound to the given topic.
/// </summary>
/// <param name="services">The service collection being configured.</param>
/// <param name="topic">Destination Kafka topic.</param>
/// <param name="settings">Broker connection settings.</param>
/// <param name="keySerializer">Optional serializer for message keys.</param>
/// <param name="valueSerializer">Serializer for event payloads.</param>
/// <param name="keyProvider">Maps an event to its partitioning key.</param>
/// <returns>The same service collection, for chaining.</returns>
public static IServiceCollection AddKafkaTopicPublisher<TKey, TTopicEventBase>(
    this IServiceCollection services,
    string topic,
    KafkaSettings settings,
    ISerializer<TKey> keySerializer,
    ISerializer<TTopicEventBase> valueSerializer,
    Func<TTopicEventBase, TKey> keyProvider)
{
    // The publisher instance is constructed eagerly and registered as a singleton.
    var publisher = new KafkaEventPublisher<TKey, TTopicEventBase>(
        topic,
        settings,
        keySerializer,
        valueSerializer,
        keyProvider);

    return services.AddSingleton<IEventPublisher<TTopicEventBase>>(publisher);
}
/// <summary>
/// Builds the SASL/SSL consumer configuration for the affiliate-category topic.
/// Auto-commit is disabled, so offsets must be committed explicitly after processing.
/// </summary>
/// <param name="settings">Broker endpoint, SASL credentials, and group/client ids.</param>
/// <param name="domainService">Domain service invoked for each consumed message.</param>
public AffiliateCategoryKafkaConsumer(KafkaSettings settings, AffiliateCategoryDomainService domainService)
{
    // Guard order preserved: domain service is validated before settings.
    _domainService = domainService.ThrowIfNull(nameof(domainService));
    settings.ThrowIfNull(nameof(settings));

    var consumerConfig = new ConsumerConfig
    {
        BootstrapServers = settings.BootstrapServers,
        SaslMechanism = SaslMechanism.Plain,
        SecurityProtocol = SecurityProtocol.SaslSsl,
        SaslUsername = settings.Username,
        SaslPassword = settings.Password,
        ClientId = settings.ClientId,
        GroupId = settings.GroupId,
        // Start from the earliest offset when no committed offset exists.
        AutoOffsetReset = AutoOffsetReset.Earliest,
        // Manual commit: offsets are only advanced once handling succeeds.
        EnableAutoCommit = false
    };

    _config = consumerConfig;
}
/// <summary>
/// Creates a batching string producer (200 ms linger, 320 KB batches, LZ4 compression)
/// for the topic named in <paramref name="kafkaSettings"/>.
/// </summary>
/// <param name="appSettings">App-level settings (currently unused by this constructor —
/// NOTE(review): kept for interface compatibility; confirm whether it can be dropped).</param>
/// <param name="kafkaSettings">Broker list, topic name, and parallelism limit.</param>
/// <param name="logger">Logger for startup diagnostics.</param>
public KafkaProducerService(AppSettings appSettings, KafkaSettings kafkaSettings, ILogger<KafkaProducerService> logger)
{
    _logger = logger;
    string bootstrapServers = kafkaSettings.BootstrapServers;

    ProducerConfig = new ProducerConfig
    {
        BootstrapServers = bootstrapServers,
        ClientId = Dns.GetHostName(),
        // Trade a little latency (200 ms) for larger, compressed batches.
        LingerMs = 200,
        BatchSize = 327680,
        CompressionType = CompressionType.Lz4,
    };

    Topic = kafkaSettings.TopicName;
    Producer = new ProducerBuilder<Null, string>(ProducerConfig).Build();
    MaxParallelProductionThreads = kafkaSettings.MaxParallelProductionThreads;

    // Fix (CA2254): use a message template with named placeholders instead of string
    // interpolation so structured-logging sinks capture the values as properties.
    _logger.LogInformation(
        "Created producer with bootstrapservers: {BootstrapServers} and topic {Topic}",
        bootstrapServers,
        Topic);
}
/// <summary>
/// Builds the producer configuration; when the broker is not local, SASL
/// (SCRAM-SHA-256 over SSL) credentials from settings are applied.
/// </summary>
/// <param name="kafkaSettings">Broker address and (for non-local brokers) credentials.</param>
/// <param name="logger">Logger for this provider.</param>
public KafkaProvider(KafkaSettings kafkaSettings, ILogger<KafkaProvider> logger)
{
    _kafkaSettings = kafkaSettings;
    _logger = logger;

    _producerConfig = new ProducerConfig
    {
        BootstrapServers = _kafkaSettings.KafkaServer,
        ClientId = Dns.GetHostName(),
        // Fail sends that are not delivered within 10 s.
        MessageTimeoutMs = 10000,
    };

    // Fix (CA1310): the original StartsWith("localhost") used a culture-sensitive
    // comparison; hostnames are machine identifiers, so compare ordinally.
    if (!_kafkaSettings.KafkaServer.StartsWith("localhost", StringComparison.OrdinalIgnoreCase))
    {
        _producerConfig.SaslMechanism = SaslMechanism.ScramSha256;
        _producerConfig.SecurityProtocol = SecurityProtocol.SaslSsl;
        _producerConfig.SaslUsername = _kafkaSettings.UserName;
        _producerConfig.SaslPassword = _kafkaSettings.Password;
    }
}
/// <summary>
/// Creates a durable string/string producer: all-replica acks, aggressive send
/// retries, and SASL-plain authentication over SSL.
/// </summary>
/// <param name="settings">Broker endpoint and SASL credentials.</param>
/// <param name="failedEventsEventRepository">Store for events that could not be published.</param>
public KafkaBus(KafkaSettings settings, IFailedEventRepository failedEventsEventRepository)
{
    settings.ThrowIfNull(nameof(settings));
    _failedEventsEventRepository = failedEventsEventRepository.ThrowIfNull(nameof(failedEventsEventRepository));

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = settings.BootstrapServers,
        // Wait for all in-sync replicas before acknowledging a send.
        Acks = Acks.All,
        // Effectively retry-forever at the client level.
        MessageSendMaxRetries = 10000000,
        SslEndpointIdentificationAlgorithm = SslEndpointIdentificationAlgorithm.Https,
        SaslMechanism = SaslMechanism.Plain,
        SecurityProtocol = SecurityProtocol.SaslSsl,
        //SslCaLocation = "/usr/local/etc/openssl/cert.pem",
        SaslUsername = settings.Username,
        SaslPassword = settings.Password
    };

    _producer = new ProducerBuilder<string, string>(producerConfig).Build();
}
/// <summary>
/// Builds the consumer configuration from options. The CA certificate path is
/// resolved relative to the executing assembly's directory; SASL mechanism and
/// security protocol are mapped from their string settings.
/// </summary>
/// <param name="kafkaConfig">Options wrapper around the Kafka settings section.</param>
public KafkaService(IOptions<KafkaSettings> kafkaConfig)
{
    _kafkaConfig = kafkaConfig.Value;

    // The CA cert ships alongside the binaries, so anchor its path to the build output.
    string buildDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    string certLocation = Path.Combine(buildDirectory, _kafkaConfig.CaLocation);

    // Mapping preserved from the original nested ternaries: "gssapi" selects Gssapi,
    // anything else (including "plain") falls back to Plain.
    SaslMechanism mechanism = _kafkaConfig.SaslMechanisms == "gssapi"
        ? SaslMechanism.Gssapi
        : SaslMechanism.Plain;

    SecurityProtocol protocol = _kafkaConfig.SecurityProtocol == "SASL_SSL"
        ? SecurityProtocol.SaslSsl
        : SecurityProtocol.Plaintext;

    _consumerConfig = new ConsumerConfig
    {
        BootstrapServers = _kafkaConfig.Url,
        GroupId = _kafkaConfig.ConsumerGroup,
        SaslMechanism = mechanism,
        SaslUsername = _kafkaConfig.SaslUsername,
        SaslPassword = _kafkaConfig.SaslPassword,
        SslCaLocation = certLocation,
        EnableAutoCommit = true,
        AutoOffsetReset = AutoOffsetReset.Earliest,
        // Allow up to 30 minutes between polls before the broker evicts the consumer.
        MaxPollIntervalMs = (int?)TimeSpan.FromMinutes(30).TotalMilliseconds,
        SecurityProtocol = protocol
    };
}
/// <summary>
/// Loads configuration (with reload-on-change), binds the Kafka and MongoDB
/// sections, builds the DI container, and resolves the order-service singletons.
/// </summary>
/// <returns>True when startup succeeded; false if any step threw.</returns>
private static bool StartUpService()
{
    try
    {
        var builder = new ConfigurationBuilder()
            .AddJsonFile(Constant.AppSettingsFile, optional: false, reloadOnChange: true);
        _configuration = builder.Build();

        _kafkaSettings = new KafkaSettings();
        _configuration.GetSection("KafkaSettings").Bind(_kafkaSettings);

        var mongoDbSettings = new MongoDbSettings();
        _configuration.GetSection("MongoDbSettings").Bind(mongoDbSettings);

        var serviceProvider = new ServiceCollection()
            .AddSingleton<IPubSubProvider, KafkaProvider>()
            .AddSingleton<IOrderServiceManager, OrderServiceManager>()
            .AddSingleton<ILogProvider, LogProvider>()
            .AddSingleton<IMetricsProvider, PrometheusProvider>()
            // Fix: the factory lambda's parameter is the IServiceProvider, not
            // settings — the original name `settings` was misleading.
            .AddSingleton<IDataProvider, MongoDBProvider>(provider =>
                new MongoDBProvider(
                    mongoDbSettings.Host,
                    mongoDbSettings.Database,
                    mongoDbSettings.OrderCollection))
            .Configure<KafkaSettings>(_configuration.GetSection("KafkaSettings"))
            .AddOptions()
            .BuildServiceProvider();

        _pubSubProvider = serviceProvider.GetService<IPubSubProvider>();
        _orderServiceManager = serviceProvider.GetService<IOrderServiceManager>();
        _logProvider = serviceProvider.GetService<ILogProvider>();
        _metricsProvider = serviceProvider.GetService<IMetricsProvider>();

        // Resolved only to force eager construction of the data provider; the
        // instance itself is not retained (original stored it in an unused local).
        _ = serviceProvider.GetService<IDataProvider>();

        return true;
    }
    catch (Exception)
    {
        // Fix: the caught exception variable was never used. Best-effort contract
        // preserved: failure is reported via the return value.
        // NOTE(review): consider logging the exception before returning.
        return false;
    }
}
/// <summary>
/// Registers the Kafka settings, producer/consumer settings, the producer and
/// consumer singletons, and the background consumer hosted service.
/// </summary>
/// <param name="services">The service collection being configured.</param>
/// <param name="kafkaSettings">Shared Kafka connection settings.</param>
/// <param name="kafkaServiceSettings">Holds the separate producer and consumer settings.</param>
/// <returns>The same service collection, for chaining.</returns>
public static IServiceCollection IncludeKafka(this IServiceCollection services, KafkaSettings kafkaSettings, KafkaServiceSettings kafkaServiceSettings)
{
    // Settings are captured by the closures and handed out per scope.
    services.AddScoped(_ => kafkaSettings);
    services.AddScoped(_ => kafkaServiceSettings.Producer);
    services.AddScoped(_ => kafkaServiceSettings.Consumer);

    services.AddSingleton<IKafkaProducer, KafkaProducer>();
    services.AddSingleton<IKafkaConsumer, KafkaConsumer>();
    services.AddHostedService<KafkaConsumerService>();

    return services;
}
/// <summary>
/// HTTP-triggered function: deserializes a <c>Person</c> from the request body,
/// publishes it to Kafka keyed by <c>Person.Id</c>, and reports the outcome
/// (delivered / timed out / failed) in the response body. Always returns 200 OK.
/// </summary>
/// <param name="req">Incoming HTTP request; body is expected to be a JSON Person.</param>
/// <param name="log">Function logger (unused by the current implementation).</param>
public static async Task<IActionResult> RunAsync(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)] HttpRequest req,
    ILogger log)
{
    // Lazy one-time producer initialization.
    // NOTE(review): this check-then-assign on a static field is not thread-safe;
    // concurrent first invocations could build multiple producers. Consider
    // Lazy<T> or moving initialization to a static constructor — confirm the
    // function host's concurrency model before relying on this.
    if (_producer == null)
    {
        var configuration = new ConfigurationBuilder()
            .AddJsonFile("local.settings.json", optional: true, reloadOnChange: true)
            .AddEnvironmentVariables()
            .Build();

        _timeoutInSeconds = configuration.GetValue<int>("TIMEOUT_SECONDS", 60);
        _kafkaSettings = new KafkaSettings(configuration);

        // NOTE(review): the "SSL enabled" branch sets SecurityProtocol.SaslPlaintext
        // (SASL auth WITHOUT TLS) — verify SaslSsl wasn't intended; behavior kept as-is.
        var producerConfig = _kafkaSettings.SSlEnabled
            ? new ProducerConfig
            {
                BootstrapServers = _kafkaSettings.Broker,
                SaslMechanism = SaslMechanism.Plain,
                SaslUsername = _kafkaSettings.Username,
                SaslPassword = _kafkaSettings.Password,
                SecurityProtocol = SecurityProtocol.SaslPlaintext,
            }
            : new ProducerConfig
            {
                BootstrapServers = _kafkaSettings.Broker,
            };

        _producer = new ProducerBuilder<int, string>(producerConfig)
            .Build();
    }

    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    Person data = JsonConvert.DeserializeObject<Person>(requestBody);

    var result = "None";
    if (data != null)
    {
        try
        {
            string json = JsonConvert.SerializeObject(data);
            var task = _producer.ProduceAsync(
                IConstants.Topic,
                new Message<int, string> { Key = data.Id, Value = json });

            // Fix: `new CancellationToken()` is the default (non-cancellable) token
            // and added nothing — dropped. Race the produce against a timeout.
            var timeout = _timeoutInSeconds * 1000;
            if (await Task.WhenAny(task, Task.Delay(timeout)) == task)
            {
                // Task completed within timeout. Re-await so any fault or
                // cancellation from the produce call is rethrown here.
                var deliveryReport = await task;
                result = $"Data:[{data}] sent to Kafka - delivered to: {deliveryReport.TopicPartitionOffset}";
            }
            else
            {
                // Timed out: the produce task is abandoned but may still complete
                // in the background.
                result = $"Timeout when trying to publish message data:[{data}] to kafka. SSL Enabled?:[{_kafkaSettings.SSlEnabled}]";
            }
        }
        catch (Exception e)
        {
            result = $"Fail to publish message [{data}] to Kafka - Exception:[{e}]";
        }
    }

    return (ActionResult)new OkObjectResult(result);
}
/// <summary>
/// Captures the logger and the unwrapped Kafka settings for later use.
/// </summary>
/// <param name="logger">Logger for this service.</param>
/// <param name="kafkaSettingsOption">Options wrapper around the Kafka settings.</param>
public KafkaService(ILogger<KafkaService> logger, IOptions<KafkaSettings> kafkaSettingsOption)
{
    // Unwrap the options value once at construction time.
    _settings = kafkaSettingsOption.Value;
    _logger = logger;
}
/// <summary>
/// Stores the injected Kafka settings, database connection factory, and mapper.
/// </summary>
/// <param name="kafkaSettings">Kafka connection settings.</param>
/// <param name="dbFactory">Factory producing database connections.</param>
/// <param name="mapper">Object mapper used when transforming collected data.</param>
public CollectorFunction(KafkaSettings kafkaSettings, DataConnectionFactory dbFactory, IMapper mapper)
{
    // Pure constructor injection; no work is done here.
    _mapper = mapper;
    _dbFactory = dbFactory;
    _kafkaSettings = kafkaSettings;
}