public static void AddKafka(this IServiceCollection services, IConfiguration configuration)
{
    if (services == null)
    {
        throw new ArgumentNullException(nameof(services));
    }

    _kafkaConfiguration = configuration.GetSection("KafkaConfiguration").Get<KafkaConfiguration>();

    services.AddMassTransit(bus =>
    {
        bus.UsingInMemory((ctx, cfg) => cfg.ConfigureEndpoints(ctx));

        bus.AddRider(rider =>
        {
            rider.AddConsumer();
            rider.AddProducers();

            rider.UsingKafka((ctx, k) =>
            {
                k.Host(_kafkaConfiguration.ConnectionString);

                k.TopicEndpoint<StockConfirmationIntegrationEvent>(
                    _kafkaConfiguration.ProductsStockResponseSuccess, _kafkaConfiguration.Group, e =>
                    {
                        e.ConfigureConsumer<StockConfirmationConsumer>(ctx);
                        e.CreateIfMissing(options =>
                        {
                            options.NumPartitions = 3;
                            options.ReplicationFactor = 1;
                        });
                    });

                k.TopicEndpoint<StockConfimationFailIntegrationEvent>(
                    _kafkaConfiguration.ProductsStockResponseFail, _kafkaConfiguration.Group, e =>
                    {
                        e.ConfigureConsumer<StockConfirmationFailConsumer>(ctx);
                        e.CreateIfMissing(options =>
                        {
                            options.NumPartitions = 3;
                            options.ReplicationFactor = 1;
                        });
                    });

                k.TopicEndpoint<BuyerCreatedIntegrationEvent>(
                    _kafkaConfiguration.AuthBuyerCreated, _kafkaConfiguration.Group, e =>
                    {
                        e.ConfigureConsumer<BuyerCreatedConsumer>(ctx);
                        e.CreateIfMissing(options =>
                        {
                            options.NumPartitions = 3;
                            options.ReplicationFactor = 1;
                        });
                    });
            });
        });
    });

    services.AddMassTransitHostedService();
}
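The extension above binds a "KafkaConfiguration" section into a settings object. A minimal sketch of that settings class and a matching appsettings.json section, assuming only the property names used above; the values are placeholders, not taken from the original project.

// Assumed shape of the bound settings; property names mirror the usage above.
public class KafkaConfiguration
{
    public string ConnectionString { get; set; }            // e.g. "localhost:9092" (placeholder)
    public string Group { get; set; }                        // consumer group id (placeholder)
    public string ProductsStockResponseSuccess { get; set; } // topic names below are placeholders
    public string ProductsStockResponseFail { get; set; }
    public string AuthBuyerCreated { get; set; }
}

/* Matching "KafkaConfiguration" section in appsettings.json (placeholder values):
{
  "KafkaConfiguration": {
    "ConnectionString": "localhost:9092",
    "Group": "ordering-service",
    "ProductsStockResponseSuccess": "products-stock-response-success",
    "ProductsStockResponseFail": "products-stock-response-fail",
    "AuthBuyerCreated": "auth-buyer-created"
  }
}
*/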
public void ConfigureContainer(ContainerBuilder builder)
{
    BsonMapping();

    // Add any Autofac modules or registrations.
    // This is called AFTER ConfigureServices, so things you register here
    // OVERRIDE things registered in ConfigureServices.
    //
    // You must have the call to AddAutofac in the Program.Main method
    // or this won't be called.
    builder.RegisterModule(new AutofacModule());

    var amqpUri = Configuration.GetSection("RabbitMq").Get<RabbitMqSettings>().ConnectionString;
    var kafkaUri = Configuration.GetSection("Kafka").Get<KafkaSettings>().BrokerAddresses;
    var mongoDbSettings = Configuration.GetSection("MongoDb").Get<MongoDbSettings>();

    var container = EventFlowOptions.New
        .UseAutofacContainerBuilder(builder) // Must be the first line!
        .AddDefaults(Assembly.GetExecutingAssembly())
        .UseMongoDbReadModel<AppointmentReadModel>()
        .UseMongoDbInsertOnlyReadModel<AppointmentInsertReadModel>()
        .UseMongoDbReadModel<PaymentDetailsReadModel>()
        .UseMongoDbInsertOnlyReadModel<PaymentDetailsReadModel>()
        .UseMongoDbEventStore()
        .UseMongoDbSnapshotStore()
        .PublishToKafka(KafkaConfiguration.With(kafkaUri))
        .UseLibLog(LibLogProviders.Serilog)
        .ConfigureMongoDb(mongoDbSettings.ConnectionString, mongoDbSettings.Database);
}
static IHostBuilder CreateHostBuilder(string[] args) =>
    Host.CreateDefaultBuilder(args)
        .ConfigureServices((hostContext, services) =>
        {
            services.AddLogging(cfg => cfg.AddConsole())
                .AddOpenSleigh(cfg =>
                {
                    var mongoSection = hostContext.Configuration.GetSection("Mongo");
                    var mongoCfg = new MongoConfiguration(
                        mongoSection["ConnectionString"],
                        mongoSection["DbName"],
                        MongoSagaStateRepositoryOptions.Default,
                        MongoOutboxRepositoryOptions.Default);

                    var kafkaConnStr = hostContext.Configuration.GetSection("ConnectionStrings")["Kafka"];
                    var kafkaCfg = new KafkaConfiguration(kafkaConnStr);

                    cfg.UseKafkaTransport(kafkaCfg)
                        .UseMongoPersistence(mongoCfg);

                    cfg.AddSaga<SimpleSaga, SimpleSagaState>()
                        .UseStateFactory<StartSimpleSaga>(msg => new SimpleSagaState(msg.CorrelationId))
                        .UseKafkaTransport();

                    cfg.AddSaga<ParentSaga, ParentSagaState>()
                        .UseStateFactory<StartParentSaga>(msg => new ParentSagaState(msg.CorrelationId))
                        .UseKafkaTransport();

                    cfg.AddSaga<ChildSaga, ChildSagaState>()
                        .UseStateFactory<StartChildSaga>(msg => new ChildSagaState(msg.CorrelationId))
                        .UseKafkaTransport();
                });
        });
public WeatherForecastController(ILogger<WeatherForecastController> logger,
    KafkaConfiguration kafkaConfiguration,
    KafkaProducer<WeatherForecast> producer,
    TelemetryClient telemetryClient)
{
    _logger = logger;
    _kafkaConfiguration = kafkaConfiguration;
    _producer = producer;
    _telemetryClient = telemetryClient;
}
public KafkaSettingsFactory WithConfiguration(KafkaConfiguration conf)
{
    return this
        .WithGroupId(conf.GroupId)
        .WithBootstrapServers(conf.BootstrapServers)
        .WithDebugLogging(conf.DebugLogging);
}
public ValuesController(KafkaConfiguration config, ConsumerConfiguration consumerconf)
{
    _config = config;
    topic = config.topic;
    _consumerconf = consumerconf;
    consumertopic = consumerconf.topic;
}
public KafkaProducerService(ILogger<KafkaProducerService> logger, IOptions<KafkaConfiguration> kafkaConfigurationOptions)
{
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _kafkaConfiguration = kafkaConfigurationOptions?.Value ?? throw new ArgumentNullException(nameof(kafkaConfigurationOptions));
    Init();
}
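The constructor above expects IOptions<KafkaConfiguration> from DI. A minimal wiring sketch that would satisfy it, assuming a "Kafka" configuration section; the section name and service lifetime are assumptions, not taken from the original project.

// Assumed registration: bind a configuration section to KafkaConfiguration
// so IOptions<KafkaConfiguration> can be injected into KafkaProducerService.
services.Configure<KafkaConfiguration>(configuration.GetSection("Kafka"));
services.AddSingleton<KafkaProducerService>();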
public ProducerConfig SetupProducerConfig(KafkaConfiguration kafkaConfiguration)
{
    var config = new ProducerConfig();
    config.BootstrapServers = $"{kafkaConfiguration.BootstrapServer}:{kafkaConfiguration.Port}";
    config.ClientId = Dns.GetHostName();
    config.Acks = kafkaConfiguration.Acknowledgements?.ToLower() switch
    {
        "all" => Acks.All,
        "leader" => Acks.Leader,
        "none" => Acks.None,
        _ => Acks.All
    };

    if (!string.IsNullOrWhiteSpace(kafkaConfiguration.SaslUsername) &&
        !string.IsNullOrWhiteSpace(kafkaConfiguration.SaslPassword))
    {
        config.SaslUsername = kafkaConfiguration.SaslUsername;
        config.SaslPassword = kafkaConfiguration.SaslPassword;
    }
    else
    {
        config.SecurityProtocol = SecurityProtocol.Plaintext;
    }

    return config;
}
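A hedged usage sketch for the ProducerConfig built above, using Confluent.Kafka's ProducerBuilder; the topic name and message value are placeholders, and the call is assumed to run inside an async method.

// Assumed usage of the config produced by SetupProducerConfig.
var producerConfig = SetupProducerConfig(kafkaConfiguration);
using var producer = new ProducerBuilder<Null, string>(producerConfig).Build();
await producer.ProduceAsync("example-topic", new Message<Null, string> { Value = "hello" });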
public void GetBootstrapServerString_ConcatsServerString()
{
    // Arrange
    KafkaConfiguration config = new KafkaConfiguration();
    List<KafkaServer> servers = new List<KafkaServer>()
    {
        new KafkaServer() { Address = "localhost", Port = 92 },
        new KafkaServer() { Address = "::1", Port = 95 }
    };
    string expectedString = $"{servers[0]},{servers[1]}";

    // Act
    config.BootstrapServers = servers;
    string bootstrapServerString = config.GetBootstrapServerString();

    // Assert
    bootstrapServerString.Should().Be(expectedString);
}
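A sketch of an implementation consistent with this test and with the constructor and empty-list tests further down; this is assumed, not the project's actual code, and the "address:port" format of KafkaServer.ToString() is a guess since the test only requires that the joined string match whatever ToString() produces.

public class KafkaServer
{
    public string Address { get; set; }
    public int Port { get; set; }

    // Assumed format.
    public override string ToString() => $"{Address}:{Port}";
}

public class KafkaConfiguration
{
    // Initialized so the constructor test below passes.
    public List<KafkaServer> BootstrapServers { get; set; } = new List<KafkaServer>();

    // An empty list yields an empty string, matching the "no server" test below.
    public string GetBootstrapServerString() => string.Join(",", BootstrapServers);
}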
public KafkaConsumer(IConfiguration configuration, ILogger<KafkaConsumer> logger, IMessageHandler messageHandler)
{
    _logger = logger;
    _messageHandler = messageHandler;
    _configuration = new KafkaConfiguration(configuration);
}
public KafkaProducer(KafkaConfiguration kafkaConfiguration)
{
    var producerConfig = new ProducerConfig(kafkaConfiguration.ClientConfig);

    _producer = new ProducerBuilder<Null, WeatherForecast>(producerConfig)
        .SetValueSerializer(new JsonSerializer<WeatherForecast>())
        .Build();
}
public void ctor_should_throw_when_input_null()
{
    var groupIdFactory = NSubstitute.Substitute.For<IGroupIdFactory>();
    var config = new KafkaConfiguration("lorem");

    Assert.Throws<ArgumentNullException>(() => new ConsumerBuilderFactory(null, config));
    Assert.Throws<ArgumentNullException>(() => new ConsumerBuilderFactory(groupIdFactory, null));
}
public MessagePublisher(KafkaConfiguration kafkaConfiguration,
    KafkaProducerConfiguration producerConfig,
    string topic,
    IMessageSerializer serializer,
    Func<TMessage, Dictionary<string, string>> propertyProvider)
{
    _kafkaConfiguration = kafkaConfiguration;
    _topic = topic;
    _serializer = serializer;
    _propertyProvider = propertyProvider;
    _producerConfig = producerConfig;
}
public void GetConfiguration_HasNoQueueWithId_ReturnsNull()
{
    var configuration = new KafkaConfiguration();

    var result = configuration.GetQueueConfiguration("id");

    Assert.That(result, Is.Null);
}
public void TopicProperty_GivenNullTopics_TopicShouldBeNull()
{
    var config = new KafkaConfiguration { Topics = new[] { null as string } };

    config.Topic.Should().BeNull();
}
public KafkaProducer(IConfiguration configuration, ILogger<KafkaProducer> logger)
{
    _logger = logger;
    _defaultConfig = new KafkaConfiguration(configuration);

    // Fall back to a 10 ms wait time when WAIT_TIME is missing or not a valid integer.
    if (!int.TryParse(configuration["WAIT_TIME"], out _waitTimeMilliseconds))
    {
        _waitTimeMilliseconds = 10;
    }
}
public static IBusConfigurator UseKafkaTransport(this IBusConfigurator busConfigurator,
    KafkaConfiguration config,
    Action<IKafkaBusConfigurationBuilder> builderFunc)
{
    busConfigurator.Services.AddSingleton(config);
    busConfigurator.Services.AddSingleton<IQueueReferenceFactory>(ctx =>
        new QueueReferenceFactory(ctx, config.DefaultQueueReferenceCreator));

    busConfigurator.Services.AddSingleton(ctx =>
    {
        var kafkaConfig = ctx.GetRequiredService<KafkaConfiguration>();
        return new AdminClientConfig()
        {
            BootstrapServers = kafkaConfig.ConnectionString
        };
    });

    busConfigurator.Services.AddSingleton(ctx =>
    {
        var adminClientConfig = ctx.GetRequiredService<AdminClientConfig>();
        return new AdminClientBuilder(adminClientConfig);
    });

    busConfigurator.Services.AddSingleton(ctx =>
    {
        var kafkaConfig = ctx.GetRequiredService<KafkaConfiguration>();
        return new ProducerConfig()
        {
            BootstrapServers = kafkaConfig.ConnectionString
        };
    });

    busConfigurator.Services.AddSingleton(ctx =>
    {
        var producerConfig = ctx.GetRequiredService<ProducerConfig>();
        var builder = new ProducerBuilder<Guid, byte[]>(producerConfig);
        builder.SetKeySerializer(new KeySerializer<Guid>());
        return builder;
    });

    busConfigurator.Services.AddSingleton<IProducer<Guid, byte[]>>(ctx =>
    {
        var builder = ctx.GetRequiredService<ProducerBuilder<Guid, byte[]>>();
        return builder.Build();
    });

    busConfigurator.Services.AddTransient<IKafkaPublisherExecutor, KafkaPublisherExecutor>();
    busConfigurator.Services.AddTransient<IPublisher, KafkaPublisher>();
    busConfigurator.Services.AddTransient<IMessageParser, MessageParser>();
    busConfigurator.Services.AddSingleton<IKafkaMessageHandler, KafkaMessageHandler>();
    busConfigurator.Services.AddSingleton<IGroupIdFactory, DefaultGroupIdFactory>();
    busConfigurator.Services.AddSingleton<IConsumerBuilderFactory, ConsumerBuilderFactory>();

    builderFunc?.Invoke(new DefaultKafkaBusConfigurationBuilder(busConfigurator));

    return busConfigurator;
}
public void Constructor_InitializesBootstrapServers()
{
    // Arrange
    KafkaConfiguration config = new KafkaConfiguration();

    // Act + Assert: the constructor itself is the behaviour under test.
    config.BootstrapServers.Should().NotBeNull();
}
public MessagePublisher(KafkaConfiguration kafkaConfiguration,
    KafkaProducerConfiguration producerConfig,
    string topic,
    IMessageSerializer serializer,
    Func<TMessage, Dictionary<string, string>> propertyProvider)
{
    _topic = topic;
    _serializer = serializer;
    _propertyProvider = propertyProvider;
    _producerConfig = new ProducerConfig
    {
        BootstrapServers = kafkaConfiguration.BootstrapServers,
        BatchNumMessages = producerConfig.BatchFlushSize
    };
}
public void KafkaConfiguration_GetControllerTopicName()
{
    var configSettings = new KafkaConfigSettings()
    {
        SaslKerberosPrincipal = "blah",
        GroupId = "group",
        ControllerTopicName = "test"
    };

    var objectUnderTest = new KafkaConfiguration(configSettings);

    Assert.True(objectUnderTest.ControllerTopicName == "test");
}
public void TopicProperty_GivenValidTopic_ShouldSetAndGetTopicProperty()
{
    var topic = Guid.NewGuid().ToString();

    var config = new KafkaConfiguration { Topic = topic };

    config.Topic.Should().Be(topic);
}
public async Task CreateTopic(KafkaConfiguration configuration)
{
    _configuration = configuration;

    var topic = configuration.Topics!.First();
    await AdminClientHelper.CreateTopicAsync(configuration.BootstrapServers, topic, 10);
}
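AdminClientHelper is project-specific; a sketch of what such a helper might do with Confluent.Kafka's admin client. The class and method names mirror the call above, but the body is an assumption, not the project's actual implementation.

using Confluent.Kafka;
using Confluent.Kafka.Admin;
using System.Threading.Tasks;

public static class AdminClientHelper
{
    // Assumed implementation: create the topic with the requested partition count
    // and a replication factor of 1 (placeholder for single-broker setups).
    public static async Task CreateTopicAsync(string bootstrapServers, string topic, int numPartitions)
    {
        using var adminClient = new AdminClientBuilder(
            new AdminClientConfig { BootstrapServers = bootstrapServers }).Build();

        await adminClient.CreateTopicsAsync(new[]
        {
            new TopicSpecification
            {
                Name = topic,
                NumPartitions = numPartitions,
                ReplicationFactor = 1
            }
        });
    }
}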
public void TopicProperty_GivenTopic_ShouldGetTopics()
{
    var topic = "myTopic";

    var config = new KafkaConfiguration { Topic = topic };

    config.Topics!.First().Should().Be(topic);
}
public void TopicProperty_GivenTopics_ShouldGetTopic()
{
    var topic = "myTopic";

    var config = new KafkaConfiguration { Topics = new[] { topic } };

    config.Topic.Should().Be(topic);
}
public void GivenValidConfiguration_ShouldCreateTestConsumer()
{
    var config = new KafkaConfiguration
    {
        Topics = new[] { "MyTopic" },
        GroupId = "groupId"
    };

    _ = new TestConsumer(config);
}
public KafkaConsumerService(ILogger<KafkaConsumerService> logger,
    IOptions<KafkaConfiguration> kafkaConfigurationOptions,
    //IScheduleConfig<KafkaConsumerService> config,
    IServiceScopeFactory serviceScopeFactory)
    : base(null, null)
{
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _kafkaConfiguration = kafkaConfigurationOptions?.Value ?? throw new ArgumentNullException(nameof(kafkaConfigurationOptions));
    _serviceScopeFactory = serviceScopeFactory;
    Init();
}
public void KafkaConfiguration_GetLocalConsumerConfig()
{
    var configSettings = new KafkaConfigSettings()
    {
        SaslKerberosPrincipal = "blah",
        GroupId = "group",
        ApiVersionRequest = false,
        IncludeSasl = false
    };

    var objectUnderTest = new KafkaConfiguration(configSettings);

    var config = objectUnderTest.ConsumerConfig;

    Assert.Null(config.SaslKerberosPrincipal);
}
public KafkaProducerService(ILogger<KafkaProducerService> logger,
    IOptions<KafkaConfiguration> kafkaConfigurationOptions,
    IScheduleConfig<KafkaProducerService> config,
    IServiceScopeFactory serviceScopeFactory)
    : base(config.CronExpression, config.TimeZoneInfo)
{
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _kafkaConfiguration = kafkaConfigurationOptions?.Value ?? throw new ArgumentNullException(nameof(kafkaConfigurationOptions));
    _serviceScopeFactory = serviceScopeFactory;
    Init();
}
public void Create_should_return_valid_instance()
{
    var groupIdFactory = NSubstitute.Substitute.For<IGroupIdFactory>();
    groupIdFactory.Create<IMessage>().Returns("ipsum");

    var config = new KafkaConfiguration("lorem");
    var sut = new ConsumerBuilderFactory(groupIdFactory, config);

    var result = sut.Create<IMessage, Guid, byte[]>();
    result.Should().NotBeNull();
}
public void GetBootstrapServerString_ReturnsEmptyString_IfNoServerIsPresent()
{
    // Arrange
    KafkaConfiguration config = new KafkaConfiguration();

    // Act
    string bootstrapServerString = config.GetBootstrapServerString();

    // Assert
    bootstrapServerString.Should().BeEmpty();
}