/// <summary>
/// Creates the producer and builds its Confluent <see cref="ProducerConfig"/>.
/// </summary>
/// <param name="producerConfig">Application Kafka settings supplying the broker list.</param>
public UserProducer(KafkaConfig producerConfig)
{
    // Fix: the injected config was previously ignored in favour of a hard-coded
    // "kafka:9092" broker address. Honour the caller's bootstrap servers and fall
    // back to the old default only when none are configured.
    _producerConfig = new ProducerConfig
    {
        BootstrapServers = producerConfig?.BootstrapServers ?? "kafka:9092"
    };
    Console.WriteLine("added config: " + _producerConfig.BootstrapServers);
}
/// <summary>
/// Console entry point: wires Ctrl+C to a cancellation token, primes the schema
/// cache for the users topic and runs the interactive producer loop.
/// </summary>
static async Task Main(string[] args)
{
    var shutdown = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        // Keep the process alive so the producer can stop gracefully.
        e.Cancel = true;
        shutdown.Cancel();
    };

    var config = new KafkaConfig();
    Console.WriteLine($"UserProducer producing on {config.UsersTopic}. Enter user names, Ctrl+C to exit.");

    var schemaCache = new SubjectNameSchemaCache();
    schemaCache.Init(config.UsersTopic);

    var producer = new UserProducer(
        config: config,
        cts: shutdown,
        name: "UserProducer",
        topicName: config.UsersTopic,
        cache: schemaCache);

    await producer.Produce();
}
/// <summary>
/// Wires the queue's collaborators: logging, Kafka settings, MediatR dispatch
/// and the serializer used for the wire format.
/// </summary>
public MessagingQueue(ILogger<MessagingQueue> logger, KafkaConfig config, IMediator mediator, IMessageSerializer messageSerializer)
{
    this.kafkaConfig = config;
    this.logger = logger;
    this.messageSerializer = messageSerializer;
    this.mediator = mediator;
}
/// <summary>
/// Stores dependencies and allocates the bounded channel (capacity 10) that
/// decouples the poll loop from message processing.
/// </summary>
public KafkaConsumer(KafkaConfig kafkaConfig, ProtoKnownTypes knownTypes, ILogger<KafkaConsumer> logger)
{
    KafkaConfig = kafkaConfig;
    ProtoKnownTypes = knownTypes;
    Logger = logger;
    // Bounded so writers experience back-pressure when processing falls behind.
    ConsumeChannel = Channel.CreateBounded<(Headers, string, Action)>(10);
}
/// <summary>
/// Initializes a new instance of the <see cref="KafkaConsumerManager"/> class.
/// </summary>
/// <param name="logger">Logger used by the manager.</param>
/// <param name="serviceFactory">
/// Factory producing <c>IServiceScope</c> instances, each exposing an
/// <c>IServiceProvider</c> that resolves dependencies from a freshly created scope.
/// </param>
/// <param name="serviceConfig">The service config settings <see cref="KafkaConfig"/>.</param>
public KafkaConsumerManager(
    ILogger<KafkaConsumerManager> logger,
    IServiceScopeFactory serviceFactory,
    KafkaConfig serviceConfig)
{
    _serviceConfig = serviceConfig;
    _serviceFactory = serviceFactory;
    _logger = logger;
}
/// <summary>
/// Wires the queue with its logger, Kafka settings and the root scheme event
/// handler that processes incoming messages.
/// </summary>
public MessagingQueue(
    ILogger<MessagingQueue> logger,
    KafkaConfig config,
    IRootSchemeEventHandler schemeEventHandler)
{
    this.kafkaConfig = config;
    this.schemeEventHandler = schemeEventHandler;
    this.logger = logger;
}
/// <summary>
/// Captures configuration and serializer, then builds a string-valued Kafka
/// producer pointed at the configured bootstrap servers.
/// </summary>
public KafkaMessageBroker(KafkaConfig config, IJsonSerializer jsonSerializer)
{
    _config = config;
    _jsonSerializer = jsonSerializer;

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = config.BootstrapServers
    };
    _producer = new ProducerBuilder<Null, string>(producerConfig).Build();
}
/// <summary>
/// Smoke test: builds a consumer for <see cref="Rootobject"/> messages from the
/// shared configuration and runs a single consume pass.
/// </summary>
public void TestKafkaConsumer()
{
    var message = new Rootobject();
    IKafkaConfig kafkaConfig = new KafkaConfig(conf);
    var processor = new KProcessor();

    var consumer = new KafkaConsumer<Rootobject>(kafkaConfig, message, processor);
    consumer.ConsumeMessage();
}
/// <summary>
/// Wires the queue with its logger, Kafka settings and the handler invoked for
/// each received message.
/// </summary>
public MessagingQueue(
    ILogger<MessagingQueue> logger,
    KafkaConfig config,
    IMessageHandler eventHandler)
{
    this.kafkaConfig = config;
    this.eventHandler = eventHandler;
    this.logger = logger;
}
/// <summary>
/// Builds the consumer: resolves options, creates a named logger and hooks the
/// error, consume-error and log events to their handlers.
/// </summary>
public KafkaConsumer(IOptions<KafkaConfig> options, ILoggerFactory logger, ConsumerDiagnostic consumerDiagnostic, string topicName)
{
    _configuration = options.Value;
    _consumerDiagnostic = consumerDiagnostic;
    _topicName = topicName;
    _logger = logger.CreateLogger(nameof(KafkaConsumer));

    OnError += Error;
    OnConsumeError += ConsumeError;
    OnLog += Log;
}
/// <summary>
/// Builds a string-valued producer for the matched-orders topic using the
/// configured bootstrap servers.
/// </summary>
public KafkaPublisherMessageBroker(KafkaConfig config, IJsonSerializer jsonSerializer)
{
    _jsonSerializer = jsonSerializer;
    _topic = config.MatchedOrdersTopic;

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = config.BootstrapServers
    };
    _producer = new ProducerBuilder<Null, string>(producerConfig).Build();
}
/// <summary>
/// Stores collaborators, then derives the client and producer configurations.
/// </summary>
public TopicBroker(ILogger<TopicBroker> logger, IMessageBuilder messageBuilder, IOptions<KafkaConfig> kafkaConfig)
{
    _logger = logger;
    _messageBuilder = messageBuilder;
    _kafkaConfig = kafkaConfig.Value;
    // NOTE(review): the Create* helpers presumably read _kafkaConfig, so it is
    // assigned first — confirm against their implementations.
    _clientConfig = CreateClientConfig();
    _producerConfig = CreateProducerConfig();
}
/// <summary>
/// Creates the dispatcher and its UTF-8 string/string Kafka producer using the
/// configured events-producer settings.
/// </summary>
public DomainEventDispatcher(
    KafkaConfig kafkaConfig,
    IMessageSerializer messageSerializer)
{
    this.messageSerializer = messageSerializer;
    this.kafkaConfig = kafkaConfig;
    this.producer = new Producer<string, string>(
        kafkaConfig.KAFKA_EVENTS_PRODUCER_CONFIG,
        new StringSerializer(Encoding.UTF8),
        new StringSerializer(Encoding.UTF8));
}
/// <summary>
/// Registers the Kafka consumer/producer, the proto type registry built from
/// the given search patterns, and the config singleton.
/// </summary>
internal static IServiceCollection AddKafka(this IServiceCollection services, KafkaConfig kafkaConfig, IEnumerable<string> searchPatterns)
{
    services.AddTransient<KafkaConsumer>();
    services.AddTransient<KafkaProducer>();
    services.AddSingleton(_ => new ProtoKnownTypes(searchPatterns.ToArray()));
    services.AddSingleton(_ => kafkaConfig);
    return services;
}
/// <summary>
/// Host extension that registers Kafka services, always including the
/// "SeungYongShim.Kafka*.dll" assemblies in the proto search patterns.
/// </summary>
public static IHostBuilder UseKafka(this IHostBuilder host, KafkaConfig kafkaConfig, params string[] searchPatterns)
{
    host.ConfigureServices((_, services) =>
        services.AddKafka(kafkaConfig, searchPatterns.Append("SeungYongShim.Kafka*.dll")));
    return host;
}
/// <summary>
/// Used for creating the configuration dictionary used by the kafka consumer.
/// </summary>
/// <param name="configuration">The strongly-typed Kafka settings bound at startup.</param>
/// <returns>The configuration setup on startup.</returns>
protected virtual Dictionary<string, object> CreateConfiguration(KafkaConfig configuration)
{
    var settings = new Dictionary<string, object>();
    settings["group.id"] = configuration.GroupId;
    settings["enable.auto.commit"] = configuration.EnableAutoCommit;
    settings["auto.commit.interval.ms"] = configuration.AutoCommitIntervalMs;
    settings["statistics.interval.ms"] = configuration.StatisticsIntervalMs;
    settings["bootstrap.servers"] = configuration.BrokerList;
    settings["default.topic.config"] = configuration.TopicConfigs;
    return settings;
}
/// <summary>
/// Resolves the bound options and creates a UTF-8 string producer that reports
/// all deliveries, not only errors.
/// </summary>
public KafkaProducer(IOptions<KafkaConfig> options)
{
    _config = options.Value;
    var producerSettings = new Dictionary<string, object>
    {
        ["bootstrap.servers"] = _config.BootstrapServers,
        ["delivery.report.only.error"] = false
    };
    _producer = new Producer<Null, string>(producerSettings, null, new StringSerializer(Encoding.UTF8));
}
/// <summary>
/// Extension method used for setting up kafka in the middleware.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="Configuration">The configuration for settings; the "Kafka" section is bound.</param>
public static IServiceCollection AddKafkaServices<E>(this IServiceCollection services, IConfiguration Configuration)
    where E : KafkaConsumer
{
    var kafkaConfig = new KafkaConfig();
    Configuration.GetSection("Kafka").Bind(kafkaConfig);

    services.AddSingleton(kafkaConfig);
    services.AddSingleton<IKafkaConsumerManager, KafkaConsumerManager>();
    services.AddSingleton<IKafkaProducer, KafkaProducer>();
    services.AddTransient<IKafkaConsumer, E>();
    return services;
}
/// <summary>
/// Builds a string/string producer against the configured brokers, requiring
/// acknowledgement from all in-sync replicas.
/// </summary>
public KafkaProducer(KafkaConfig kafkaConfig, ILogger<KafkaConsumer> logger)
{
    _log = logger;
    KafkaConfig = kafkaConfig;

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = kafkaConfig.Brokers,
        // Strongest delivery guarantee: every in-sync replica must ack.
        Acks = Acks.All,
    };
    Producer = new ProducerBuilder<string, string>(producerConfig).Build();
}
/// <summary>
/// Consumer worker that retries failed messages and dead-letters them after the
/// handler's <see cref="RetryAttribute"/> budget is exhausted. Retry and DLQ
/// topic names are derived as "{Topic}__{groupId}__{postfix}".
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the message handler is not decorated with <see cref="RetryAttribute"/>.
/// Previously this surfaced as an undiagnosable NullReferenceException because
/// GetCustomAttribute returns null when the attribute is absent.
/// </exception>
public RetryConsumerWorker(KafkaConfig option, ILogger<RetryConsumerWorker<TKey, TValue>> logger, IMessageHandler<TKey, TValue> messageHandler, IDeserializer<TKey> keyDersializer, IDeserializer<TValue> valueDersializer, IKafkaDependentProducer<TKey, TValue> producer)
    : base(option, logger, messageHandler, keyDersializer, valueDersializer)
{
    _producer = producer;

    var retry = MessageHandler.GetType().GetCustomAttribute<RetryAttribute>()
        ?? throw new InvalidOperationException(
            $"{MessageHandler.GetType().Name} must be decorated with [Retry] to be used with a retry consumer worker.");

    _maxRetry = retry.MaxRetry;
    _retryWait = retry.Wait;
    _dlqTopic = $"{Topic}__{option.Consumer.GroupId}__{retry.DlqPostfix}";
    _retryTopic = $"{Topic}__{option.Consumer.GroupId}__{retry.RetryPostfix}";
}
/// <summary>
/// Builds the inventory service's Kafka consumer/producer configs from the
/// shared cafe settings; config errors are logged and swallowed (best-effort).
/// </summary>
public InventoryKafkaService(CafeKafkaSettings cafeKafkaSettings)
{
    _stockRoom = new StockRoom();
    try
    {
        _consumerConfig = KafkaConfig.CreateConsumerConfig(cafeKafkaSettings);
        _producerConfig = KafkaConfig.CreateProducerConfig(cafeKafkaSettings);
        Console.WriteLine("Read Kafka Bootstrap: " + cafeKafkaSettings.BootstrapServers);
    }
    catch (Exception ex)
    {
        // NOTE(review): configs stay null after a failure — confirm downstream
        // callers tolerate that before hardening this.
        Console.WriteLine("Exception in kafka settings: " + ex);
    }
}
/// <summary>
/// Builds the barista's Kafka consumer/producer configs from the shared cafe
/// settings; config errors are logged and swallowed (best-effort startup).
/// </summary>
public BaristaKafkaService(CafeKafkaSettings cafeKafkaSettings)
{
    _barista = new Barista();
    try
    {
        _consumerConfig = KafkaConfig.CreateConsumerConfig(cafeKafkaSettings);
        _producerConfig = KafkaConfig.CreateProducerConfig(cafeKafkaSettings);
        Console.WriteLine("Read Kafka Bootstrap: " + cafeKafkaSettings.BootstrapServers);
    }
    catch (Exception ex)
    {
        // NOTE(review): configs stay null after a failure — confirm downstream
        // callers tolerate that before hardening this.
        Console.WriteLine("Exception in kafka settings: " + ex);
    }
}
/// <summary>
/// Builds the kitchen's Kafka consumer/producer configs from the shared cafe
/// settings; config errors are logged and swallowed (best-effort startup).
/// </summary>
public KitchenKafkaService(CafeKafkaSettings cafeKafkaSettings)
{
    _kitchen = new Kitchen();
    try
    {
        _consumerConfig = KafkaConfig.CreateConsumerConfig(cafeKafkaSettings);
        _producerConfig = KafkaConfig.CreateProducerConfig(cafeKafkaSettings);
        Console.WriteLine(DateTime.Now + " - Read Kafka Bootstrap: " + cafeKafkaSettings.BootstrapServers);
    }
    catch (Exception ex)
    {
        // NOTE(review): configs stay null after a failure — confirm downstream
        // callers tolerate that before hardening this.
        Console.WriteLine("Exception in kafka settings: " + ex);
    }
}
/// <summary>
/// Creates the SASL/SSL-authenticated producer that publishes football data to
/// the configured topic.
/// </summary>
public FootballDataTarget(ILogger<FootballDataTarget> logger, KafkaConfig kafkaConfig)
{
    _logger = logger;
    _topicName = kafkaConfig.Topic;

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = kafkaConfig.BootstrapServers,
        SecurityProtocol = SecurityProtocol.SaslSsl,
        SaslMechanism = SaslMechanism.Plain,
        SaslUsername = kafkaConfig.Username,
        SaslPassword = kafkaConfig.Password
    };
    _producer = new ProducerBuilder<int, string>(producerConfig).Build();
}
// integrity of KafkaConfig type
public void Test_KafkaConfig_Model()
{
    // Arrange: all string properties explicitly initialised to empty strings.
    var kafkaConfig = new KafkaConfig
    {
        BootstrapServers = "",
        ClientId = "",
        GroupId = "",
        Topic = ""
    };

    // Dedicated null assertions report the failing member instead of just
    // "expected True" — an improvement over Assert.True(x != null).
    Assert.NotNull(kafkaConfig.BootstrapServers);
    Assert.NotNull(kafkaConfig.ClientId);
    Assert.NotNull(kafkaConfig.GroupId);
    Assert.NotNull(kafkaConfig.Topic);
    // Type checks retained from the original model-integrity contract.
    Assert.True(kafkaConfig.BootstrapServers is string);
    Assert.True(kafkaConfig.ClientId is string);
    Assert.True(kafkaConfig.GroupId is string);
    Assert.True(kafkaConfig.Topic is string);
}
/// <summary>
/// Stores collaborators and prepares the consumer configuration used to receive
/// bid/ask events (auto-commit disabled, latest-offset reset).
/// </summary>
public KafkaConsumerMessageBroker(KafkaConfig config, IJsonSerializer jsonSerializer, IBidPlacedHandler bidPlacedHandler, IAskPlacedHandler askPlacedHandler)
{
    _config = config;
    _jsonSerializer = jsonSerializer;
    _bidPlacedHandler = bidPlacedHandler;
    _askPlacedHandler = askPlacedHandler;

    _consumerConfig = new ConsumerConfig
    {
        BootstrapServers = config.BootstrapServers,
        GroupId = config.ConsumerGroupId,
        // Auto-commit off; offsets are presumably committed by the consume
        // loop after handling — confirm against the consumer implementation.
        EnableAutoCommit = false,
        StatisticsIntervalMs = 5000,
        SessionTimeoutMs = 6000,
        AutoOffsetReset = AutoOffsetReset.Latest,
        EnablePartitionEof = false
    };
}
/// <summary>
/// Base wiring for consumer workers: builds the deserializing Kafka consumer
/// and resolves the topic from the handler's <see cref="TopicAttribute"/>.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the handler type carries no <see cref="TopicAttribute"/>;
/// previously the null attribute caused a NullReferenceException on Topic lookup.
/// </exception>
public ConsumerWorkerBase(KafkaConfig option, ILogger<IConsumerWorker<TKey, TValue>> logger, IMessageHandler<TKey, TValue> messageHandler, IDeserializer<TKey> keyDersializer, IDeserializer<TValue> valueDersializer)
{
    Config = option.Consumer;
    Consumer = new ConsumerBuilder<TKey, TValue>(Config)
        .SetKeyDeserializer(keyDersializer)
        .SetValueDeserializer(valueDersializer)
        .Build();

    // Fixed: MessageHandler was assigned twice in the original; assign once.
    MessageHandler = messageHandler;
    Logger = logger;

    var topic = messageHandler.GetType().GetCustomAttribute<TopicAttribute>()
        ?? throw new InvalidOperationException(
            $"{messageHandler.GetType().Name} must be decorated with [Topic].");
    Topic = topic.Name;
}
/// <summary>
/// Creates the hosted consumer with manual-commit, earliest-offset settings.
/// NOTE(review): the group id "DemoConsumerGroup" is hard-coded — consider
/// sourcing it from KafkaConfig.
/// </summary>
public KafkaConsumer(IOptions<KafkaConfig> kafkaConfig, ILogger<HostedConsumerService> logger)
{
    _logger = logger;
    _kafkaConfig = kafkaConfig.Value;

    var consumerConfig = new ConsumerConfig
    {
        BootstrapServers = _kafkaConfig.BootstrapServers,
        Acks = Acks.All,
        // Identify this consumer instance by machine name.
        ClientId = Dns.GetHostName(),
        CheckCrcs = true,
        EnableAutoCommit = false,
        GroupId = "DemoConsumerGroup",
        AutoOffsetReset = AutoOffsetReset.Earliest,
        FetchMinBytes = 512,
        FetchWaitMaxMs = 150
    };
    _consumer = new ConsumerBuilder<Ignore, string>(consumerConfig).Build();
}
/// <summary>
/// Exercises the Debezium consumer against the candidate-rates topic, running
/// until cancelled with Ctrl+C.
/// </summary>
public async Task It_should_list_all_types()
{
    var shutdown = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        // Keep the process alive so the consumer can stop gracefully.
        e.Cancel = true;
        shutdown.Cancel();
    };

    var config = new KafkaConfig();
    var consumer = new DebeziumConsumer(
        config: config,
        cts: shutdown,
        name: "UserConsumer",
        topicName: "workforce.recruit.candidate_rates");

    await consumer.Consume();
}
/// <summary>
/// Server-sent-events endpoint: starts a background Kafka consumer feeding
/// <c>queue</c>, then relays dequeued order-up events to the response as SSE
/// "data:" frames until the request is cancelled.
/// </summary>
/// <param name="cancellationToken">Signalled when the client disconnects.</param>
public async Task Get(CancellationToken cancellationToken)
{
    var response = Response;
    response.StatusCode = 200;
    response.Headers.Add("Content-Type", "text/event-stream");
    response.Headers.Add("Cache-Control", "no-cache");
    response.Headers.Add("Connection", "keep-alive");

    String kafkaBootstrap = Environment.GetEnvironmentVariable("DOTNET_CAFE_KAFKA_BOOTSTRAP") ?? "127.0.0.1:9099";
    var cafeKafkaSettings = new CafeKafkaSettings(kafkaBootstrap);
    var consumerConfig = KafkaConfig.CreateConsumerConfig(cafeKafkaSettings);

    // Fix: the Task.Run result was previously dropped implicitly (CS4014); the
    // discard makes the fire-and-forget intent explicit.
    _ = Task.Run(() => ConsumeKafka(consumerConfig, cancellationToken), cancellationToken);

    await response.Body.FlushAsync();

    // Fix: hoisted out of the message loop — the options never change, and
    // rebuilding JsonSerializerOptions per message defeats its metadata cache.
    var serializerOptions = new JsonSerializerOptions
    {
        Converters = { new JsonStringEnumConverter() },
        IgnoreNullValues = true
    };

    while (!cancellationToken.IsCancellationRequested)
    {
        while (queue.TryDequeue(out var message))
        {
            OrderUpEvent item = JsonSerializer.Deserialize<OrderUpEvent>(message);
            DashboardUpdate dashboardUpdate = new DashboardUpdate(item);
            String dashboardUpdateJson = JsonSerializer.Serialize(dashboardUpdate, serializerOptions);
            await response.WriteAsync($"data:{dashboardUpdateJson} \n\n");
            await response.Body.FlushAsync();
        }
        // Poll once per second while the queue is empty.
        await Task.Delay(1 * 1000);
    }
}