        protected override async Task ExecuteAsync(CancellationToken stoppingToken)
        {
            await Task.Yield();       // Yield so host startup is not blocked by this long-running consume; see https://github.com/dotnet/runtime/issues/36063

            await consumer.Consume(stoppingToken, "TransactionReply", replyGroup.MyUniqueConsumerGroup, "transaction-replies", async msg =>
            {
                await ProcessMessage(msg);
            });
        }
Example 2
        protected override async Task ExecuteAsync(CancellationToken stoppingToken)
        {
            await Task.Yield();       // Yield so host startup is not blocked by this long-running consume; see https://github.com/dotnet/runtime/issues/36063

            await requestConsumer.Consume(stoppingToken,
                                          "TransactionReply",
                                          "transactionservice",
                                          "transaction-requests", async cr =>
            {
                await ProcessMessage(cr);
            });
        }
Example 3
        protected override async Task ExecuteAsync(CancellationToken stoppingToken)
        {
            _logger.LogInformation($"Starting EventManager service. Consuming to the following topics [{string.Join(", ", _configuration.Topics)}]");

            await Task.Run(() =>
            {
                while (!stoppingToken.IsCancellationRequested)
                {
                    _consumer.Consume(stoppingToken);
                }
            }, stoppingToken);
        }
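
Example 3 loops over `_consumer.Consume(stoppingToken)` but discards the returned record. For reference, below is a minimal sketch of the same background loop written against the stock Confluent.Kafka `IConsumer<TKey, TValue>` API, where the `ConsumeResult` is actually inspected and cancellation and consume errors are handled explicitly. The `KafkaWorker` class name, broker address, group id and topic are assumptions for illustration, not part of the example above.

using System;
using System.Threading;
using System.Threading.Tasks;
using Confluent.Kafka;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;

public sealed class KafkaWorker : BackgroundService
{
    private readonly ILogger<KafkaWorker> _logger;

    public KafkaWorker(ILogger<KafkaWorker> logger) => _logger = logger;

    protected override Task ExecuteAsync(CancellationToken stoppingToken) =>
        Task.Run(() =>
        {
            var config = new ConsumerConfig
            {
                BootstrapServers = "localhost:9092",   // assumed broker address
                GroupId          = "eventmanager",     // assumed group id
                AutoOffsetReset  = AutoOffsetReset.Earliest
            };

            using var consumer = new ConsumerBuilder<Ignore, string>(config).Build();
            consumer.Subscribe("events");              // assumed topic name

            try
            {
                while (!stoppingToken.IsCancellationRequested)
                {
                    try
                    {
                        // Blocks until a message arrives or the token is cancelled.
                        var cr = consumer.Consume(stoppingToken);
                        _logger.LogInformation("Received {Value} at {Offset}",
                                               cr.Message.Value, cr.TopicPartitionOffset);
                    }
                    catch (ConsumeException ex)
                    {
                        _logger.LogError(ex, "Consume error: {Reason}", ex.Error.Reason);
                    }
                }
            }
            catch (OperationCanceledException)
            {
                // Expected on shutdown.
            }
            finally
            {
                consumer.Close();   // commit final offsets and leave the group cleanly
            }
        }, stoppingToken);
}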
Example 4
        protected override async Task ExecuteAsync(CancellationToken stoppingToken)
        {
            await Task.Yield(); // Yield so host startup is not blocked by this long-running consume; see https://github.com/dotnet/runtime/issues/36063

            await consumer.Consume(stoppingToken,
                                   "TransactionCreated",
                                   "highvaluenotification",
                                   "TRANSACTIONS_HIGHVALUE",
                                   async cr =>
            {
                await ProcessMessage(cr);
            });
        }
Example 5
        static async Task Main(string[] args)
        {
            string BootstrapServers = "10.132.12.11:9092,10.132.12.7:9092,10.132.12.8:9092";// "10.125.234.185:9092,10.125.233.31:9092,10.125.232.186:9092";
            // Create a producer
            var config = new KafkaProducerConfigForCredit
            {
                BrokerServers    = BootstrapServers,
                SaslUsername     = "******",
                SaslPassword     = "******",
                SecurityProtocol = Confluent.Kafka.SecurityProtocol.SaslPlaintext,
                SaslMechanism    = Confluent.Kafka.SaslMechanism.Plain,
                TopicName        = "yudiefly-home-app"
            };

            var producer = new KafkaProducer <string>(config);

            producer.KakfaProducerSucces    += Producer_KakfaProducerSucces;
            producer.KakfaProducerException += Producer_KakfaProducerException;
            for (int i = 0; i < 50; i++)
            {
                //await producer.ProduceAsync("i=" + i.ToString(), "");// "key_" + i.ToString()
                producer.Produce("TT-i:" + i.ToString(), "my-key:" + i.ToString());
            }

            // Create a consumer
            var consumer = new KafkaConsumer <string>(new KafkaConsumerConfigForCredit
            {
                #region These settings are optional (the defaults also work)
                //GroupId = Guid.NewGuid().ToString(),
                //SessionTimeoutMs = 6000,
                //StatisticsIntervalMs = 6000,
                #endregion

                BrokerServers    = BootstrapServers,
                SaslUsername     = "******", //NPtaqalu
                SaslPassword     = "******", //Y8gutwQQuPNmUYWC
                SecurityProtocol = Confluent.Kafka.SecurityProtocol.SaslPlaintext,
                SaslMechanism    = Confluent.Kafka.SaslMechanism.Plain,
                Topics           = new System.Collections.Generic.List <string>()
                {
                    "yudiefly-home-app"
                }
            });

            consumer.OnMessage += Consumer_OnMessage;
            consumer.KakfaConsumerException += Consumer_KakfaConsumerException;
            consumer.Consume();

            Console.ReadLine();
        }
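
Example 5 drives custom `KafkaProducerConfigForCredit` / `KafkaConsumerConfigForCredit` wrappers. For comparison, here is a minimal sketch of the same SASL/PLAIN producer setup using Confluent.Kafka's `ProducerConfig` and `ProducerBuilder` directly; the broker list and topic are reused from the example, the credentials stay masked, and the wrapper's exact argument order and behaviour are not assumed.

using System;
using System.Threading.Tasks;
using Confluent.Kafka;

class Program
{
    static async Task Main()
    {
        var config = new ProducerConfig
        {
            BootstrapServers = "10.132.12.11:9092,10.132.12.7:9092,10.132.12.8:9092",
            SecurityProtocol = SecurityProtocol.SaslPlaintext,
            SaslMechanism    = SaslMechanism.Plain,
            SaslUsername     = "******",   // placeholder, as in the example
            SaslPassword     = "******"
        };

        using var producer = new ProducerBuilder<string, string>(config).Build();

        for (int i = 0; i < 50; i++)
        {
            // Await each delivery report; for higher throughput the tasks could be batched.
            var result = await producer.ProduceAsync(
                "yudiefly-home-app",
                new Message<string, string> { Key = $"my-key:{i}", Value = $"TT-i:{i}" });

            Console.WriteLine($"Delivered to {result.TopicPartitionOffset}");
        }

        producer.Flush(TimeSpan.FromSeconds(10));
    }
}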
Example 6
        static void Main(string[] args)
        {
            var consumer = new KafkaConsumer <string, string>();

            consumer.Subscribe("my-topic-to-subscribe");

            var result = consumer.Consume();

            if (result != null)
            {
                consumer.CommitResult(result);
            }

            Console.WriteLine(result?.Message);
        }
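
Example 6 relies on a wrapper whose `Consume`/`CommitResult` semantics are not shown. Below is a minimal sketch of the equivalent subscribe, consume and manual-commit flow with the stock Confluent.Kafka API, assuming explicit offset commits are the intent; the broker address and group id are placeholders.

using System;
using Confluent.Kafka;

class Program
{
    static void Main()
    {
        var config = new ConsumerConfig
        {
            BootstrapServers = "localhost:9092",   // assumed broker address
            GroupId          = "example-group",    // assumed group id
            EnableAutoCommit = false,              // commit explicitly, as CommitResult does above
            AutoOffsetReset  = AutoOffsetReset.Earliest
        };

        using var consumer = new ConsumerBuilder<string, string>(config).Build();
        consumer.Subscribe("my-topic-to-subscribe");

        // Consume with a timeout so the call does not block forever on an empty topic.
        var result = consumer.Consume(TimeSpan.FromSeconds(5));

        if (result != null)
        {
            consumer.Commit(result);               // commit the offset of the consumed message
            Console.WriteLine(result.Message.Value);
        }

        consumer.Close();
    }
}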
Example 7
        static void Main(string[] args)
        {
            var config = new ConsumerConfig
            {
                BootstrapServers = "132.232.27.116:9092",
                GroupId          = "yaopeng",
                AutoOffsetReset  = AutoOffsetReset.Earliest
            };

            string text;

            Console.WriteLine("接受中......");
            while ((text = Console.ReadLine()) != "q")
            {
                using (var kafkaConsumer = new KafkaConsumer(config, "topic-d"))
                {
                    var result = kafkaConsumer.Consume <object>();
                    if (result != null)
                    {
                        Console.WriteLine(result.ToString());
                    }
                }
            }
        }
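
Note that Example 7 constructs a new consumer inside the `while` loop, so every iteration re-joins the consumer group. A minimal sketch of the same loop with a single long-lived consumer, written against the stock Confluent.Kafka API with the broker, group id and topic taken from the example:

using System;
using Confluent.Kafka;

class Program
{
    static void Main()
    {
        var config = new ConsumerConfig
        {
            BootstrapServers = "132.232.27.116:9092",
            GroupId          = "yaopeng",
            AutoOffsetReset  = AutoOffsetReset.Earliest
        };

        // One consumer for the whole session; creating one per loop iteration
        // would force a fresh group join and rebalance for every message.
        using var consumer = new ConsumerBuilder<Ignore, string>(config).Build();
        consumer.Subscribe("topic-d");

        Console.WriteLine("Receiving... (type 'q' to quit)");
        string text;
        while ((text = Console.ReadLine()) != "q")
        {
            var result = consumer.Consume(TimeSpan.FromSeconds(1));
            if (result != null)
            {
                Console.WriteLine(result.Message.Value);
            }
        }

        consumer.Close();
    }
}

Keeping a single consumer alive for the session avoids repeated rebalances and lets committed offsets advance normally.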
Example 8
        static void Main(string[] args)
        {
            CancellationTokenSource cts = new CancellationTokenSource();

            Console.CancelKeyPress += (_, e) =>
            {
                e.Cancel = true;                 // prevent the process from terminating.
                cts.Cancel();
            };
            var globalScope   = ConfigureServices();
            var loggerFactory = globalScope.ServiceProvider.GetRequiredService <ILoggerFactory>();
            ILogger <KafkaProducer> loggerProducer = loggerFactory.CreateLogger <KafkaProducer>();
            var loggerConsumer = loggerFactory.CreateLogger <KafkaConsumer>();

            using (var dependentKafkaProducer = new KafkaProducer(_kafkaEndpoint, loggerProducer))
                using (var producer = new KafkaProducer(dependentKafkaProducer))         // exercises the constructor that wraps an existing producer
                    using (KafkaConsumer consumer = new KafkaConsumer(_kafkaEndpoint, loggerConsumer)
                           , consumer2 = new KafkaConsumer(_kafkaEndpoint, loggerConsumer))
                    {
                        consumer.Consume(new[] { bTopicNameResp })
                        .Subscribe(message => Console.WriteLine($"Boy name {message.Value} is recommended"), cts.Token);
                        consumer2.Consume(new[] { gTopicNameResp })
                        .Subscribe(message => Console.WriteLine($"Girl name {message.Value} is recommended"), cts.Token);

                        string userInput;
                        var    rnd = new Random();
                        do
                        {
                            Console.WriteLine(userHelpMsg);
                            userInput = Console.ReadLine();
                            switch (userInput)
                            {
                            case "b":
                                var    nameCount = 1000;
                                Task[] jobs      = Enumerable.Range(0, nameCount)
                                                   .Select(i => new Message <Null, string> {
                                    Value = $"{i:D4} {_boyNames[rnd.Next(0, 5)]}"
                                })
                                                   .Select(m => producer.ProduceAsync(bTopicNameResp, m))
                                                   .ToArray();
                                Stopwatch sw = Stopwatch.StartNew();
                                Task.WaitAll(jobs);
                                Console.WriteLine($"Sending {nameCount} за {sw.ElapsedMilliseconds / 1000:N3}с");
                                break;

                            case "g":
                                producer.ProduceAsync(gTopicNameResp, new Message <Null, string> {
                                    Value = _girlNames[rnd.Next(0, 5)]
                                }
                                                      , cts.Token).GetAwaiter().GetResult();
                                break;

                            case "q":
                                break;

                            default:
                                Console.WriteLine($"Unknown command.");
                                break;
                            }
                        } while (userInput != "q");
                    }
        }
Example 9
        public void TestConsumer()
        {
            var client = new Mock <IClusterClient>();

            client.SetupGet(c => c.Messages)
            .Returns(Observable.FromEvent <RawKafkaRecord>(a => client.Object.MessageReceived += a,
                                                           a => client.Object.MessageReceived -= a));

            // Bad arguments
            Assert.That(() => new KafkaConsumer <string, string>(null, client.Object), Throws.ArgumentException);
            Assert.That(() => new KafkaConsumer <string, string>("", client.Object), Throws.ArgumentException);
            Assert.That(() => new KafkaConsumer <string, string>("toto", null), Throws.InstanceOf <ArgumentNullException>());

            using (var consumer = new KafkaConsumer <string, string>("topic", client.Object))
            {
                // Double new on same topic/TKey/TValue
                Assert.That(() => new KafkaConsumer <string, string>("topic", client.Object), Throws.ArgumentException);

                // Consume / Stop
                consumer.Consume(2, 42);
                consumer.ConsumeFromLatest();
                consumer.ConsumeFromLatest(2);
                consumer.ConsumeFromEarliest();
                consumer.ConsumeFromEarliest(2);
                consumer.StopConsume();
                consumer.StopConsume(2);
                consumer.StopConsume(2, 42);

                client.Verify(c => c.Consume(It.IsAny <string>(), It.IsAny <int>(), It.IsAny <long>()), Times.Once());
                client.Verify(c => c.StopConsume("topic", 2, 42));
                client.Verify(c => c.ConsumeFromLatest(It.IsAny <string>()), Times.Once());
                client.Verify(c => c.ConsumeFromLatest("topic"));
                client.Verify(c => c.ConsumeFromLatest(It.IsAny <string>(), It.IsAny <int>()), Times.Once());
                client.Verify(c => c.ConsumeFromLatest("topic", 2));
                client.Verify(c => c.ConsumeFromEarliest(It.IsAny <string>()), Times.Once());
                client.Verify(c => c.ConsumeFromEarliest("topic"));
                client.Verify(c => c.ConsumeFromEarliest(It.IsAny <string>(), It.IsAny <int>()), Times.Once());
                client.Verify(c => c.ConsumeFromEarliest("topic", 2));

                client.Verify(c => c.StopConsume(It.IsAny <string>()), Times.Once());
                client.Verify(c => c.StopConsume("topic"));
                client.Verify(c => c.StopConsume(It.IsAny <string>(), It.IsAny <int>()), Times.Once());
                client.Verify(c => c.StopConsume("topic", 2));
                client.Verify(c => c.StopConsume(It.IsAny <string>(), It.IsAny <int>(), It.IsAny <long>()), Times.Once());
                client.Verify(c => c.StopConsume("topic", 2, 42));

                bool messageObserved = false;
                bool messageEvent    = false;
                KafkaRecord <string, string> received = default(KafkaRecord <string, string>);

                consumer.MessageReceived += kr =>
                {
                    received     = kr;
                    messageEvent = true;
                };
                consumer.Messages.Subscribe(kr =>
                {
                    messageObserved = true;
                });

                var record = new RawKafkaRecord
                {
                    Topic     = "topic",
                    Key       = "key",
                    Value     = "data",
                    Partition = 2,
                    Offset    = 42
                };

                client.Raise(c => c.MessageReceived += null, record);

                Assert.IsTrue(messageEvent);
                Assert.IsTrue(messageObserved);
                Assert.AreEqual("topic", received.Topic);
                Assert.AreEqual("key", received.Key);
                Assert.AreEqual("data", received.Value);
                Assert.AreEqual(2, received.Partition);
                Assert.AreEqual(42, received.Offset);

                record.Key      = null;
                messageObserved = false;
                messageEvent    = false;
                received        = default(KafkaRecord <string, string>);
                client.Raise(c => c.MessageReceived += null, record);

                Assert.IsTrue(messageEvent);
                Assert.IsTrue(messageObserved);
                Assert.AreEqual("topic", received.Topic);
                Assert.IsNull(received.Key);
                Assert.AreEqual("data", received.Value);
                Assert.AreEqual(2, received.Partition);
                Assert.AreEqual(42, received.Offset);
            }

            // Dispose: can register another producer with same Topic/TKey/TValue once
            // the previous one has been disposed.
            client = new Mock <IClusterClient>();
            client.SetupGet(c => c.Messages)
            .Returns(Observable.FromEvent <RawKafkaRecord>(a => client.Object.MessageReceived += a,
                                                           a => client.Object.MessageReceived -= a));
            var consumer2 = new KafkaConsumer <string, string>("topic", client.Object);

            // Dispose: observable are completed and events no longer subscribed
            bool messageCompleted = false;
            bool messageEvent2    = false;

            consumer2.Messages.Subscribe(kr => { }, () => messageCompleted = true);
            consumer2.MessageReceived += _ => messageEvent2 = true;
            consumer2.Dispose();

            client.Verify(c => c.StopConsume(It.IsAny <string>()), Times.Once()); // Dispose stops all
            client.Verify(c => c.StopConsume("topic"), Times.Once());

            var record2 = new RawKafkaRecord
            {
                Topic     = "topic",
                Key       = "key",
                Value     = "data",
                Partition = 2,
                Offset    = 42
            };

            client.Raise(c => c.MessageReceived += null, record2);

            Assert.IsTrue(messageCompleted);
            Assert.IsFalse(messageEvent2);

            // Consume / Stop no longer work
            consumer2.Consume(2, 42);
            consumer2.ConsumeFromLatest();
            consumer2.ConsumeFromLatest(2);
            consumer2.ConsumeFromEarliest();
            consumer2.ConsumeFromEarliest(2);
            consumer2.StopConsume();
            consumer2.StopConsume(2);
            consumer2.StopConsume(2, 42);

            client.Verify(c => c.Consume(It.IsAny <string>(), It.IsAny <int>(), It.IsAny <long>()), Times.Never());
            client.Verify(c => c.ConsumeFromLatest(It.IsAny <string>()), Times.Never());
            client.Verify(c => c.ConsumeFromLatest(It.IsAny <string>(), It.IsAny <int>()), Times.Never());
            client.Verify(c => c.ConsumeFromEarliest(It.IsAny <string>()), Times.Never());
            client.Verify(c => c.ConsumeFromEarliest(It.IsAny <string>(), It.IsAny <int>()), Times.Never());

            client.Verify(c => c.StopConsume(It.IsAny <string>()), Times.Once());
            client.Verify(c => c.StopConsume(It.IsAny <string>(), It.IsAny <int>()), Times.Never());
            client.Verify(c => c.StopConsume(It.IsAny <string>(), It.IsAny <int>(), It.IsAny <long>()), Times.Never());

            // Dispose: can dispose the same consumer multiple times with no effect
            Assert.That(() => consumer2.Dispose(), Throws.Nothing);
        }
Example 10
        static void Main(string[] args)
        {
            var consumer = new KafkaConsumer();

            consumer.Consume("test-topic");
        }
Example 11
        public void StartServer(CancellationToken cancellationToken)
        {
            this._logger.LogDebug(LoggingEvents.Debug, "Started Email Notification Server");
            Console.WriteLine(" *** Started App Email Notification Server ***");

            var kConsumer      = new KafkaConsumer <EmailEventArgs>(this._loggerFactory);
            var customerConfig = new Dictionary <string, object>
            {
                { "bootstrap.servers", this._kafkaServerAddress },
                { "group.id", this._notificationGroup },

                { "auto.commit.interval.ms", 5000 },
                { "auto.offset.reset", "earliest" }
            };

            var consumeTopics = new List <string>()
            {
                this._notificationTopic
            };

            kConsumer.Setup(customerConfig, consumeTopics);

            // Event handler for the email notification queue. Make sure it never throws:
            // the Kafka message-handling block does not catch exceptions on its behalf.
            Func <KMessage <EmailEventArgs>, Task> notificationHandler = async (message) =>
            {
                this._logger.LogTrace(LoggingEvents.Trace, $"Response: Partition:{message.Partition}, Offset:{message.Offset} :: {message.Message}");

                var evt = message.Message;

                this._logger.LogDebug(LoggingEvents.Trace, $"Processing notification event {evt.id} with subject:{evt.subject}");

                if (evt.notifyTo == null || evt.notifyTo.Count == 0)
                {
                    this._logger.LogError(LoggingEvents.Critical, $"notifyTo list is not populated for the message: {message.Message}");
                    return;
                }

                try {
                    await this.mailService.SendEmailAsync(evt.notifyTo, evt.subject, evt.textMsg, evt.htmlMsg, evt.notifyCC, evt.notifyBCC);

                    this._logger.LogDebug(LoggingEvents.Trace, $"Processed notification event {evt.id}");
                }
                catch (Exception ex) {
                    var msg = $"Event:{evt.id} - Retry:{evt.retries +1} - Error:{ex.Message}";

                    this._logger.LogError(LoggingEvents.Error, ex, msg);

                    try {
                        evt.retries += 1;
                        if (evt.retryLog == null)
                        {
                            evt.retryLog = new List <string>();
                        }
                        evt.retryLog.Add(msg);

                        if (evt.retries > 3)
                        {
                            // Give up
                            await this._notificationProducer.ProduceAsync(this._notificationFailureTopic, evt);

                            this._logger.LogInformation(LoggingEvents.Critical, $"Stopping notification attempt for {evt.id} after {evt.retries} retries");
                        }
                        else
                        {
                            // Put the message back for retries
                            await this._notificationProducer.ProduceAsync(this._notificationTopic, evt);
                        }
                    }
                    catch (Exception ex2) {
                        this._logger.LogCritical(LoggingEvents.Critical, ex2, $"Event:{evt.id} - Retry:{evt.retries + 1} - Error:{ex2.Message}");
                    }
                }
            };

            kConsumer.Consume(cancellationToken, notificationHandler, null, null);

            kConsumer.Dispose();
            Console.WriteLine(" *** Stopped App Email Notification Server ***");

            this._logger.LogDebug(LoggingEvents.Debug, "Stopped App Email Notification Server");
        }