        public async Task KafkaMessagingTest001_PublishConsumeUsingStringMessage_ExpectNoExceptions()
        {
            this.testLogger.LogDebug("*** KafkaMessagingTest001 ***");

            var topic = $"{TestTopic}_Test001_{Guid.NewGuid()}";

            AppEventArgs<Customer> evt = this.__getEvent();

            var kProducer      = new app.common.messaging.simple.KafkaProducer<AppEventArgs<Customer>>(this.loggerFactory);
            var producerConfig = new Dictionary<string, object>
            {
                { "bootstrap.servers", this.serverAddress }
            };
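            // Only bootstrap.servers is strictly required here; Setup presumably forwards
            // this dictionary verbatim to the underlying librdkafka client, so any
            // librdkafka configuration property can be supplied the same way.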

            kProducer.Setup(producerConfig);
            await kProducer.ProduceAsync(topic, evt);

            kProducer.Dispose();

            var kConsumer      = new app.common.messaging.simple.KafkaConsumer(this.loggerFactory);
            var consumerConfig = new Dictionary<string, object>
            {
                { "bootstrap.servers", this.serverAddress },
                { "auto.commit.interval.ms", 5000 },
                { "auto.offset.reset", "earliest" },
                { "group.id", "test-consumer-group" }
            };
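            // auto.offset.reset=earliest matters here: the topic was created moments ago
            // and the message was produced before this consumer subscribed, so starting
            // from "latest" would skip it. group.id identifies the consumer group used
            // for offset commits.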

            var consumeTopics = new List<string>()
            {
                topic
            };

            kConsumer.Setup(consumerConfig, consumeTopics);

            // Consume lazily yields messages until the cancellation token is triggered
            foreach (var message in kConsumer.Consume(this.cts.Token))
            {
                this.testLogger.LogDebug($"Response: Partition:{message.Partition}, Offset:{message.Offset} :: {message.Message}");
                var payload = AppEventArgs<Customer>.FromJson(message.Message);

                if (payload.id == evt.id)
                {
                    this.testLogger.LogDebug($"Received EventID:{evt.id} from Kafka");
                    // Cancelling the shared token ends the Consume enumeration
                    this.cts.Cancel();
                }
            }

            kConsumer.Dispose();
        }

        public async Task KafkaMessagingTest004_PublishConsumeBulkMessagesUsingWire_ExpectNoExceptions()
        {
            this.testLogger.LogDebug("*** KafkaMessagingTest004 ***");

            // Use the pre-defined test topic to check out producer performance
            var       topic     = TestTopic;
            const int testCount = 1000;
            var       kProducer = new app.common.messaging.simple.KafkaProducer<AppEventArgs<Customer>>(this.loggerFactory);

            // Config for fast synchronous writes without client-side buffering
            var producerConfig = new Dictionary<string, object>
            {
                { "bootstrap.servers", this.serverAddress },
                { "retries", 0 },
                { "socket.blocking.max.ms", 1 },
                { "queue.buffering.max.ms", 0 },
                { "batch.num.messages", 1 },
                { "socket.nagle.disable", true }
            };
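            // These librdkafka settings trade throughput for per-message latency:
            // retries=0 disables resends, queue.buffering.max.ms=0 and batch.num.messages=1
            // effectively turn off client-side batching, socket.nagle.disable=true disables
            // Nagle's algorithm, and socket.blocking.max.ms=1 (an older librdkafka setting)
            // caps how long internal socket operations may block.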

            kProducer.Setup(producerConfig);

            // Generate 1000 events
            var opTimer = Stopwatch.StartNew();

            Task[] tasks = new Task[testCount];
            for (int i = 0; i < testCount; i++)
            {
                AppEventArgs<Customer> evt = this.__getEvent();

                // Kick off the sends without awaiting so they go out as soon as possible
                tasks[i] = kProducer.ProduceAsync(topic, evt);
            }
            await Task.WhenAll(tasks);
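            // (By this point every ProduceAsync call was already in flight; WhenAll only
            // awaits the outstanding tasks, which presumably complete when their delivery
            // reports arrive.)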

            tasks = null;
            opTimer.Stop();

            this.testLogger.LogInformation($"KafkaProducer ::Took {opTimer.Elapsed.TotalSeconds} sec to send {testCount} events");
            kProducer.Dispose();

            // Test Wire based Kafka Producer
            var kProducer2 = new app.common.messaging.generic.KafkaProducer<AppEventArgs<Customer>>(this.loggerFactory);

            kProducer2.Setup(producerConfig);

            // Generate 1000 events
            opTimer = Stopwatch.StartNew();
            tasks   = new Task[testCount];
            for (int i = 0; i < testCount; i++)
            {
                AppEventArgs<Customer> evt = this.__getEvent();

                // Kick off the sends without awaiting so they go out as soon as possible
                tasks[i] = kProducer2.ProduceAsync(topic, evt);
            }
            await Task.WhenAll(tasks);

            opTimer.Stop();
            this.testLogger.LogInformation($"KafkaProducer2::Took {opTimer.Elapsed.TotalSeconds} sec to send {testCount} events");
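            // The two timings are directly comparable: same broker, topic and message count.
            // Per the test name, the only difference is the serialization path used by the
            // generic producer (presumably Wire) versus the simple string/JSON producer.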
            kProducer2.Dispose();
        }

        public async Task KafkaMessagingTest002_PublishConsumeBulkMessages_ExpectNoExceptions()
        {
            this.testLogger.LogDebug("*** KafkaMessagingTest002 ***");

            var topic = $"{TestTopic}_Test002_{Guid.NewGuid()}";

            var kProducer = new app.common.messaging.simple.KafkaProducer<AppEventArgs<Customer>>(this.loggerFactory);

            // Config for fast synchronous writes without client-side buffering
            var producerConfig = new Dictionary<string, object>
            {
                { "bootstrap.servers", this.serverAddress },
                { "retries", 0 },
                { "queue.buffering.max.ms", 0 },
                { "batch.num.messages", 1 },
                { "socket.nagle.disable", true }
            };

            kProducer.Setup(producerConfig);

            // Generate 100 events
            var opTimer = Stopwatch.StartNew();

            for (int i = 0; i < 100; i++)
            {
                AppEventArgs<Customer> evt = this.__getEvent();

                // Await each send individually so every message is delivered before the next is produced
                await kProducer.ProduceAsync(topic, evt);
            }
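            // Awaiting each ProduceAsync serializes the sends, so this loop measures
            // per-message round-trip latency rather than raw throughput (contrast with
            // the Task.WhenAll batch in Test004).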
            opTimer.Stop();
            this.testLogger.LogInformation($"Took {opTimer.Elapsed.TotalSeconds} sec to send 100 events");
            kProducer.Dispose();

            var kConsumer      = new app.common.messaging.simple.KafkaConsumer(this.loggerFactory);
            var consumerConfig = new Dictionary<string, object>
            {
                { "bootstrap.servers", this.serverAddress },
                { "auto.commit.interval.ms", 5000 },
                { "auto.offset.reset", "earliest" },
                { "group.id", "test-consumer-group" }
            };
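            // As in Test001, "earliest" is required because all 100 messages were produced
            // to the brand-new topic before this consumer subscribed.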

            var consumeTopics = new List<string>()
            {
                topic
            };

            kConsumer.Setup(consumerConfig, consumeTopics);

            List<AppEventArgs<Customer>> events = new List<AppEventArgs<Customer>>();

            // Consume lazily yields messages until the cancellation token is triggered

            opTimer = Stopwatch.StartNew();
            foreach (var message in kConsumer.Consume(this.cts.Token))
            {
                this.testLogger.LogTrace(LoggingEvents.Trace, $"Response: Partition:{message.Partition}, Offset:{message.Offset} :: {message.Message}");
                events.Add(AppEventArgs<Customer>.FromJson(message.Message));

                if (events.Count == 100)
                {
                    opTimer.Stop();

                    this.testLogger.LogDebug($"Received {events.Count} events from Kafka");
                    this.testLogger.LogInformation($"Took {opTimer.Elapsed.TotalSeconds} sec to receive 100 events");

                    // Cancelling the shared token ends the Consume enumeration
                    this.cts.Cancel();
                }
            }

            kConsumer.Dispose();
        }