Example 1
 private static void Prod_MessageDiscarded(KafkaRecord<string, string> obj)
 {
     // Notify the user that the client discarded a message.
     MessageBox.Show("kafkaB Message discarded");
 }
Example 2
        public void TestConsumer()
        {
            var client = new Mock<IClusterClient>();

            client.SetupGet(c => c.Messages)
            .Returns(Observable.FromEvent<RawKafkaRecord>(a => client.Object.MessageReceived += a,
                                                          a => client.Object.MessageReceived -= a));

            // Bad arguments
            Assert.That(() => new KafkaConsumer<string, string>(null, client.Object), Throws.ArgumentException);
            Assert.That(() => new KafkaConsumer<string, string>("", client.Object), Throws.ArgumentException);
            Assert.That(() => new KafkaConsumer<string, string>("toto", null), Throws.InstanceOf<ArgumentNullException>());

            using (var consumer = new KafkaConsumer<string, string>("topic", client.Object))
            {
                // Double new on same topic/TKey/TValue
                Assert.That(() => new KafkaConsumer<string, string>("topic", client.Object), Throws.ArgumentException);

                // Consume / Stop
                consumer.Consume(2, 42);
                consumer.ConsumeFromLatest();
                consumer.ConsumeFromLatest(2);
                consumer.ConsumeFromEarliest();
                consumer.ConsumeFromEarliest(2);
                consumer.StopConsume();
                consumer.StopConsume(2);
                consumer.StopConsume(2, 42);

                // Calls are verified both with wildcard matchers (to pin call counts)
                // and with the exact arguments used above.
                client.Verify(c => c.Consume(It.IsAny<string>(), It.IsAny<int>(), It.IsAny<long>()), Times.Once());
                client.Verify(c => c.StopConsume("topic", 2, 42));
                client.Verify(c => c.ConsumeFromLatest(It.IsAny<string>()), Times.Once());
                client.Verify(c => c.ConsumeFromLatest("topic"));
                client.Verify(c => c.ConsumeFromLatest(It.IsAny<string>(), It.IsAny<int>()), Times.Once());
                client.Verify(c => c.ConsumeFromLatest("topic", 2));
                client.Verify(c => c.ConsumeFromEarliest(It.IsAny<string>()), Times.Once());
                client.Verify(c => c.ConsumeFromEarliest("topic"));
                client.Verify(c => c.ConsumeFromEarliest(It.IsAny<string>(), It.IsAny<int>()), Times.Once());
                client.Verify(c => c.ConsumeFromEarliest("topic", 2));

                client.Verify(c => c.StopConsume(It.IsAny<string>()), Times.Once());
                client.Verify(c => c.StopConsume("topic"));
                client.Verify(c => c.StopConsume(It.IsAny<string>(), It.IsAny<int>()), Times.Once());
                client.Verify(c => c.StopConsume("topic", 2));
                client.Verify(c => c.StopConsume(It.IsAny<string>(), It.IsAny<int>(), It.IsAny<long>()), Times.Once());

                bool messageObserved = false;
                bool messageEvent    = false;
                KafkaRecord<string, string> received = default(KafkaRecord<string, string>);

                consumer.MessageReceived += kr =>
                {
                    received     = kr;
                    messageEvent = true;
                };
                consumer.Messages.Subscribe(kr =>
                {
                    messageObserved = true;
                });

                var record = new RawKafkaRecord
                {
                    Topic     = "topic",
                    Key       = "key",
                    Value     = "data",
                    Partition = 2,
                    Offset    = 42
                };

                client.Raise(c => c.MessageReceived += null, record);

                Assert.IsTrue(messageEvent);
                Assert.IsTrue(messageObserved);
                Assert.AreEqual("topic", received.Topic);
                Assert.AreEqual("key", received.Key);
                Assert.AreEqual("data", received.Value);
                Assert.AreEqual(2, received.Partition);
                Assert.AreEqual(42, received.Offset);

                record.Key      = null;
                messageObserved = false;
                messageEvent    = false;
                received        = default(KafkaRecord<string, string>);
                client.Raise(c => c.MessageReceived += null, record);

                Assert.IsTrue(messageEvent);
                Assert.IsTrue(messageObserved);
                Assert.AreEqual("topic", received.Topic);
                Assert.IsNull(received.Key);
                Assert.AreEqual("data", received.Value);
                Assert.AreEqual(2, received.Partition);
                Assert.AreEqual(42, received.Offset);
            }

            // Dispose: can register another consumer with the same Topic/TKey/TValue once
            // the previous one has been disposed.
            client = new Mock<IClusterClient>();
            client.SetupGet(c => c.Messages)
            .Returns(Observable.FromEvent<RawKafkaRecord>(a => client.Object.MessageReceived += a,
                                                          a => client.Object.MessageReceived -= a));
            var consumer2 = new KafkaConsumer<string, string>("topic", client.Object);

            // Dispose: observables are completed and events are no longer subscribed
            bool messageCompleted = false;
            bool messageEvent2    = false;

            consumer2.Messages.Subscribe(kr => { }, () => messageCompleted = true);
            consumer2.MessageReceived += _ => messageEvent2 = true;
            consumer2.Dispose();

            client.Verify(c => c.StopConsume(It.IsAny<string>()), Times.Once()); // Dispose stops all
            client.Verify(c => c.StopConsume("topic"), Times.Once());

            var record2 = new RawKafkaRecord
            {
                Topic     = "topic",
                Key       = "key",
                Value     = "data",
                Partition = 2,
                Offset    = 42
            };

            client.Raise(c => c.MessageReceived += null, record2);

            Assert.IsTrue(messageCompleted);
            Assert.IsFalse(messageEvent2);

            // Consume / Stop no longer work
            consumer2.Consume(2, 42);
            consumer2.ConsumeFromLatest();
            consumer2.ConsumeFromLatest(2);
            consumer2.ConsumeFromEarliest();
            consumer2.ConsumeFromEarliest(2);
            consumer2.StopConsume();
            consumer2.StopConsume(2);
            consumer2.StopConsume(2, 42);

            client.Verify(c => c.Consume(It.IsAny<string>(), It.IsAny<int>(), It.IsAny<long>()), Times.Never());
            client.Verify(c => c.ConsumeFromLatest(It.IsAny<string>()), Times.Never());
            client.Verify(c => c.ConsumeFromLatest(It.IsAny<string>(), It.IsAny<int>()), Times.Never());
            client.Verify(c => c.ConsumeFromEarliest(It.IsAny<string>()), Times.Never());
            client.Verify(c => c.ConsumeFromEarliest(It.IsAny<string>(), It.IsAny<int>()), Times.Never());

            client.Verify(c => c.StopConsume(It.IsAny<string>()), Times.Once());
            client.Verify(c => c.StopConsume(It.IsAny<string>(), It.IsAny<int>()), Times.Never());
            client.Verify(c => c.StopConsume(It.IsAny<string>(), It.IsAny<int>(), It.IsAny<long>()), Times.Never());

            // Dispose: can dispose the same consumer multiple times with no effect
            Assert.That(() => consumer2.Dispose(), Throws.Nothing);
        }
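
The test above exercises the public surface of KafkaConsumer<TKey, TValue>: construction over an IClusterClient, the MessageReceived event, the Messages observable, the Consume / ConsumeFromLatest / ConsumeFromEarliest / StopConsume overloads, and Dispose. A minimal non-test usage sketch follows; it assumes an already-configured IClusterClient (building one is library-specific and not shown), omits the library using directives, and the helper name, topic name, and console logging are placeholders.

        // Minimal usage sketch (not part of the test above). Assumes `cluster` is an
        // already-configured IClusterClient.
        public static KafkaConsumer<string, string> StartConsuming(IClusterClient cluster)
        {
            var consumer = new KafkaConsumer<string, string>("topic", cluster);

            // React to each record through the event...
            consumer.MessageReceived += kr =>
                Console.WriteLine($"{kr.Topic}/{kr.Partition}@{kr.Offset}: {kr.Key} => {kr.Value}");

            // ...or through the Rx stream exposed by Messages (completed on Dispose).
            consumer.Messages.Subscribe(
                kr => { /* process kr */ },
                () => Console.WriteLine("stream completed"));

            // Start from the latest available offsets; Dispose() stops consumption.
            consumer.ConsumeFromLatest();
            return consumer;
        }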
Example 3
 private static void Prod_MessageExpired(KafkaRecord<string, string> obj)
 {
     // Notify the user that a message expired before it could be delivered.
     MessageBox.Show("kafkaB Message expired");
 }
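
Examples 1 and 3 are WinForms handlers with a producer-style naming prefix (Prod_), but the code that attaches them is not shown. The sketch below only illustrates how such handlers could be wired up; the IProducerNotifications interface and its event names are assumptions inferred from the handler signatures, not the library's actual API.

 // Hypothetical wiring sketch for the two handlers above. The interface and its
 // event names are assumptions inferred from the handler signatures.
 public interface IProducerNotifications<TKey, TValue>
 {
     event Action<KafkaRecord<TKey, TValue>> MessageDiscarded;
     event Action<KafkaRecord<TKey, TValue>> MessageExpired;
 }

 // Placed in the same class that declares the handlers:
 private static void WireProducerNotifications(IProducerNotifications<string, string> prod)
 {
     prod.MessageDiscarded += Prod_MessageDiscarded; // example 1
     prod.MessageExpired   += Prod_MessageExpired;   // example 3
 }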