Example #1
        public void StateStoreUpdateKey()
        {
            var source = new CancellationTokenSource();
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId = "test-config";
            config.PollMs        = 1;
            var topicConfiguration = config.Clone();

            topicConfiguration.ApplicationId = $"test-driver-{config.ApplicationId}";

            var builder = new StreamBuilder();

            builder.Table <string, string>("test", InMemory <string, string> .As("store"));
            var driver = new ClusterInMemoryTopologyDriver("client", builder.Build().Builder, config, topicConfiguration, TimeSpan.FromSeconds(1), source.Token);

            driver.StartDriver();
            var input = driver.CreateInputTopic("test", new StringSerDes(), new StringSerDes());
            var store = driver.GetStateStore <string, string>("store");

            Assert.IsNotNull(store);
            Assert.IsInstanceOf <MockReadOnlyKeyValueStore <string, string> >(store);
            input.PipeInput("coucou", "1");
            Thread.Sleep(100);

            Assert.AreEqual(1, ((MockReadOnlyKeyValueStore <string, string>)store).All().Count());
            Assert.AreEqual("1", ((MockReadOnlyKeyValueStore <string, string>)store).Get("coucou"));

            input.PipeInput("coucou", "2");
            Thread.Sleep(100);

            Assert.AreEqual(1, ((MockReadOnlyKeyValueStore <string, string>)store).All().Count());
            Assert.AreEqual("2", ((MockReadOnlyKeyValueStore <string, string>)store).Get("coucou"));
            source.Cancel();
            driver.Dispose();
        }
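The test above pipes two records with the same key into an in-memory KTable topology and verifies, through the mock read-only store, that the second value overwrites the first. As a minimal sketch that only recombines the calls already used in this example, the setup can be factored into a reusable assertion helper; the helper name, the try/finally shutdown, and the fixed 100 ms sleep are assumptions of this sketch, and the same using directives as the test file are assumed.

        // Sketch only: same driver setup as above, packaged as a helper.
        private static void AssertStoreValue(string key, string expectedValue)
        {
            var source = new CancellationTokenSource();
            var config = new StreamConfig<StringSerDes, StringSerDes>();
            config.ApplicationId = "test-config";
            config.PollMs = 1;

            var topicConfiguration = config.Clone();
            topicConfiguration.ApplicationId = $"test-driver-{config.ApplicationId}";

            var builder = new StreamBuilder();
            builder.Table<string, string>("test", InMemory<string, string>.As("store"));

            var driver = new ClusterInMemoryTopologyDriver("client", builder.Build().Builder,
                config, topicConfiguration, TimeSpan.FromSeconds(1), source.Token);
            driver.StartDriver();
            try
            {
                var input = driver.CreateInputTopic("test", new StringSerDes(), new StringSerDes());
                var store = (MockReadOnlyKeyValueStore<string, string>)driver.GetStateStore<string, string>("store");

                input.PipeInput(key, expectedValue);
                Thread.Sleep(100); // give the driver's background thread time to materialize the record

                Assert.AreEqual(expectedValue, store.Get(key));
            }
            finally
            {
                source.Cancel();
                driver.Dispose();
            }
        }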
Example #2
        public void GetCurrentHeadersMetadataTests()
        {
            var source = new System.Threading.CancellationTokenSource();
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId  = "test";
            config.Guarantee      = ProcessingGuarantee.AT_LEAST_ONCE;
            config.PollMs         = 1;
            config.FollowMetadata = true;
            var configConsumer = config.Clone();

            configConsumer.ApplicationId = "test-consumer";
            Headers h       = null;
            Headers headers = new Headers();

            headers.Add("k", new byte[1] {
                13
            });

            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder
            .Stream <string, string>("topic")
            .MapValues((v) =>
            {
                h = StreamizMetadata.GetCurrentHeadersMetadata();
                return(v);
            })
            .To("output");

            var topo = builder.Build();

            var supplier = new SyncKafkaSupplier();
            var producer = supplier.GetProducer(config.ToProducerConfig());
            var consumer = supplier.GetConsumer(configConsumer.ToConsumerConfig(), null);

            var thread = StreamThread.Create(
                "thread-0", "c0",
                topo.Builder, config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                0) as StreamThread;

            thread.Start(source.Token);
            producer.Produce("topic", new Confluent.Kafka.Message <byte[], byte[]>
            {
                Key     = serdes.Serialize("key1", new SerializationContext()),
                Value   = serdes.Serialize("coucou", new SerializationContext()),
                Headers = headers
            });

            consumer.Subscribe("output");
            ConsumeResult <byte[], byte[]> result = null;

            do
            {
                result = consumer.Consume(100);
            } while (result == null);


            source.Cancel();
            thread.Dispose();

            Assert.NotNull(h);
            Assert.AreEqual(1, h.Count);
            Assert.AreEqual("k", h[0].Key);
            Assert.AreEqual(new byte[1] {
                13
            }, h[0].GetValueBytes());
        }
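StreamizMetadata.GetCurrentHeadersMetadata() reads the headers of the record currently being processed; the test enables config.FollowMetadata = true before building the topology, which appears to be required for the metadata accessors. Below is a hedged sketch, reusing only the operators shown above, where the accessor feeds the transformation itself instead of a test-local variable; the output format is an assumption of this sketch.

            // Sketch only: tag each value with the number of headers on the current record.
            var builder = new StreamBuilder();

            builder
                .Stream<string, string>("topic")
                .MapValues(v =>
                {
                    var headers = StreamizMetadata.GetCurrentHeadersMetadata();
                    var count = headers != null ? headers.Count : 0;
                    return $"{v} (headers: {count})";
                })
                .To("output");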
Example #3
        public void GetCurrentTimestampMetadataTests()
        {
            var source = new System.Threading.CancellationTokenSource();
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId  = "test";
            config.Guarantee      = ProcessingGuarantee.AT_LEAST_ONCE;
            config.PollMs         = 1;
            config.FollowMetadata = true;
            var configConsumer = config.Clone();

            configConsumer.ApplicationId = "test-consumer";
            long?    h  = null;
            DateTime dt = DateTime.Now;

            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder
            .Stream <string, string>("topic")
            .MapValues((v) =>
            {
                h = StreamizMetadata.GetCurrentTimestampMetadata();
                return(v);
            })
            .To("output");

            var topo = builder.Build();

            var supplier = new SyncKafkaSupplier();
            var producer = supplier.GetProducer(config.ToProducerConfig());
            var consumer = supplier.GetConsumer(configConsumer.ToConsumerConfig(), null);

            var thread = StreamThread.Create(
                "thread-0", "c0",
                topo.Builder, config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                0) as StreamThread;

            thread.Start(source.Token);
            producer.Produce("topic", new Confluent.Kafka.Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key1", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });

            consumer.Subscribe("output");
            ConsumeResult <byte[], byte[]> result = null;

            do
            {
                result = consumer.Consume(100);
            } while (result == null);


            source.Cancel();
            thread.Dispose();

            Assert.NotNull(h);
            // TODO FIX Assert.IsTrue(h.Value > dt.GetMilliseconds());
            Assert.IsTrue(h.Value > 0);
        }
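GetCurrentTimestampMetadata() follows the same pattern, returning the current record's timestamp, apparently as epoch milliseconds given the commented-out comparison with dt.GetMilliseconds(); the test only asserts that the captured value is non-null and positive. A minimal sketch, assuming the same FollowMetadata = true configuration, that appends that timestamp to each output value:

            // Sketch only: append the current record timestamp (epoch ms) to the value.
            var builder = new StreamBuilder();

            builder
                .Stream<string, string>("topic")
                .MapValues(v =>
                {
                    long? ts = StreamizMetadata.GetCurrentTimestampMetadata();
                    return ts.HasValue ? $"{v}@{ts.Value}" : v;
                })
                .To("output");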
Example #4
        public void ThreadMetricsTest()
        {
            var serdes      = new StringSerDes();
            var cloneConfig = config.Clone();

            cloneConfig.ApplicationId = "consume-test";
            var producer = mockKafkaSupplier.GetProducer(cloneConfig.ToProducerConfig());
            var consumer = mockKafkaSupplier.GetConsumer(cloneConfig.ToConsumerConfig("test-consum"), null);

            consumer.Subscribe("topic2");

            thread.Start(token.Token);

            AssertExtensions.WaitUntil(() => thread.ActiveTasks.Count() == numberPartitions,
                                       TimeSpan.FromSeconds(5),
                                       TimeSpan.FromMilliseconds(100));

            int nbMessage = 1000;

            // produce nbMessage messages to the input topic
            for (int i = 0; i < nbMessage; ++i)
            {
                producer.Produce("topic", new Confluent.Kafka.Message <byte[], byte[]>
                {
                    Key   = serdes.Serialize("key" + i, new SerializationContext()),
                    Value = serdes.Serialize("Hi" + i, new SerializationContext())
                });
            }

            var messagesSink = new List <ConsumeResult <byte[], byte[]> >();

            AssertExtensions.WaitUntil(() =>
            {
                messagesSink.AddRange(consumer.ConsumeRecords(TimeSpan.FromSeconds(1)));
                return(messagesSink.Count == nbMessage);
            }, TimeSpan.FromSeconds(5),
                                       TimeSpan.FromMilliseconds(10));

            // wait for processing to finish
            Thread.Sleep(1000);

            long now     = DateTime.Now.GetMilliseconds();
            var  sensors = streamMetricsRegistry.GetThreadScopeSensor(threadId);

            var createTaskSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.CREATE_TASK)));

            Assert.AreEqual(2, createTaskSensor.Metrics.Count());
            Assert.AreEqual(numberPartitions,
                            createTaskSensor.Metrics[MetricName.NameAndGroup(
                                                         ThreadMetrics.CREATE_TASK + StreamMetricsRegistry.TOTAL_SUFFIX,
                                                         StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value);
            Assert.IsTrue(
                (double)(createTaskSensor.Metrics[MetricName.NameAndGroup(
                                                      ThreadMetrics.CREATE_TASK + StreamMetricsRegistry.RATE_SUFFIX,
                                                      StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value) > 0d);

            var closeTaskSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.CLOSE_TASK)));

            Assert.AreEqual(2, closeTaskSensor.Metrics.Count());
            Assert.AreEqual(0d,
                            closeTaskSensor.Metrics[MetricName.NameAndGroup(
                                                        ThreadMetrics.CLOSE_TASK + StreamMetricsRegistry.TOTAL_SUFFIX,
                                                        StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value);
            Assert.AreEqual(0d,
                            closeTaskSensor.Metrics[MetricName.NameAndGroup(
                                                        ThreadMetrics.CLOSE_TASK + StreamMetricsRegistry.RATE_SUFFIX,
                                                        StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value);

            var commitSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.COMMIT)));

            Assert.AreEqual(4, commitSensor.Metrics.Count());
            Assert.IsTrue(
                (double)commitSensor.Metrics[MetricName.NameAndGroup(
                                                 ThreadMetrics.COMMIT + StreamMetricsRegistry.TOTAL_SUFFIX,
                                                 StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)commitSensor.Metrics[MetricName.NameAndGroup(
                                                 ThreadMetrics.COMMIT + StreamMetricsRegistry.RATE_SUFFIX,
                                                 StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)commitSensor.Metrics[MetricName.NameAndGroup(
                                                 ThreadMetrics.COMMIT + StreamMetricsRegistry.LATENCY_SUFFIX + StreamMetricsRegistry.AVG_SUFFIX,
                                                 StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)commitSensor.Metrics[MetricName.NameAndGroup(
                                                 ThreadMetrics.COMMIT + StreamMetricsRegistry.LATENCY_SUFFIX + StreamMetricsRegistry.MAX_SUFFIX,
                                                 StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);

            var pollSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.POLL)));

            Assert.AreEqual(4, pollSensor.Metrics.Count());
            Assert.IsTrue(
                (double)pollSensor.Metrics[MetricName.NameAndGroup(
                                               ThreadMetrics.POLL + StreamMetricsRegistry.TOTAL_SUFFIX,
                                               StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)pollSensor.Metrics[MetricName.NameAndGroup(
                                               ThreadMetrics.POLL + StreamMetricsRegistry.RATE_SUFFIX,
                                               StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)pollSensor.Metrics[MetricName.NameAndGroup(
                                               ThreadMetrics.POLL + StreamMetricsRegistry.LATENCY_SUFFIX + StreamMetricsRegistry.AVG_SUFFIX,
                                               StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)pollSensor.Metrics[MetricName.NameAndGroup(
                                               ThreadMetrics.POLL + StreamMetricsRegistry.LATENCY_SUFFIX + StreamMetricsRegistry.MAX_SUFFIX,
                                               StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);

            var pollRecordsSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.POLL + StreamMetricsRegistry.RECORDS_SUFFIX)));

            Assert.AreEqual(2, pollRecordsSensor.Metrics.Count());
            Assert.IsTrue(
                (double)pollRecordsSensor.Metrics[MetricName.NameAndGroup(
                                                      ThreadMetrics.POLL + StreamMetricsRegistry.RECORDS_SUFFIX + StreamMetricsRegistry.AVG_SUFFIX,
                                                      StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)pollRecordsSensor.Metrics[MetricName.NameAndGroup(
                                                      ThreadMetrics.POLL + StreamMetricsRegistry.RECORDS_SUFFIX + StreamMetricsRegistry.MAX_SUFFIX,
                                                      StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);

            var processRecordsSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.PROCESS + StreamMetricsRegistry.RECORDS_SUFFIX)));

            Assert.AreEqual(2, processRecordsSensor.Metrics.Count());
            Assert.IsTrue(
                (double)processRecordsSensor.Metrics[MetricName.NameAndGroup(
                                                         ThreadMetrics.PROCESS + StreamMetricsRegistry.RECORDS_SUFFIX + StreamMetricsRegistry.AVG_SUFFIX,
                                                         StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)processRecordsSensor.Metrics[MetricName.NameAndGroup(
                                                         ThreadMetrics.PROCESS + StreamMetricsRegistry.RECORDS_SUFFIX + StreamMetricsRegistry.MAX_SUFFIX,
                                                         StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);

            var processRateSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.PROCESS + StreamMetricsRegistry.RATE_SUFFIX)));

            Assert.AreEqual(2, processRateSensor.Metrics.Count());
            Assert.IsTrue(
                (double)processRateSensor.Metrics[MetricName.NameAndGroup(
                                                      ThreadMetrics.PROCESS + StreamMetricsRegistry.RATE_SUFFIX + StreamMetricsRegistry.RATE_SUFFIX,
                                                      StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)processRateSensor.Metrics[MetricName.NameAndGroup(
                                                      ThreadMetrics.PROCESS + StreamMetricsRegistry.RATE_SUFFIX + StreamMetricsRegistry.TOTAL_SUFFIX,
                                                      StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);

            var processLatencySensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.PROCESS + StreamMetricsRegistry.LATENCY_SUFFIX)));

            Assert.AreEqual(2, processLatencySensor.Metrics.Count());
            Assert.IsTrue(
                (double)processLatencySensor.Metrics[MetricName.NameAndGroup(
                                                         ThreadMetrics.PROCESS + StreamMetricsRegistry.LATENCY_SUFFIX + StreamMetricsRegistry.AVG_SUFFIX,
                                                         StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)processLatencySensor.Metrics[MetricName.NameAndGroup(
                                                         ThreadMetrics.PROCESS + StreamMetricsRegistry.LATENCY_SUFFIX + StreamMetricsRegistry.MAX_SUFFIX,
                                                         StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value > 0d);

            // ratio sensors
            var processRatioSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.PROCESS + StreamMetricsRegistry.RATIO_SUFFIX)));
            var pollRatioSensor    = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.POLL + StreamMetricsRegistry.RATIO_SUFFIX)));
            var commitRatioSensor  = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(ThreadMetrics.COMMIT + StreamMetricsRegistry.RATIO_SUFFIX)));

            Assert.AreEqual(1, processRatioSensor.Metrics.Count());
            Assert.AreEqual(1, pollRatioSensor.Metrics.Count());
            Assert.AreEqual(1, commitRatioSensor.Metrics.Count());

            var processRatioValue = (double)processRatioSensor.Metrics[MetricName.NameAndGroup(
                                                                           ThreadMetrics.PROCESS + StreamMetricsRegistry.RATIO_SUFFIX,
                                                                           StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value;

            var pollRatioValue = (double)pollRatioSensor.Metrics[MetricName.NameAndGroup(
                                                                     ThreadMetrics.POLL + StreamMetricsRegistry.RATIO_SUFFIX,
                                                                     StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value;

            var commitRatioValue = (double)commitRatioSensor.Metrics[MetricName.NameAndGroup(
                                                                         ThreadMetrics.COMMIT + StreamMetricsRegistry.RATIO_SUFFIX,
                                                                         StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value;

            double total = Math.Round(processRatioValue + pollRatioValue + commitRatioValue, 2);

            // we accept up to 10% of the thread time being unaccounted for
            Assert.IsTrue(total >= 0.90d);
        }
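Every assertion above addresses a metric the same way: the sensor is found by GetSensorName(...), then its Metrics dictionary is indexed with MetricName.NameAndGroup(metric name + suffix, StreamMetricsRegistry.THREAD_LEVEL_GROUP). A small lookup helper, sketched below, removes that repetition; the Sensor type name and the helper itself are assumptions of this sketch, not part of the test.

        // Sketch only: fetch one thread-level metric value from a list of sensors.
        private static object ThreadMetricValue(IEnumerable<Sensor> sensors, string sensorName, string metricName)
        {
            // locate the sensor by its full name, then index its metric dictionary
            var sensor = sensors.FirstOrDefault(s => s.Name.Equals(sensorName));
            return sensor?.Metrics[MetricName.NameAndGroup(metricName, StreamMetricsRegistry.THREAD_LEVEL_GROUP)].Value;
        }

With it, the commit-rate check above becomes a single call: Assert.IsTrue((double)ThreadMetricValue(sensors, GetSensorName(ThreadMetrics.COMMIT), ThreadMetrics.COMMIT + StreamMetricsRegistry.RATE_SUFFIX) > 0d);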
Example #5
        public void StreamThreadNormalWorkflowWithRebalancing()
        {
            List <ThreadState> allStates = new List <ThreadState>();
            var expectedStates           = new List <ThreadState>
            {
                ThreadState.CREATED,
                ThreadState.STARTING,
                ThreadState.PARTITIONS_ASSIGNED,
                ThreadState.RUNNING,
                ThreadState.PARTITIONS_REVOKED,
                ThreadState.PARTITIONS_ASSIGNED,
                ThreadState.RUNNING,
                ThreadState.PENDING_SHUTDOWN,
                ThreadState.DEAD
            };

            var source = new System.Threading.CancellationTokenSource();
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId = "test";
            config.Guarantee     = ProcessingGuarantee.AT_LEAST_ONCE;
            config.PollMs        = 1;

            var consumeConfig = config.Clone();

            consumeConfig.ApplicationId = "consume-test";

            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder.Stream <string, string>("topic").To("topic2");

            var topo = builder.Build();

            var supplier = new MockKafkaSupplier(4);
            var producer = supplier.GetProducer(consumeConfig.ToProducerConfig());
            var consumer = supplier.GetConsumer(consumeConfig.ToConsumerConfig("test-consum"), null);

            consumer.Subscribe("topic2");

            var thread = StreamThread.Create(
                "thread-0", "c0",
                topo.Builder, config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                0) as StreamThread;

            allStates.Add(thread.State);
            thread.StateChanged += (t, o, n) =>
            {
                Assert.IsInstanceOf <ThreadState>(n);
                allStates.Add(n as ThreadState);
            };

            thread.Start(source.Token);
            // WAIT FOR PARTITIONS TO BE ASSIGNED
            System.Threading.Thread.Sleep(50);

            var thread2 = StreamThread.Create(
                "thread-1", "c1",
                topo.Builder, config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                1) as StreamThread;

            thread2.Start(source.Token);
            // WAIT FOR PARTITION REBALANCING
            System.Threading.Thread.Sleep(50);

            producer.Produce("topic", new Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key1", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });
            // WAIT FOR THE STREAMTHREAD TO PROCESS THE MESSAGE
            System.Threading.Thread.Sleep(100);
            var message = consumer.Consume(100);

            // 2 CONSUMERS FOR THE SAME GROUP ID => TOPIC WITH 4 PARTITIONS
            Assert.AreEqual(2, thread.ActiveTasks.Count());
            Assert.AreEqual(2, thread2.ActiveTasks.Count());

            source.Cancel();
            thread.Dispose();
            thread2.Dispose();

            Assert.AreEqual("key1", serdes.Deserialize(message.Message.Key, new SerializationContext()));
            Assert.AreEqual("coucou", serdes.Deserialize(message.Message.Value, new SerializationContext()));
            Assert.AreEqual(expectedStates, allStates);
            // Destroy the in-memory cluster
            supplier.Destroy();
        }
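The two Thread.Sleep calls only give the group time to assign and rebalance partitions; they do not verify that it happened. A polling helper, sketched below, makes the wait explicit. It assumes StreamThread.State can be compared directly against the ThreadState singletons, the same comparison Example #7 uses (thread1.State == ThreadState.RUNNING); the helper name, timeout handling, and 10 ms poll interval are assumptions of this sketch.

        // Sketch only: poll until the stream thread reaches the expected state or the timeout elapses.
        private static bool WaitForState(StreamThread streamThread, ThreadState expected, TimeSpan timeout)
        {
            var deadline = DateTime.Now + timeout;
            while (DateTime.Now < deadline)
            {
                if (streamThread.State == expected)
                {
                    return true;
                }
                System.Threading.Thread.Sleep(10);
            }
            return streamThread.State == expected;
        }

AssertExtensions.WaitUntil, used in the metrics and restoration tests, serves the same purpose with an arbitrary condition.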
Example #6
        public void TaskMetricsTest()
        {
            var serdes      = new StringSerDes();
            var cloneConfig = config.Clone();

            cloneConfig.ApplicationId = "consume-test";
            var producer = syncKafkaSupplier.GetProducer(cloneConfig.ToProducerConfig());
            var consumer = syncKafkaSupplier.GetConsumer(cloneConfig.ToConsumerConfig("test-consum"), null);

            consumer.Subscribe("topic2");

            int nbMessage = 1000;
            // build 1000 records and feed them directly to the task
            List <ConsumeResult <byte[], byte[]> > messages = new List <ConsumeResult <byte[], byte[]> >();
            int offset = 0;

            for (int i = 0; i < nbMessage; ++i)
            {
                messages.Add(
                    new ConsumeResult <byte[], byte[]>
                {
                    Message = new Message <byte[], byte[]>
                    {
                        Key   = serdes.Serialize($"key{i + 1}", new SerializationContext()),
                        Value = serdes.Serialize($"value{i + 1}", new SerializationContext())
                    },
                    TopicPartitionOffset = new TopicPartitionOffset(topicPartition, offset++)
                });
            }

            task.AddRecords(messages);

            while (task.CanProcess(DateTime.Now.GetMilliseconds()))
            {
                Assert.IsTrue(task.Process());
                Assert.IsTrue(task.CommitNeeded);
                task.Commit();
            }

            var messagesSink = new List <ConsumeResult <byte[], byte[]> >();

            AssertExtensions.WaitUntil(() =>
            {
                messagesSink.AddRange(consumer.ConsumeRecords(TimeSpan.FromSeconds(1)));
                return(messagesSink.Count == nbMessage);
            }, TimeSpan.FromSeconds(5),
                                       TimeSpan.FromMilliseconds(10));

            long now     = DateTime.Now.GetMilliseconds();
            var  sensors = streamMetricsRegistry.GetThreadScopeSensor(threadId);

            var processorSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(TaskMetrics.PROCESS)));

            Assert.AreEqual(2, processorSensor.Metrics.Count());
            Assert.AreEqual(nbMessage,
                            processorSensor.Metrics[MetricName.NameAndGroup(
                                                        TaskMetrics.PROCESS + StreamMetricsRegistry.TOTAL_SUFFIX,
                                                        StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);
            Assert.IsTrue(
                (double)(processorSensor.Metrics[MetricName.NameAndGroup(
                                                     TaskMetrics.PROCESS + StreamMetricsRegistry.RATE_SUFFIX,
                                                     StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value) > 0d);

            var enforcedProcessorSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(TaskMetrics.ENFORCED_PROCESSING)));

            Assert.AreEqual(2, enforcedProcessorSensor.Metrics.Count());
            Assert.AreEqual(0,
                            enforcedProcessorSensor.Metrics[MetricName.NameAndGroup(
                                                                TaskMetrics.ENFORCED_PROCESSING + StreamMetricsRegistry.TOTAL_SUFFIX,
                                                                StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);
            Assert.AreEqual(0,
                            enforcedProcessorSensor.Metrics[MetricName.NameAndGroup(
                                                                TaskMetrics.ENFORCED_PROCESSING + StreamMetricsRegistry.RATE_SUFFIX,
                                                                StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);

            var processLatency = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(TaskMetrics.PROCESS_LATENCY)));

            Assert.AreEqual(2, processLatency.Metrics.Count());
            Assert.IsTrue(
                (double)processLatency.Metrics[MetricName.NameAndGroup(
                                                   TaskMetrics.PROCESS_LATENCY + StreamMetricsRegistry.AVG_SUFFIX,
                                                   StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value > 0d);
            Assert.IsTrue(
                (double)processLatency.Metrics[MetricName.NameAndGroup(
                                                   TaskMetrics.PROCESS_LATENCY + StreamMetricsRegistry.MAX_SUFFIX,
                                                   StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value > 0d);

            var commitSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(TaskMetrics.COMMIT)));

            Assert.AreEqual(2, commitSensor.Metrics.Count());
            Assert.AreEqual(nbMessage,
                            commitSensor.Metrics[MetricName.NameAndGroup(
                                                     TaskMetrics.COMMIT + StreamMetricsRegistry.TOTAL_SUFFIX,
                                                     StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);
            Assert.IsTrue(
                (double)commitSensor.Metrics[MetricName.NameAndGroup(
                                                 TaskMetrics.COMMIT + StreamMetricsRegistry.RATE_SUFFIX,
                                                 StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value > 0d);

            var droppedRecordSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(TaskMetrics.DROPPED_RECORDS)));

            Assert.AreEqual(2, droppedRecordSensor.Metrics.Count());
            Assert.AreEqual(0,
                            droppedRecordSensor.Metrics[MetricName.NameAndGroup(
                                                            TaskMetrics.DROPPED_RECORDS + StreamMetricsRegistry.TOTAL_SUFFIX,
                                                            StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);
            Assert.AreEqual(0,
                            droppedRecordSensor.Metrics[MetricName.NameAndGroup(
                                                            TaskMetrics.DROPPED_RECORDS + StreamMetricsRegistry.RATE_SUFFIX,
                                                            StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);

            var activeBufferedRecordSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(TaskMetrics.ACTIVE_TASK_PREFIX + TaskMetrics.BUFFER_COUNT)));

            Assert.AreEqual(1, activeBufferedRecordSensor.Metrics.Count());
            Assert.AreEqual(0,
                            activeBufferedRecordSensor.Metrics[MetricName.NameAndGroup(
                                                                   TaskMetrics.ACTIVE_TASK_PREFIX + TaskMetrics.BUFFER_COUNT,
                                                                   StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);

            var restorationRecordsSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(TaskMetrics.RESTORATION_RECORDS)));

            Assert.AreEqual(1, restorationRecordsSensor.Metrics.Count());
            Assert.AreEqual(0,
                            restorationRecordsSensor.Metrics[MetricName.NameAndGroup(
                                                                 TaskMetrics.RESTORATION_RECORDS,
                                                                 StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);

            var activeRestorationSensor = sensors.FirstOrDefault(s => s.Name.Equals(GetSensorName(TaskMetrics.ACTIVE_RESTORATION)));

            Assert.AreEqual(1, activeRestorationSensor.Metrics.Count());
            Assert.AreEqual(0,
                            activeRestorationSensor.Metrics[MetricName.NameAndGroup(
                                                                TaskMetrics.ACTIVE_RESTORATION,
                                                                StreamMetricsRegistry.TASK_LEVEL_GROUP)].Value);
        }
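The setup loop above only combines Confluent.Kafka types (ConsumeResult, Message, TopicPartitionOffset) with StringSerDes, so it is easy to factor out. Below is a sketch of the same loop as a helper; the method name is an assumption of this sketch.

        // Sketch only: build 'count' keyed records for one topic partition, offsets 0..count-1.
        private static List<ConsumeResult<byte[], byte[]>> BuildRecords(TopicPartition topicPartition, int count, StringSerDes serdes)
        {
            var records = new List<ConsumeResult<byte[], byte[]>>();
            for (int i = 0; i < count; ++i)
            {
                records.Add(new ConsumeResult<byte[], byte[]>
                {
                    Message = new Message<byte[], byte[]>
                    {
                        Key   = serdes.Serialize($"key{i + 1}", new SerializationContext()),
                        Value = serdes.Serialize($"value{i + 1}", new SerializationContext())
                    },
                    TopicPartitionOffset = new TopicPartitionOffset(topicPartition, i)
                });
            }
            return records;
        }

The test body would then start with task.AddRecords(BuildRecords(topicPartition, nbMessage, serdes)); before the process/commit loop.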
Example #7
        public void StreamThreadRestorationPhaseStartDifferent()
        {
            var producerConfig = config.Clone();

            producerConfig.ApplicationId = "produce-test";

            var serdes   = new StringSerDes();
            var producer = mockKafkaSupplier.GetProducer(producerConfig.ToProducerConfig());

            thread1.Start(token1.Token);
            Thread.Sleep(1500);
            thread2.Start(token2.Token);

            producer.Produce(new TopicPartition("topic", 0), new Confluent.Kafka.Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key1", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });
            producer.Produce(new TopicPartition("topic", 1), new Confluent.Kafka.Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key2", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });

            AssertExtensions.WaitUntil(
                () => thread1.State == ThreadState.RUNNING &&
                thread2.State == ThreadState.RUNNING,
                TimeSpan.FromSeconds(5),
                TimeSpan.FromMilliseconds(20));

            // 2 CONSUMERS FOR THE SAME GROUP ID => TOPIC WITH 2 PARTITIONS
            Assert.AreEqual(1, thread1.ActiveTasks.Count());
            Assert.AreEqual(1, thread2.ActiveTasks.Count());

            AssertExtensions.WaitUntil(
                () => thread1.ActiveTasks.ToList()[0].State == TaskState.RUNNING &&
                thread2.ActiveTasks.ToList()[0].State == TaskState.RUNNING,
                TimeSpan.FromSeconds(5),
                TimeSpan.FromMilliseconds(20));

            var storeThread1 =
                thread1.ActiveTasks.ToList()[0].GetStore("store") as ITimestampedKeyValueStore <string, string>;
            var storeThread2 =
                thread2.ActiveTasks.ToList()[0].GetStore("store") as ITimestampedKeyValueStore <string, string>;

            Assert.IsNotNull(storeThread1);
            Assert.IsNotNull(storeThread2);

            AssertExtensions.WaitUntil(
                () => storeThread1.All().ToList().Count == 1,
                TimeSpan.FromSeconds(5),
                TimeSpan.FromMilliseconds(20));

            AssertExtensions.WaitUntil(
                () => storeThread2.All().ToList().Count == 1,
                TimeSpan.FromSeconds(5),
                TimeSpan.FromMilliseconds(20));

            var totalItemsSt1 = storeThread1.All().ToList();
            var totalItemsSt2 = storeThread2.All().ToList();

            Assert.AreEqual(1, totalItemsSt1.Count);
            Assert.AreEqual(1, totalItemsSt2.Count);

            // Thread2 is closed; its partitions are rebalanced to thread1
            // Thread1 needs to restore the state store
            token2.Cancel();
            thread2.Dispose();

            thread2Disposed = true;

            producer.Produce(new TopicPartition("topic", 1), new Confluent.Kafka.Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key3", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });

            AssertExtensions.WaitUntil(
                () => thread1.State == ThreadState.RUNNING && thread1.ActiveTasks.Count() == 2,
                TimeSpan.FromSeconds(5),
                TimeSpan.FromMilliseconds(20));

            Assert.AreEqual(2, thread1.ActiveTasks.Count());

            AssertExtensions.WaitUntil(
                () => thread1.ActiveTasks.ToList()[0].State == TaskState.RUNNING &&
                thread1.ActiveTasks.ToList()[1].State == TaskState.RUNNING,
                TimeSpan.FromSeconds(5),
                TimeSpan.FromMilliseconds(20));

            var storeThreadTask1 =
                thread1.ActiveTasks.ToList()[0].GetStore("store") as ITimestampedKeyValueStore <string, string>;
            var storeThreadTask2 =
                thread1.ActiveTasks.ToList()[1].GetStore("store") as ITimestampedKeyValueStore <string, string>;

            Assert.IsNotNull(storeThreadTask1);
            Assert.IsNotNull(storeThreadTask2);

            bool task0Part0 = thread1.ActiveTasks.ToList()[0].Id.Partition == 0;

            AssertExtensions.WaitUntil(
                () => storeThreadTask1.All().ToList().Count == (task0Part0 ? 1 : 2),
                TimeSpan.FromSeconds(1),
                TimeSpan.FromMilliseconds(20));

            AssertExtensions.WaitUntil(
                () => storeThreadTask2.All().ToList().Count == (task0Part0 ? 2 : 1),
                TimeSpan.FromSeconds(1),
                TimeSpan.FromMilliseconds(20));

            var totalItemsSt10 = storeThreadTask1.All().ToList();
            var totalItemsSt11 = storeThreadTask2.All().ToList();

            Assert.AreEqual((task0Part0 ? 1 : 2), totalItemsSt10.Count);
            Assert.AreEqual((task0Part0 ? 2 : 1), totalItemsSt11.Count);
        }
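The final assertions branch on task0Part0 because which of the two tasks owns partition 0 depends on the assignment. An order-independent alternative, sketched below under the assumption that the same storeThreadTask1 and storeThreadTask2 variables are in scope, simply checks that the two stores together hold the three produced keys (key1 on partition 0, key2 and key3 on partition 1).

            // Sketch only: after restoration, both stores combined should contain all three records.
            AssertExtensions.WaitUntil(
                () => storeThreadTask1.All().Count() + storeThreadTask2.All().Count() == 3,
                TimeSpan.FromSeconds(1),
                TimeSpan.FromMilliseconds(20));

            Assert.AreEqual(3, storeThreadTask1.All().Count() + storeThreadTask2.All().Count());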