Example #1
        public void StreamThreadCommitIntervalWorkflow()
        {
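            // Topology: "topic" -> "topic2". With PollMs = 1 and CommitIntervalMs = 1, the thread
            // should commit the source-topic offset almost immediately after processing one record.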
            var source = new System.Threading.CancellationTokenSource();
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId    = "test";
            config.Guarantee        = ProcessingGuarantee.AT_LEAST_ONCE;
            config.PollMs           = 1;
            config.CommitIntervalMs = 1;

            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder.Stream <string, string>("topic").To("topic2");

            var topo = builder.Build();

            var supplier = new SyncKafkaSupplier();
            var producer = supplier.GetProducer(config.ToProducerConfig());
            var consumer = supplier.GetConsumer(config.ToConsumerConfig("test-consum"), null);

            consumer.Subscribe("topic2");
            var thread = StreamThread.Create(
                "thread-0", "c0",
                topo.Builder, new StreamMetricsRegistry(), config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                0) as StreamThread;

            thread.Start(source.Token);
            producer.Produce("topic", new Confluent.Kafka.Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key1", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });
            // Wait for the stream thread to process the message
            System.Threading.Thread.Sleep(100);
            var message = consumer.Consume(100);

            Assert.AreEqual("key1", serdes.Deserialize(message.Message.Key, new SerializationContext()));
            Assert.AreEqual("coucou", serdes.Deserialize(message.Message.Value, new SerializationContext()));

            var offsets = thread.GetCommittedOffsets(
                new List <TopicPartition> { new TopicPartition("topic", 0) },
                TimeSpan.FromSeconds(10)).ToList();

            Assert.AreEqual(1, offsets.Count);
            Assert.AreEqual(1, offsets[0].Offset.Value);
            Assert.AreEqual(0, offsets[0].TopicPartition.Partition.Value);
            Assert.AreEqual("topic", offsets[0].Topic);

            source.Cancel();
            thread.Dispose();
        }
Example #2
        public void DeserializeNullData()
        {
            var serdes = new StringSerDes();
            var r      = serdes.Deserialize(null, new Confluent.Kafka.SerializationContext());

            Assert.IsNull(r);
        }
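Example #3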
        public void DeserializeNullData()
        {
            var serdes = new StringSerDes();
            var r      = serdes.Deserialize(null);

            Assert.IsNull(r);
        }
Example #4
        public void EnumeratorReverseRangeAll()
        {
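            // ReverseRange(key, key2) iterates the matching entries in descending key order: "key2" first, then "key".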
            var serdes = new StringSerDes();

            string deserialize(byte[] bytes)
            {
                return(serdes.Deserialize(bytes, new SerializationContext()));
            }

            byte[] key  = serdes.Serialize("key", new SerializationContext()), value = serdes.Serialize("value", new SerializationContext());
            byte[] key2 = serdes.Serialize("key2", new SerializationContext()), value2 = serdes.Serialize("value2", new SerializationContext());
            byte[] key3 = serdes.Serialize("key3", new SerializationContext()), value3 = serdes.Serialize("value3", new SerializationContext());

            store.Put(new Bytes(key), value);
            store.Put(new Bytes(key2), value2);
            store.Put(new Bytes(key3), value3);

            var enumerator = store.ReverseRange(new Bytes(key), new Bytes(key2));

            Assert.IsTrue(enumerator.MoveNext());
            Assert.AreEqual("key2", deserialize(enumerator.Current.Value.Key.Get));
            Assert.AreEqual("value2", deserialize(enumerator.Current.Value.Value));
            Assert.IsTrue(enumerator.MoveNext());
            Assert.AreEqual("key", deserialize(enumerator.Current.Value.Key.Get));
            Assert.AreEqual("value", deserialize(enumerator.Current.Value.Value));
            Assert.IsFalse(enumerator.MoveNext());
            enumerator.Dispose();
        }
Example #5
        public void DeserializeData()
        {
            string s = "test";

            var serdes = new StringSerDes();
            var r      = serdes.Deserialize(serdes.Serialize(s, new Confluent.Kafka.SerializationContext()), new Confluent.Kafka.SerializationContext());

            Assert.IsNotNull(r);
            Assert.AreEqual(s, r);
        }
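Example #6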
        public void DeserializeData()
        {
            string s = "test";

            var serdes = new StringSerDes();
            var r      = serdes.Deserialize(serdes.Serialize(s));

            Assert.IsNotNull(r);
            Assert.AreEqual(s, r);
        }
Example #7
        public void StreamThreadNormalWorkflowWithRebalancing()
        {
            var consumeConfig = config.Clone();

            consumeConfig.ApplicationId = "consume-test";

            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder.Stream <string, string>("topic").To("topic2");

            var topo = builder.Build();

            var producer = mockKafkaSupplier.GetProducer(consumeConfig.ToProducerConfig());
            var consumer = mockKafkaSupplier.GetConsumer(consumeConfig.ToConsumerConfig("test-consum"), null);

            consumer.Subscribe("topic2");

            thread1.Start(token1.Token);
            thread2.Start(token2.Token);

            producer.Produce("topic", new Confluent.Kafka.Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key1", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });
            // Wait for the stream threads to process the message

            AssertExtensions.WaitUntil(() => thread1.ActiveTasks.Count() == 2, TimeSpan.FromSeconds(5),
                                       TimeSpan.FromMilliseconds(10));
            AssertExtensions.WaitUntil(() => thread2.ActiveTasks.Count() == 2, TimeSpan.FromSeconds(5),
                                       TimeSpan.FromMilliseconds(10));

            // 2 consumers in the same group on a topic with 4 partitions => 2 active tasks per thread
            Assert.AreEqual(2, thread1.ActiveTasks.Count());
            Assert.AreEqual(2, thread2.ActiveTasks.Count());

            var message = consumer.Consume(100);

            Assert.AreEqual("key1", serdes.Deserialize(message.Message.Key, new SerializationContext()));
            Assert.AreEqual("coucou", serdes.Deserialize(message.Message.Value, new SerializationContext()));
            // TODO : Finish test with a real cluster Assert.AreEqual(expectedStates, allStates);
        }
Example #8
        public void DeleteKeyExist()
        {
            var serdes = new StringSerDes();

            byte[] key = serdes.Serialize("key", new SerializationContext()),
            value = serdes.Serialize("value", new SerializationContext());

            store.Put(new Bytes(key), value);
            Assert.AreEqual(1, store.ApproximateNumEntries());
            var v = store.Delete(new Bytes(key));

            Assert.AreEqual(0, store.ApproximateNumEntries());
            Assert.AreEqual("value", serdes.Deserialize(v, new SerializationContext()));
        }
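Example #9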
        public void QueueDequeueOneMessageTest()
        {
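            // Queue a single record into a RecordQueue, poll it back, and check the queue is empty again.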
            var timestampEx     = new FailOnInvalidTimestamp();
            var serdes          = new StringSerDes();
            var sourceProcessor = new SourceProcessor <string, string>("source", "test", serdes, serdes, timestampEx);
            var recordQueue     = new RecordQueue("", "", timestampEx, new TopicPartition("test", 0), sourceProcessor);

            recordQueue.Queue(new ConsumeResult <byte[], byte[]>()
            {
                Message = new Message <byte[], byte[]>
                {
                    Key   = serdes.Serialize("key", new SerializationContext()),
                    Value = serdes.Serialize("test", new SerializationContext())
                }
            });
            var r = recordQueue.Poll();

            Assert.IsNotNull(r);
            Assert.AreEqual("key", serdes.Deserialize(r.Message.Key, new SerializationContext()));
            Assert.AreEqual("test", serdes.Deserialize(r.Message.Value, new SerializationContext()));
            Assert.AreEqual(0, recordQueue.Size);
            Assert.IsTrue(recordQueue.IsEmpty);
        }
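Example #10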
        public void DeleteKeyExist()
        {
            var serdes = new StringSerDes();

            byte[] key = serdes.Serialize("key"),
            value = serdes.Serialize("value");

            var store = new InMemoryKeyValueStore("store");

            store.Put(new Bytes(key), value);
            Assert.AreEqual(1, store.ApproximateNumEntries());
            var v = store.Delete(new Bytes(key));

            Assert.AreEqual(0, store.ApproximateNumEntries());
            Assert.AreEqual("value", serdes.Deserialize(v));
        }
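Example #11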
        public void EnumeratorReset()
        {
            var serdes = new StringSerDes();

            byte[] key = serdes.Serialize("key", new SerializationContext()), value = serdes.Serialize("value", new SerializationContext());

            store.Put(new Bytes(key), value, 100);

            var enumerator = store.All();

            Assert.IsTrue(enumerator.MoveNext());
            Assert.AreEqual("key", serdes.Deserialize(enumerator.PeekNextKey().Key.Get, new SerializationContext()));
            Assert.IsFalse(enumerator.MoveNext());
            enumerator.Reset();
            Assert.IsTrue(enumerator.MoveNext());
            enumerator.Dispose();
        }
Example #12
        public void PutKeyExist()
        {
            var serdes = new StringSerDes();

            byte[] key = serdes.Serialize("key", new SerializationContext()),
            value  = serdes.Serialize("value", new SerializationContext()),
            value2 = serdes.Serialize("value2", new SerializationContext());

            store.Put(new Bytes(key), value);
            store.Put(new Bytes(key), value2);
            var e = store.All().ToList();

            Assert.AreEqual(1, e.Count);
            var v = store.Get(new Bytes(key));

            Assert.AreEqual("value2", serdes.Deserialize(v, new SerializationContext()));
        }
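Example #13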
        public void PutKeyExist()
        {
            var serdes = new StringSerDes();

            byte[] key = serdes.Serialize("key", new SerializationContext()),
            value  = serdes.Serialize("value", new SerializationContext()),
            value2 = serdes.Serialize("value2", new SerializationContext());

            var store = new InMemoryKeyValueStore("store");

            store.Put(new Bytes(key), value);
            store.Put(new Bytes(key), value2);
            Assert.AreEqual(1, store.ApproximateNumEntries());
            var v = store.Get(new Bytes(key));

            Assert.AreEqual("value2", serdes.Deserialize(v, new SerializationContext()));
        }
Example #14
        public void EnumeratorIncorrectRange()
        {
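            // Range(key2, key) has from > to, so the enumerator yields nothing.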
            var serdes = new StringSerDes();

            string deserialize(byte[] bytes)
            {
                return(serdes.Deserialize(bytes, new SerializationContext()));
            }

            byte[] key  = serdes.Serialize("key", new SerializationContext()), value = serdes.Serialize("value", new SerializationContext());
            byte[] key2 = serdes.Serialize("key2", new SerializationContext()), value2 = serdes.Serialize("value2", new SerializationContext());
            byte[] key3 = serdes.Serialize("key3", new SerializationContext()), value3 = serdes.Serialize("value3", new SerializationContext());

            store.Put(new Bytes(key), value);
            store.Put(new Bytes(key2), value2);
            store.Put(new Bytes(key3), value3);

            var enumerator = store.Range(new Bytes(key2), new Bytes(key));

            Assert.IsFalse(enumerator.MoveNext());
            enumerator.Dispose();
        }
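Example #15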
        public void EnumeratorFetchByKeyNoWindowTest()
        {
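            // The records are written at timestamp 100, so fetching the [50, 75] window returns nothing.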
            var serdes = new StringSerDes();

            string deserialize(byte[] bytes)
            {
                return(serdes.Deserialize(bytes, new SerializationContext()));
            }

            byte[] key  = serdes.Serialize("key", new SerializationContext()), value = serdes.Serialize("value", new SerializationContext());
            byte[] key2 = serdes.Serialize("key2", new SerializationContext()), value2 = serdes.Serialize("value2", new SerializationContext());
            byte[] key3 = serdes.Serialize("key3", new SerializationContext()), value3 = serdes.Serialize("value3", new SerializationContext());

            store.Put(new Bytes(key), value, 100);
            store.Put(new Bytes(key2), value2, 100);
            store.Put(new Bytes(key3), value3, 100);

            var enumerator = store.Fetch(new Bytes(key), 50, 75);

            Assert.IsFalse(enumerator.MoveNext());
            enumerator.Dispose();
        }
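Example #16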
        public void EnumeratorFetchByKeyTest()
        {
            var serdes = new StringSerDes();

            string deserialize(byte[] bytes)
            {
                return(serdes.Deserialize(bytes, new SerializationContext()));
            }

            byte[] key  = serdes.Serialize("key", new SerializationContext()), value = serdes.Serialize("value", new SerializationContext());
            byte[] key2 = serdes.Serialize("test", new SerializationContext()), value2 = serdes.Serialize("value2", new SerializationContext());
            byte[] key3 = serdes.Serialize("toto", new SerializationContext()), value3 = serdes.Serialize("value3", new SerializationContext());

            store.Put(new Bytes(key), value, 100);
            store.Put(new Bytes(key2), value2, 100);
            store.Put(new Bytes(key3), value3, 100);

            var enumerator = store.Fetch(new Bytes(key), 50, 150);

            Assert.IsTrue(enumerator.MoveNext());
            Assert.AreEqual("value", deserialize(enumerator.Current.Value.Value));
            Assert.IsFalse(enumerator.MoveNext());
            enumerator.Dispose();
        }
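Example #17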
        private string FromKey(byte[] keyBytes)
            => stringSerDes.Deserialize(keyBytes, new SerializationContext());
Example #18
        public override T Deserialize(byte[] data, SerializationContext context)
        {
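            // Deserialize the raw bytes with the inner serdes, then rebuild the typed object from the JSON string.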
            var s = innerSerdes.Deserialize(data, context);

            return(Newtonsoft.Json.JsonConvert.DeserializeObject <T>(s));
        }
Example #19
        public void StreamThreadNormalWorkflow()
        {
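            // Full produce/process/consume cycle through one StreamThread; the test also verifies the
            // thread state transitions (CREATED ... DEAD) and that the metrics reporter callback fired.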
            bool metricsReporterCalled   = false;
            List <ThreadState> allStates = new List <ThreadState>();
            var expectedStates           = new List <ThreadState>
            {
                ThreadState.CREATED,
                ThreadState.STARTING,
                ThreadState.PARTITIONS_ASSIGNED,
                ThreadState.RUNNING,
                ThreadState.PENDING_SHUTDOWN,
                ThreadState.DEAD
            };

            var source = new System.Threading.CancellationTokenSource();
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId   = "test";
            config.Guarantee       = ProcessingGuarantee.AT_LEAST_ONCE;
            config.PollMs          = 1;
            config.MetricsReporter = (sensor) => { metricsReporterCalled = true; };
            config.AddOrUpdate(StreamConfig.metricsIntervalMsCst, 10);

            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder.Stream <string, string>("topic").To("topic2");

            var topo = builder.Build();

            var supplier = new SyncKafkaSupplier();
            var producer = supplier.GetProducer(config.ToProducerConfig());
            var consumer = supplier.GetConsumer(config.ToConsumerConfig("test-consum"), null);

            consumer.Subscribe("topic2");
            var thread = StreamThread.Create(
                "thread-0", "c0",
                topo.Builder, new StreamMetricsRegistry(), config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                0) as StreamThread;

            allStates.Add(thread.State);
            thread.StateChanged += (t, o, n) =>
            {
                Assert.IsInstanceOf <ThreadState>(n);
                allStates.Add(n as ThreadState);
            };

            thread.Start(source.Token);
            producer.Produce("topic", new Confluent.Kafka.Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key1", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });
            // Wait for the stream thread to process the message
            System.Threading.Thread.Sleep(100);
            var message = consumer.Consume(100);

            source.Cancel();
            thread.Dispose();

            Assert.AreEqual("key1", serdes.Deserialize(message.Message.Key, new SerializationContext()));
            Assert.AreEqual("coucou", serdes.Deserialize(message.Message.Value, new SerializationContext()));
            Assert.AreEqual(expectedStates, allStates);
            Assert.IsTrue(metricsReporterCalled);
        }
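Example #20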
        private string FromValue(byte[] valueBytes)
            => stringSerDes.Deserialize(valueBytes, new SerializationContext());
Example #21
        public void AddOneRecordAndPollTest()
        {
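            // Add one record to the PartitionGrouper, check the buffered counters, then poll it back.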
            var grouper = new PartitionGrouper(GetPartitions());

            grouper.AddRecord(topicPart1, MakeMessageWithKey("key", "test"));
            Assert.IsFalse(grouper.AllPartitionsBuffered);
            Assert.AreEqual(1, grouper.NumBuffered());
            Assert.AreEqual(1, grouper.NumBuffered(topicPart1));
            var record = grouper.NextRecord;

            Assert.IsNotNull(record);
            Assert.AreEqual("key", serdes.Deserialize(record.Record.Message.Key, new SerializationContext()));
            Assert.AreEqual("test", serdes.Deserialize(record.Record.Message.Value, new SerializationContext()));
            Assert.AreEqual("source1", record.Processor.Name);
            Assert.IsTrue(record.Queue.IsEmpty);
        }
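Example #22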
        string Deserialize(byte[] data) => serdes.Deserialize(data, new SerializationContext());
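Example #23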
        //[Test]
        // TODO : fix that
        public void WorkflowCompleteBufferedRecordsTest()
        {
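            // Stream-stream join between topic1 and topic2: topic1 is filled past BufferedRecordsPerPartition,
            // then one record on topic2 lets the join emit all buffered results to "output".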
            int maxBuffered = 10;
            var token       = new System.Threading.CancellationTokenSource();
            var serdes      = new StringSerDes();
            var config      = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId = "test-group";
            config.MaxTaskIdleMs = (long)TimeSpan.FromSeconds(100).TotalMilliseconds;
            config.BufferedRecordsPerPartition = maxBuffered;
            config.PollMs = 10;

            var builder = new StreamBuilder();

            var stream1 = builder.Stream <string, string>("topic1");
            var stream2 = builder.Stream <string, string>("topic2");

            stream1
            .Join(stream2, (v1, v2) => $"{v1}-{v2}", JoinWindowOptions.Of(TimeSpan.FromSeconds(10)))
            .To("output");

            var topo = builder.Build();

            var supplier = new SyncKafkaSupplier();
            var producer = supplier.GetProducer(config.ToProducerConfig());
            var consumer = supplier.GetConsumer(config.ToConsumerConfig("test-consum"), null);

            consumer.Subscribe("output");
            var thread = StreamThread.Create(
                "thread-0", "c0",
                topo.Builder, config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                0) as StreamThread;

            thread.Start(token.Token);

            for (int i = 0; i < maxBuffered + 1; ++i)
            {
                producer.Produce("topic1", new Message <byte[], byte[]>
                {
                    Key   = serdes.Serialize("key", new SerializationContext()),
                    Value = serdes.Serialize($"coucou{i}", new SerializationContext())
                });
            }
            // The consumer pauses after maxBuffered + 1 messages
            System.Threading.Thread.Sleep(50);

            // Add one more message while the stream thread's consumer is paused
            producer.Produce("topic1", new Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key", new SerializationContext()),
                Value = serdes.Serialize($"coucou{maxBuffered+1}", new SerializationContext())
            });

            Assert.AreEqual(1, thread.ActiveTasks.Count());
            var task = thread.ActiveTasks.ToArray()[0];

            Assert.IsNotNull(task.Grouper);
            Assert.IsFalse(task.Grouper.AllPartitionsBuffered);
            Assert.AreEqual(maxBuffered + 1, task.Grouper.NumBuffered());
            Assert.AreEqual(maxBuffered + 1, task.Grouper.NumBuffered(new TopicPartition("topic1", 0)));
            Assert.AreEqual(0, task.Grouper.NumBuffered(new TopicPartition("topic2", 0)));

            producer.Produce("topic2", new Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key", new SerializationContext()),
                Value = serdes.Serialize($"test", new SerializationContext())
            });

            List <ConsumeResult <byte[], byte[]> > records = new List <ConsumeResult <byte[], byte[]> >();

            do
            {
                records.AddRange(consumer.ConsumeRecords(TimeSpan.FromMilliseconds(100)).ToList());
            } while (records.Count() <= 12);

            Assert.AreEqual(maxBuffered + 2, records.Count());
            for (int i = 0; i < maxBuffered + 2; ++i)
            {
                var message = records.ToArray()[i];
                Assert.AreEqual("key", serdes.Deserialize(message.Message.Key, new SerializationContext()));
                Assert.IsTrue(serdes.Deserialize(message.Message.Value, new SerializationContext()).Contains($"coucou{i}-"));
            }

            token.Cancel();
            thread.Dispose();
        }
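Example #24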
        public void StreamThreadNormalWorkflowWithRebalancing()
        {
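            // Same normal workflow, but a second StreamThread joins mid-run on a 4-partition mock cluster,
            // so the expected states include a PARTITIONS_REVOKED / PARTITIONS_ASSIGNED rebalance cycle.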
            List <ThreadState> allStates = new List <ThreadState>();
            var expectedStates           = new List <ThreadState>
            {
                ThreadState.CREATED,
                ThreadState.STARTING,
                ThreadState.PARTITIONS_ASSIGNED,
                ThreadState.RUNNING,
                ThreadState.PARTITIONS_REVOKED,
                ThreadState.PARTITIONS_ASSIGNED,
                ThreadState.RUNNING,
                ThreadState.PENDING_SHUTDOWN,
                ThreadState.DEAD
            };

            var source = new System.Threading.CancellationTokenSource();
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId = "test";
            config.Guarantee     = ProcessingGuarantee.AT_LEAST_ONCE;
            config.PollMs        = 1;

            var consumeConfig = config.Clone();

            consumeConfig.ApplicationId = "consume-test";

            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder.Stream <string, string>("topic").To("topic2");

            var topo = builder.Build();

            var supplier = new MockKafkaSupplier(4);
            var producer = supplier.GetProducer(consumeConfig.ToProducerConfig());
            var consumer = supplier.GetConsumer(consumeConfig.ToConsumerConfig("test-consum"), null);

            consumer.Subscribe("topic2");

            var thread = StreamThread.Create(
                "thread-0", "c0",
                topo.Builder, config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                0) as StreamThread;

            allStates.Add(thread.State);
            thread.StateChanged += (t, o, n) =>
            {
                Assert.IsInstanceOf <ThreadState>(n);
                allStates.Add(n as ThreadState);
            };

            thread.Start(source.Token);
            // Wait for partitions to be assigned
            System.Threading.Thread.Sleep(50);

            var thread2 = StreamThread.Create(
                "thread-1", "c1",
                topo.Builder, config,
                supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
                1) as StreamThread;

            thread2.Start(source.Token);
            // Wait for partition rebalancing
            System.Threading.Thread.Sleep(50);

            producer.Produce("topic", new Message <byte[], byte[]>
            {
                Key   = serdes.Serialize("key1", new SerializationContext()),
                Value = serdes.Serialize("coucou", new SerializationContext())
            });
            // Wait for the stream threads to process the message
            System.Threading.Thread.Sleep(100);
            var message = consumer.Consume(100);

            // 2 consumers in the same group on a topic with 4 partitions => 2 active tasks per thread
            Assert.AreEqual(2, thread.ActiveTasks.Count());
            Assert.AreEqual(2, thread2.ActiveTasks.Count());

            source.Cancel();
            thread.Dispose();
            thread2.Dispose();

            Assert.AreEqual("key1", serdes.Deserialize(message.Message.Key, new SerializationContext()));
            Assert.AreEqual("coucou", serdes.Deserialize(message.Message.Value, new SerializationContext()));
            Assert.AreEqual(expectedStates, allStates);
            // Destroy the in-memory cluster
            supplier.Destroy();
        }
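Example #25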
        public void StreamTaskWithEXACTLY_ONCE()
        {
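            // Drive a StreamTask manually with EXACTLY_ONCE: feed 5 records, process and commit them,
            // then read the uppercased keys/values back from "topic2".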
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId = "test-app";
            config.Guarantee     = ProcessingGuarantee.EXACTLY_ONCE;

            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder.Stream <string, string>("topic")
            .Map((k, v) => KeyValuePair.Create(k.ToUpper(), v.ToUpper()))
            .To("topic2");

            var    topology = builder.Build();
            TaskId id       = new TaskId {
                Id = 0, Partition = 0
            };
            var processorTopology = topology.Builder.BuildTopology(id);

            var supplier = new SyncKafkaSupplier();
            var producer = supplier.GetProducer(config.ToProducerConfig());
            var consumer = supplier.GetConsumer(config.ToConsumerConfig(), null);


            var part = new TopicPartition("topic", 0);
            StreamTask task = new StreamTask(
                "thread-0",
                id,
                new List <TopicPartition> { part },
                processorTopology,
                consumer,
                config,
                supplier,
                null,
                new MockChangelogRegister(),
                new StreamMetricsRegistry());

            task.GroupMetadata = consumer as SyncConsumer;
            task.InitializeStateStores();
            task.InitializeTopology();
            task.RestorationIfNeeded();
            task.CompleteRestoration();

            List <ConsumeResult <byte[], byte[]> > messages = new List <ConsumeResult <byte[], byte[]> >();
            int offset = 0;

            for (int i = 0; i < 5; ++i)
            {
                messages.Add(
                    new ConsumeResult <byte[], byte[]>
                {
                    Message = new Message <byte[], byte[]>
                    {
                        Key   = serdes.Serialize($"key{i + 1}", new SerializationContext()),
                        Value = serdes.Serialize($"value{i + 1}", new SerializationContext())
                    },
                    TopicPartitionOffset = new TopicPartitionOffset(part, offset++)
                });
            }

            task.AddRecords(messages);

            Assert.IsTrue(task.CanProcess(DateTime.Now.GetMilliseconds()));

            while (task.CanProcess(DateTime.Now.GetMilliseconds()))
            {
                Assert.IsTrue(task.Process());
                Assert.IsTrue(task.CommitNeeded);
                task.Commit();
            }

            // CHECK IN TOPIC topic2
            consumer.Subscribe("topic2");
            List <ConsumeResult <byte[], byte[]> > results = new List <ConsumeResult <byte[], byte[]> >();
            ConsumeResult <byte[], byte[]>         result  = null;

            do
            {
                result = consumer.Consume(100);

                if (result != null)
                {
                    results.Add(result);
                    consumer.Commit(result);
                }
            } while (result != null);

            Assert.AreEqual(5, results.Count);
            for (int i = 0; i < 5; ++i)
            {
                Assert.AreEqual($"KEY{i + 1}", serdes.Deserialize(results[i].Message.Key, new SerializationContext()));
                Assert.AreEqual($"VALUE{i + 1}",
                                serdes.Deserialize(results[i].Message.Value, new SerializationContext()));
            }

            task.Close();
        }
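Example #26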
        public void TaskManagerCommit()
        {
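            // The TaskManager creates one task per assigned partition (4 here); only the task that
            // received records needs a commit, so CommitAll() returns 1.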
            var config = new StreamConfig <StringSerDes, StringSerDes>();

            config.ApplicationId = "test-app";
            var serdes  = new StringSerDes();
            var builder = new StreamBuilder();

            builder.Stream <string, string>("topic")
            .Map((k, v) => KeyValuePair.Create(k.ToUpper(), v.ToUpper()))
            .To("topic2");

            var topology = builder.Build();

            var supplier = new SyncKafkaSupplier();
            var producer = supplier.GetProducer(config.ToProducerConfig());
            var consumer = supplier.GetConsumer(config.ToConsumerConfig(), null);

            var taskCreator = new TaskCreator(topology.Builder, config, "thread-0", supplier, producer);
            var taskManager = new TaskManager(topology.Builder, taskCreator, supplier.GetAdmin(config.ToAdminConfig("admin")), consumer);

            taskManager.CreateTasks(
                new List <TopicPartition>
            {
                new TopicPartition("topic", 0),
                new TopicPartition("topic", 1),
                new TopicPartition("topic", 2),
                new TopicPartition("topic", 3),
            });

            Assert.AreEqual(4, taskManager.ActiveTasks.Count());

            var part = new TopicPartition("topic", 0);
            var task = taskManager.ActiveTaskFor(part);
            List <ConsumeResult <byte[], byte[]> > messages = new List <ConsumeResult <byte[], byte[]> >();
            int offset = 0;

            for (int i = 0; i < 5; ++i)
            {
                messages.Add(
                    new ConsumeResult <byte[], byte[]>
                {
                    Message = new Message <byte[], byte[]>
                    {
                        Key   = serdes.Serialize($"key{i + 1}", new SerializationContext()),
                        Value = serdes.Serialize($"value{i + 1}", new SerializationContext())
                    },
                    TopicPartitionOffset = new TopicPartitionOffset(part, offset++)
                });
            }

            task.AddRecords(messages);

            Assert.IsTrue(task.CanProcess(DateTime.Now.GetMilliseconds()));

            while (task.CanProcess(DateTime.Now.GetMilliseconds()))
            {
                Assert.IsTrue(task.Process());
            }

            // Only one task has received records
            Assert.AreEqual(1, taskManager.CommitAll());

            // CHECK IN TOPIC topic2
            consumer.Subscribe("topic2");
            List <ConsumeResult <byte[], byte[]> > results = new List <ConsumeResult <byte[], byte[]> >();
            ConsumeResult <byte[], byte[]>         result  = null;

            do
            {
                result = consumer.Consume(100);

                if (result != null)
                {
                    results.Add(result);
                    consumer.Commit(result);
                }
            } while (result != null);

            Assert.AreEqual(5, results.Count);
            for (int i = 0; i < 5; ++i)
            {
                Assert.AreEqual($"KEY{i + 1}", serdes.Deserialize(results[i].Message.Key, new SerializationContext()));
                Assert.AreEqual($"VALUE{i+1}", serdes.Deserialize(results[i].Message.Value, new SerializationContext()));
            }

            // No records in this task
            part = new TopicPartition("topic", 2);
            task = taskManager.ActiveTaskFor(part);
            Assert.IsFalse(task.CanProcess(DateTime.Now.GetMilliseconds()));
            Assert.IsFalse(task.Process());

            taskManager.Close();
        }