public void StreamThreadNormalWorkflow()
{
    bool metricsReporterCalled = false;
    List<ThreadState> allStates = new List<ThreadState>();
    var expectedStates = new List<ThreadState>
    {
        ThreadState.CREATED,
        ThreadState.STARTING,
        ThreadState.PARTITIONS_ASSIGNED,
        ThreadState.RUNNING,
        ThreadState.PENDING_SHUTDOWN,
        ThreadState.DEAD
    };

    var source = new System.Threading.CancellationTokenSource();
    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.Guarantee = ProcessingGuarantee.AT_LEAST_ONCE;
    config.PollMs = 1;
    config.MetricsReporter = (sensor) => { metricsReporterCalled = true; };
    config.AddOrUpdate(StreamConfig.metricsIntervalMsCst, 10);

    var serdes = new StringSerDes();

    // Simple topology: forward every record from "topic" to "topic2"
    var builder = new StreamBuilder();
    builder.Stream<string, string>("topic").To("topic2");
    var topo = builder.Build();

    var supplier = new SyncKafkaSupplier();
    var producer = supplier.GetProducer(config.ToProducerConfig());
    var consumer = supplier.GetConsumer(config.ToConsumerConfig("test-consum"), null);
    consumer.Subscribe("topic2");

    var thread = StreamThread.Create(
        "thread-0", "c0",
        topo.Builder, new StreamMetricsRegistry(), config,
        supplier, supplier.GetAdmin(config.ToAdminConfig("admin")),
        0) as StreamThread;

    // Record every state transition of the stream thread
    allStates.Add(thread.State);
    thread.StateChanged += (t, o, n) =>
    {
        Assert.IsInstanceOf<ThreadState>(n);
        allStates.Add(n as ThreadState);
    };

    thread.Start(source.Token);
    producer.Produce("topic", new Confluent.Kafka.Message<byte[], byte[]>
    {
        Key = serdes.Serialize("key1", new SerializationContext()),
        Value = serdes.Serialize("coucou", new SerializationContext())
    });

    // Wait for the stream thread to process the message
    System.Threading.Thread.Sleep(100);

    var message = consumer.Consume(100);

    source.Cancel();
    thread.Dispose();

    // The record must have been forwarded to "topic2" unchanged
    Assert.AreEqual("key1", serdes.Deserialize(message.Message.Key, new SerializationContext()));
    Assert.AreEqual("coucou", serdes.Deserialize(message.Message.Value, new SerializationContext()));

    // The thread must have gone through the full lifecycle and reported metrics
    Assert.AreEqual(expectedStates, allStates);
    Assert.IsTrue(metricsReporterCalled);
}