public static async Task Main(string[] args)
{
    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test-app2";
    config.BootstrapServers = "localhost:9092";
    config.AutoOffsetReset = AutoOffsetReset.Earliest;
    config.StateDir = Path.Combine(".");
    config.CommitIntervalMs = 5000;
    config.Logger = LoggerFactory.Create(builder =>
    {
        builder.SetMinimumLevel(LogLevel.Information);
        builder.AddLog4Net();
    });

    StreamBuilder builder = new StreamBuilder();

    var table = builder.GlobalTable<string, string>("topic");

    builder.Stream<string, string>("input")
        .GroupBy((k, v) => v)
        .Count()
        .ToStream()
        .To("output");

    Topology t = builder.Build();
    KafkaStream stream = new KafkaStream(t, config);

    Console.CancelKeyPress += (o, e) => stream.Dispose();

    await stream.StartAsync();
}
static async Task Main(string[] args)
{
    string inputTopic = "input", outputTopic = "output";
    int numberPartitions = 4;

    await CreateTopics(inputTopic, outputTopic, numberPartitions);

    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "sample-streamiz-demo";
    config.BootstrapServers = "localhost:9092";
    config.AutoOffsetReset = AutoOffsetReset.Earliest;
    config.StateDir = Path.Combine(".");
    config.CommitIntervalMs = 5000;
    config.Guarantee = ProcessingGuarantee.AT_LEAST_ONCE;
    config.MetricsRecording = MetricsRecordingLevel.DEBUG;
    config.UsePrometheusReporter(9090, true);

    StreamBuilder builder = new StreamBuilder();

    builder.Stream<string, string>(inputTopic)
        .FlatMapValues((v) => v.Split(" ").AsEnumerable())
        .GroupBy((k, v) => v)
        .Count()
        .ToStream()
        .MapValues((v) => v.ToString())
        .To(outputTopic);

    Topology t = builder.Build();
    KafkaStream stream = new KafkaStream(t, config);

    Console.CancelKeyPress += (o, e) => stream.Dispose();

    await stream.StartAsync();
}
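// The sample above calls a CreateTopics helper that is not shown. Below is a
// minimal sketch of what it could look like, using Confluent.Kafka's admin
// client (requires: using Confluent.Kafka; using Confluent.Kafka.Admin;).
// The broker address and replication factor here are assumptions.
static async Task CreateTopics(string inputTopic, string outputTopic, int numberPartitions)
{
    var adminConfig = new AdminClientConfig { BootstrapServers = "localhost:9092" };

    using var adminClient = new AdminClientBuilder(adminConfig).Build();
    try
    {
        // Create both topics with the requested partition count.
        await adminClient.CreateTopicsAsync(new[]
        {
            new TopicSpecification { Name = inputTopic, NumPartitions = numberPartitions, ReplicationFactor = 1 },
            new TopicSpecification { Name = outputTopic, NumPartitions = numberPartitions, ReplicationFactor = 1 }
        });
    }
    catch (CreateTopicsException)
    {
        // Topics may already exist; ignore in this sample.
    }
}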
static async Task Main(string[] args)
{
    CancellationTokenSource source = new CancellationTokenSource();

    var config = new StreamConfig();
    config.ApplicationId = "test-app";
    config.BootstrapServers = "192.168.56.1:9092";
    config.SaslMechanism = SaslMechanism.Plain;
    config.SaslUsername = "******";
    config.SaslPassword = "******";
    config.SecurityProtocol = SecurityProtocol.SaslPlaintext;
    config.AutoOffsetReset = AutoOffsetReset.Earliest;
    // Needed for SchemaAvroSerDes
    config.SchemaRegistryUrl = "http://192.168.56.1:8081";
    config.AutoRegisterSchemas = false;

    StreamBuilder builder = new StreamBuilder();

    builder.Stream<string, Person, StringSerDes, SchemaAvroSerDes<Person>>("person")
        .Filter((k, v) => v.age >= 18)
        .MapValues((v) => $"{v.firstName}-{v.lastName}-{v.age}")
        .To<StringSerDes, StringSerDes>("output");

    Topology t = builder.Build();
    KafkaStream stream = new KafkaStream(t, config);

    Console.CancelKeyPress += (o, e) =>
    {
        stream.Dispose();
    };

    await stream.StartAsync();
}
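// The Person type used above is not shown; it is an Avro record, normally
// generated from an .avsc schema with avrogen. The hand-written sketch below
// is only illustrative (the schema namespace is an assumption) and shows the
// ISpecificRecord shape that SchemaAvroSerDes<T> expects.
// Requires: using Avro; using Avro.Specific;
public class Person : ISpecificRecord
{
    public static Schema _SCHEMA = Schema.Parse(
        @"{""type"":""record"",""name"":""Person"",""namespace"":""sample"",
           ""fields"":[{""name"":""firstName"",""type"":""string""},
                       {""name"":""lastName"",""type"":""string""},
                       {""name"":""age"",""type"":""int""}]}");

    public string firstName { get; set; }
    public string lastName { get; set; }
    public int age { get; set; }

    public Schema Schema => _SCHEMA;

    public object Get(int fieldPos)
    {
        switch (fieldPos)
        {
            case 0: return firstName;
            case 1: return lastName;
            case 2: return age;
            default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
        }
    }

    public void Put(int fieldPos, object fieldValue)
    {
        switch (fieldPos)
        {
            case 0: firstName = (string)fieldValue; break;
            case 1: lastName = (string)fieldValue; break;
            case 2: age = (int)fieldValue; break;
            default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
        }
    }
}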
public async Task BuildGlobalStateStore()
{
    var timeout = TimeSpan.FromSeconds(10);
    bool isRunningState = false;
    DateTime dt = DateTime.Now;

    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";
    config.PollMs = 1;

    var builder = new StreamBuilder();
    builder.GlobalTable<string, string>("test", InMemory<string, string>.As("store"));

    var supplier = new SyncKafkaSupplier();
    var producer = supplier.GetProducer(new ProducerConfig());

    var t = builder.Build();
    var stream = new KafkaStream(t, config, supplier);

    stream.StateChanged += (old, @new) =>
    {
        if (@new.Equals(KafkaStream.State.RUNNING))
        {
            isRunningState = true;
        }
    };

    await stream.StartAsync();
    while (!isRunningState)
    {
        Thread.Sleep(250);
        if (DateTime.Now > dt + timeout)
        {
            break;
        }
    }

    Assert.IsTrue(isRunningState);

    if (isRunningState)
    {
        var stringSerdes = new StringSerDes();
        producer.Produce("test", new Message<byte[], byte[]>
        {
            Key = stringSerdes.Serialize("key", new SerializationContext()),
            Value = stringSerdes.Serialize("value", new SerializationContext())
        });

        Thread.Sleep(250);

        var store = stream.Store(StoreQueryParameters.FromNameAndType("store", QueryableStoreTypes.KeyValueStore<string, string>()));
        Assert.IsNotNull(store);
        Assert.AreEqual(1, store.ApproximateNumEntries());
    }

    stream.Dispose();
}
static async Task Main(string[] args) { CancellationTokenSource source = new CancellationTokenSource(); var config = new StreamConfig(); config.ApplicationId = "test-app"; config.BootstrapServers = "localhost:9092"; // NEED FOR SchemaAvroSerDes config.SchemaRegistryUrl = "http://localhost:8081"; config.AutoRegisterSchemas = true; StreamBuilder builder = new StreamBuilder(); var table = builder.Table("product", new Int32SerDes(), new SchemaAvroSerDes <Product>(), InMemory <int, Product> .As("product-store")); var orders = builder.Stream <int, Order, Int32SerDes, SchemaAvroSerDes <Order> >("orders"); orders.Join(table, (order, product) => new OrderProduct { order_id = order.order_id, price = order.price, product_id = product.product_id, product_name = product.name, product_price = product.price }) .To <Int32SerDes, SchemaAvroSerDes <OrderProduct> >("orders-output"); orders .GroupByKey() .Aggregate <OrderAgg, SchemaAvroSerDes <OrderAgg> >( () => new OrderAgg(), (key, order, agg) => { agg.order_id = order.order_id; agg.price = order.price; agg.product_id = order.product_id; agg.totalPrice += order.price; return(agg); }) .ToStream() .Print(Printed <int, OrderAgg> .ToOut()); Topology t = builder.Build(); KafkaStream stream = new KafkaStream(t, config); Console.CancelKeyPress += (o, e) => { stream.Dispose(); }; await stream.StartAsync(); }
public async Task GetWindowStateStore()
{
    var timeout = TimeSpan.FromSeconds(10);
    bool isRunningState = false;
    DateTime dt = DateTime.Now;

    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";

    var builder = new StreamBuilder();
    builder
        .Stream<string, string>("test")
        .GroupByKey()
        .WindowedBy(TumblingWindowOptions.Of(TimeSpan.FromMinutes(1)))
        .Count(InMemoryWindows<string, long>.As("store"));

    var t = builder.Build();
    var stream = new KafkaStream(t, config, new SyncKafkaSupplier());

    stream.StateChanged += (old, @new) =>
    {
        if (@new.Equals(KafkaStream.State.RUNNING))
        {
            isRunningState = true;
        }
    };

    await stream.StartAsync();
    while (!isRunningState)
    {
        Thread.Sleep(250);
        if (DateTime.Now > dt + timeout)
        {
            break;
        }
    }

    Assert.IsTrue(isRunningState);

    if (isRunningState)
    {
        var store = stream.Store(StoreQueryParameters.FromNameAndType("store", QueryableStoreTypes.WindowStore<string, long>()));
        Assert.IsNotNull(store);
    }

    stream.Dispose();
}
public async Task GetKVStateStoreInvalidStateStoreException()
{
    var timeout = TimeSpan.FromSeconds(10);
    bool state = false;
    DateTime dt = DateTime.Now;

    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";

    var builder = new StreamBuilder();
    builder
        .Stream<string, string>("test")
        .GroupByKey()
        .Count(InMemory<string, long>.As("store"));

    var t = builder.Build();
    var stream = new KafkaStream(t, config, new SyncKafkaSupplier());

    stream.StateChanged += (old, @new) =>
    {
        if (!@new.Equals(KafkaStream.State.RUNNING))
        {
            if (!state)
            {
                Assert.Throws<InvalidStateStoreException>(() =>
                    stream.Store(StoreQueryParameters.FromNameAndType("store", QueryableStoreTypes.KeyValueStore<string, long>())));
                state = true;
            }
        }
    };

    await stream.StartAsync();
    while (!state)
    {
        Thread.Sleep(250);
        if (DateTime.Now > dt + timeout)
        {
            break;
        }
    }

    Assert.IsTrue(state);
    stream.Dispose();
}
public void GetStateStoreBeforeRunningState()
{
    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";

    var builder = new StreamBuilder();
    builder.Table("topic", InMemory<string, string>.As("store"));

    var t = builder.Build();
    var stream = new KafkaStream(t, config, new SyncKafkaSupplier());

    Assert.Throws<IllegalStateException>(() =>
        stream.Store(StoreQueryParameters.FromNameAndType("store", QueryableStoreTypes.KeyValueStore<string, string>())));

    stream.Dispose();
}
static async Task Main(string[] args) { CancellationTokenSource source = new CancellationTokenSource(); var config = new StreamConfig(); config.ApplicationId = "test-app"; config.BootstrapServers = "192.168.56.1:9092"; config.SaslMechanism = SaslMechanism.Plain; config.SaslUsername = "******"; config.SaslPassword = "******"; config.SecurityProtocol = SecurityProtocol.SaslPlaintext; config.AutoOffsetReset = AutoOffsetReset.Earliest; // NEED FOR SchemaAvroSerDes config.SchemaRegistryUrl = "http://192.168.56.1:8081"; config.AutoRegisterSchemas = true; StreamBuilder builder = new StreamBuilder(); var table = builder.Table("product", new Int32SerDes(), new SchemaAvroSerDes <Product>(), InMemory <int, Product> .As("product-store")); builder.Stream <int, Order, Int32SerDes, SchemaAvroSerDes <Order> >("orders") .Join(table, (order, product) => new OrderProduct { order_id = order.order_id, price = order.price, product_id = product.product_id, product_name = product.name, product_price = product.price }) .To <Int32SerDes, SchemaAvroSerDes <OrderProduct> >("orders-output"); Topology t = builder.Build(); KafkaStream stream = new KafkaStream(t, config); Console.CancelKeyPress += (o, e) => { stream.Dispose(); }; await stream.StartAsync(); }
public async Task StartKafkaStream()
{
    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";

    var builder = new StreamBuilder();
    builder.Stream<string, string>("topic").To("topic2");

    var t = builder.Build();
    var stream = new KafkaStream(t, config, new SyncKafkaSupplier());

    await stream.StartAsync();
    Thread.Sleep(1500);
    stream.Dispose();
}
public async Task StartKafkaStreamWaitRunningState()
{
    var timeout = TimeSpan.FromSeconds(10);
    bool isRunningState = false;
    DateTime dt = DateTime.Now;

    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";

    var builder = new StreamBuilder();
    builder.Stream<string, string>("topic").To("topic2");

    var t = builder.Build();
    var stream = new KafkaStream(t, config, new SyncKafkaSupplier());

    stream.StateChanged += (old, @new) =>
    {
        if (@new.Equals(KafkaStream.State.RUNNING))
        {
            isRunningState = true;
        }
    };

    await stream.StartAsync();
    while (!isRunningState)
    {
        Thread.Sleep(250);
        if (DateTime.Now > dt + timeout)
        {
            break;
        }
    }

    var sensors = stream.Metrics();
    Assert.IsTrue(sensors.Any());

    stream.Dispose();
    Assert.IsTrue(isRunningState);
}
static async System.Threading.Tasks.Task Main(string[] args) { var config = new StreamConfig <StringSerDes, StringSerDes>(); config.ApplicationId = "test-app"; config.BootstrapServers = "localhost:9092"; StreamBuilder builder = new StreamBuilder(); builder.Stream <string, string>("test") .To("test2"); Topology t = builder.Build(); KafkaStream stream = new KafkaStream(t, config); Console.CancelKeyPress += (o, e) => stream.Dispose(); await stream.StartAsync(); }
static async System.Threading.Tasks.Task Main(string[] args) { var config = new StreamConfig <StringSerDes, StringSerDes>(); config.ApplicationId = "test-app"; config.BootstrapServers = "localhost:9093"; StreamBuilder builder = new StreamBuilder(); builder.Stream <string, string>("evenements") .GroupByKey() .WindowedBy(TumblingWindowOptions.Of(TimeSpan.FromMinutes(1))) .Aggregate(() => "", (k, v, va) => va += v) .ToStream() .Print(Printed <Windowed <String>, String> .ToOut()); Topology t = builder.Build(); KafkaStream stream = new KafkaStream(t, config); Console.CancelKeyPress += (o, e) => stream.Dispose(); await stream.StartAsync(); }
public async Task GetWindowElementInStateStore()
{
    var timeout = TimeSpan.FromSeconds(10);
    bool isRunningState = false;
    DateTime dt = DateTime.Now;

    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";
    config.PollMs = 10;

    var supplier = new SyncKafkaSupplier();
    var producer = supplier.GetProducer(config.ToProducerConfig());

    var builder = new StreamBuilder();
    builder
        .Stream<string, string>("test")
        .GroupByKey()
        .WindowedBy(TumblingWindowOptions.Of(TimeSpan.FromMinutes(1)))
        .Count(InMemoryWindows<string, long>.As("store"));

    var t = builder.Build();
    var stream = new KafkaStream(t, config, supplier);

    stream.StateChanged += (old, @new) =>
    {
        if (@new.Equals(KafkaStream.State.RUNNING))
        {
            isRunningState = true;
        }
    };

    await stream.StartAsync();
    while (!isRunningState)
    {
        Thread.Sleep(250);
        if (DateTime.Now > dt + timeout)
        {
            break;
        }
    }

    Assert.IsTrue(isRunningState);

    if (isRunningState)
    {
        var serdes = new StringSerDes();
        dt = DateTime.Now;

        producer.Produce("test", new Confluent.Kafka.Message<byte[], byte[]>
        {
            Key = serdes.Serialize("key1", new SerializationContext()),
            Value = serdes.Serialize("coucou", new SerializationContext()),
            Timestamp = new Confluent.Kafka.Timestamp(dt)
        });

        Thread.Sleep(50);

        var store = stream.Store(StoreQueryParameters.FromNameAndType("store", QueryableStoreTypes.WindowStore<string, long>()));
        Assert.IsNotNull(store);
        Assert.AreEqual(1, store.All().ToList().Count);

        var item = store.Fetch("key1", dt.AddMinutes(-1), dt.AddMinutes(1));
        Assert.IsNotNull(item);
        Assert.IsTrue(item.MoveNext());
        Assert.IsTrue(item.Current.HasValue);
        Assert.AreEqual(1, item.Current.Value.Value);
        item.Dispose();
    }

    stream.Dispose();
}
public async Task GetRangeKVStateStore()
{
    var timeout = TimeSpan.FromSeconds(10);
    bool isRunningState = false;
    DateTime dt = DateTime.Now;

    var config = new StreamConfig<StringSerDes, StringSerDes>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";
    config.PollMs = 10;

    var supplier = new SyncKafkaSupplier();
    var producer = supplier.GetProducer(config.ToProducerConfig());

    var builder = new StreamBuilder();
    builder.Table("topic", InMemory<string, string>.As("store"));

    var t = builder.Build();
    var stream = new KafkaStream(t, config, supplier);

    stream.StateChanged += (old, @new) =>
    {
        if (@new.Equals(KafkaStream.State.RUNNING))
        {
            isRunningState = true;
        }
    };

    await stream.StartAsync();
    while (!isRunningState)
    {
        Thread.Sleep(250);
        if (DateTime.Now > dt + timeout)
        {
            break;
        }
    }

    Assert.IsTrue(isRunningState);

    if (isRunningState)
    {
        var serdes = new StringSerDes();

        producer.Produce("topic", new Confluent.Kafka.Message<byte[], byte[]>
        {
            Key = serdes.Serialize("key1", new SerializationContext()),
            Value = serdes.Serialize("coucou", new SerializationContext())
        });

        Thread.Sleep(50);

        var store = stream.Store(StoreQueryParameters.FromNameAndType("store", QueryableStoreTypes.KeyValueStore<string, string>()));
        Assert.IsNotNull(store);

        var list = store.Range("key1", "key2").ToList();
        Assert.AreEqual(1, list.Count);

        var item = list[0];
        Assert.IsNotNull(item);
        Assert.AreEqual("coucou", item.Value);
        Assert.AreEqual("key1", item.Key);
    }

    stream.Dispose();
}
public async Task KafkaStreamDeserializationExceptionHandlerContinueTest()
{
    var _return = new List<KeyValuePair<string, string>>();
    var timeout = TimeSpan.FromSeconds(10);
    bool isRunningState = false;
    DateTime dt = DateTime.Now;

    var config = new StreamConfig<SerdesThrowException, SerdesThrowException>();
    config.ApplicationId = "test";
    config.BootstrapServers = "127.0.0.1";
    config.PollMs = 10;
    config.DeserializationExceptionHandler += (c, r, e) => ExceptionHandlerResponse.CONTINUE;

    var supplier = new SyncKafkaSupplier();
    var producer = supplier.GetProducer(config.ToProducerConfig());

    var builder = new StreamBuilder();
    builder
        .Stream<string, string>("test")
        .Peek((k, v) => _return.Add(KeyValuePair.Create(k, v)));

    var t = builder.Build();
    var stream = new KafkaStream(t, config, supplier);

    stream.StateChanged += (old, @new) =>
    {
        if (@new.Equals(KafkaStream.State.RUNNING))
        {
            isRunningState = true;
        }
    };

    await stream.StartAsync();
    while (!isRunningState)
    {
        Thread.Sleep(250);
        if (DateTime.Now > dt + timeout)
        {
            break;
        }
    }

    Assert.IsTrue(isRunningState);

    if (isRunningState)
    {
        var serdes = new StringSerDes();
        dt = DateTime.Now;

        producer.Produce("test", new Confluent.Kafka.Message<byte[], byte[]>
        {
            Key = serdes.Serialize("k", new SerializationContext()),
            Value = serdes.Serialize("test", new SerializationContext()),
            Timestamp = new Confluent.Kafka.Timestamp(dt)
        });
        producer.Produce("test", new Confluent.Kafka.Message<byte[], byte[]>
        {
            Key = serdes.Serialize("v", new SerializationContext()),
            Value = serdes.Serialize("test2", new SerializationContext()),
            Timestamp = new Confluent.Kafka.Timestamp(dt)
        });

        Thread.Sleep(1000);

        var expected = new List<KeyValuePair<string, string>>();
        expected.Add(KeyValuePair.Create("v", "test2"));

        Assert.AreEqual(expected, _return);
    }

    stream.Dispose();
}
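// SerdesThrowException is not shown in the test above. The sketch below is one
// plausible implementation consistent with the expected output (the first record
// is dropped because its value "test" fails to deserialize, so only ("v", "test2")
// reaches the Peek). The exact trigger and the overridden method signature are
// assumptions about the Streamiz StringSerDes API.
// Requires: using Confluent.Kafka; using Streamiz.Kafka.Net.SerDes;
internal class SerdesThrowException : StringSerDes
{
    public override string Deserialize(byte[] data, SerializationContext context)
    {
        var value = base.Deserialize(data, context);

        // Simulate a poison-pill record so the DeserializationExceptionHandler kicks in.
        if (value == "test")
            throw new Exception("Unable to deserialize record");

        return value;
    }
}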