/// <summary>
/// Verifies that serializer-level options set on <c>StreamConfig</c>
/// (subject-name strategy, auto-registration, latest-version flag, buffer size)
/// are forwarded into the Avro serializer configuration.
/// </summary>
public void SchemaRegistryAvroSerializerConfig()
{
    // Arrange: stream configuration carrying the serializer options.
    var streamConfig = new StreamConfig
    {
        SubjectNameStrategy = SubjectNameStrategy.TopicRecord,
        AutoRegisterSchemas = true,
        UseLatestVersion = false,
        BufferBytes = 1024
    };
    var avroSerDes = new SchemaAvroSerDes<Order>();

    // Act: derive the serializer configuration from the stream config.
    var serializerConfig = avroSerDes.GetSerializerConfig(streamConfig);

    // Assert: each option must have been copied across unchanged.
    Assert.AreEqual(Confluent.SchemaRegistry.SubjectNameStrategy.TopicRecord, serializerConfig.SubjectNameStrategy);
    Assert.AreEqual(true, serializerConfig.AutoRegisterSchemas);
    Assert.AreEqual(false, serializerConfig.UseLatestVersion);
    Assert.AreEqual(1024, serializerConfig.BufferBytes);
}
/// <summary>
/// Verifies that schema-registry connection options (URL, basic auth,
/// cache size, request timeout) on <c>StreamConfig</c> are forwarded into
/// the schema-registry client configuration.
/// </summary>
public void SchemaRegistryConfigWithBasicAuth()
{
    // Arrange: registry settings, including user:password basic auth.
    var streamConfig = new StreamConfig
    {
        SchemaRegistryUrl = "mock://test",
        BasicAuthUserInfo = "user:password",
        BasicAuthCredentialsSource = (int)AuthCredentialsSource.UserInfo,
        SchemaRegistryMaxCachedSchemas = 1,
        SchemaRegistryRequestTimeoutMs = 30
    };
    var avroSerDes = new SchemaAvroSerDes<Order>();

    // Act: derive the registry client configuration.
    var registryConfig = avroSerDes.GetConfig(streamConfig);

    // Assert: every connection setting must round-trip.
    Assert.AreEqual(1, registryConfig.MaxCachedSchemas);
    Assert.AreEqual(30, registryConfig.RequestTimeoutMs);
    Assert.AreEqual("mock://test", registryConfig.Url);
    Assert.AreEqual("user:password", registryConfig.BasicAuthUserInfo);
    Assert.AreEqual(AuthCredentialsSource.UserInfo, registryConfig.BasicAuthCredentialsSource);
}
/// <summary>
/// Seeds the demo pipeline: materializes the output-destination topic as an
/// in-memory table, waits for the stream to reach RUNNING (bounded by a
/// 10-second timeout), then produces sample Endpoint, Product and Order
/// messages to their respective topics.
/// </summary>
/// <param name="config">Application configuration (topic names, brokers, registry URL).</param>
/// <param name="services">Service provider used to resolve <c>IDataService</c>.</param>
public async Task exec(IConfiguration config, IServiceProvider services)
{
    Console.WriteLine("Process");
    var destTopic = config["spring.cloud.stream.bindings.output.destination"];
    Console.WriteLine(destTopic);

    using (var scope = services.CreateScope())
    {
        this._dataService = scope.ServiceProvider.GetRequiredService<IDataService>();

        bool isRunningState = false;
        var timeout = TimeSpan.FromSeconds(10);
        // UtcNow: elapsed-time measurement must not be distorted by DST/clock shifts.
        DateTime dt = DateTime.UtcNow;

        // Inject the obtained data into the stream.
        // NOTE(review): 'capture' is never read below; presumably readData() also
        // primes internal state — confirm before removing the call entirely.
        Order[] capture = this._dataService.readData();

        // Stream configuration for the seeding topology.
        var sConfig = new StreamConfig<StringSerDes, StringSerDes>();
        sConfig.ApplicationId = config["SPRING_CLOUD_APPLICATION_GUID"];
        sConfig.BootstrapServers = config["SPRING_CLOUD_STREAM_KAFKA_BINDER_BROKERS"];
        sConfig.SchemaRegistryUrl = config["SchemaRegistryUrl"];
        sConfig.AutoRegisterSchemas = true;
        sConfig.NumStreamThreads = 10;
        sConfig.Acks = Acks.All;
        sConfig.AddConsumerConfig("allow.auto.create.topics", "true");
        sConfig.InnerExceptionHandler = (e) => ExceptionHandlerResponse.CONTINUE;

        var schemaRegistryClient = new CachedSchemaRegistryClient(
            new SchemaRegistryConfig { Url = sConfig.SchemaRegistryUrl });

        var supplier = new SyncKafkaSupplier(new KafkaLoggerAdapter(sConfig));
        var producerConfig = sConfig.ToProducerConfig();
        var adminConfig = sConfig.ToAdminConfig(sConfig.ApplicationId);
        // NOTE(review): admin client is created but unused since explicit topic
        // creation was dropped; topics are auto-created via the consumer config.
        var admin = supplier.GetAdmin(adminConfig);
        var producer = supplier.GetProducer(producerConfig);

        // Materialize destTopic as an in-memory table keyed by int.
        StreamBuilder builder = new StreamBuilder();
        var serdes = new SchemaAvroSerDes<Order>();
        var keySerdes = new Int32SerDes();  // hoisted: reused for all keys below
        builder.Table(destTopic, keySerdes, serdes, InMemory<int, Order>.As(config["table"]));
        var t = builder.Build();
        KafkaStream stream = new KafkaStream(t, sConfig, supplier);
        stream.StateChanged += (old, @new) =>
        {
            if (@new.Equals(KafkaStream.State.RUNNING))
            {
                isRunningState = true;
            }
        };
        await stream.StartAsync();

        // Poll until RUNNING or the timeout window elapses.
        while (!isRunningState)
        {
            await Task.Delay(250);  // was Thread.Sleep: never block inside async
            if (DateTime.UtcNow > dt + timeout)
            {
                break;
            }
        }

        if (isRunningState)
        {
            // Producer for well-formatted Endpoint records in the external topic.
            var endpProducer = new ProducerBuilder<byte[], Endpoint>(producerConfig)
                .SetValueSerializer(new AvroSerializer<Endpoint>(
                    schemaRegistryClient,
                    new AvroSerializerConfig { AutoRegisterSchemas = true }).AsSyncOverAsync())
                .Build();
            // Producer for well-formatted Product records in the external topic.
            var productProducer = new ProducerBuilder<byte[], Product>(producerConfig)
                .SetValueSerializer(new AvroSerializer<Product>(
                    schemaRegistryClient,
                    new AvroSerializerConfig { AutoRegisterSchemas = true }).AsSyncOverAsync())
                .Build();

            for (int k = 1; k < 10; k++)
            {
                endpProducer.Produce("api-endpoints", new Message<byte[], Endpoint>
                {
                    Key = keySerdes.Serialize(k, new SerializationContext()),
                    Value = new Endpoint
                    {
                        endpoint_id = ("endpoint" + k),
                        endpoint_url = ("http://endpoint" + k + "/"),
                        http_method = "POST"
                    }
                }, (d) =>
                {
                    if (d.Status == PersistenceStatus.Persisted)
                    {
                        Console.WriteLine("Endpoint Message sent !");
                    }
                });

                productProducer.Produce("product-external", new Message<byte[], Product>
                {
                    Key = keySerdes.Serialize(1, new SerializationContext()),
                    Value = new Product
                    {
                        name = "Producto de Software",
                        price = 1234.5F,
                        product_id = 3
                    }
                }, (d) =>
                {
                    if (d.Status == PersistenceStatus.Persisted)
                    {
                        Console.WriteLine("Product Message sent !");
                    }
                });
            }

            await Task.Delay(10);  // was Thread.Sleep

            for (int k = 1; k < 10; k++)
            {
                producer.Produce(destTopic, new Confluent.Kafka.Message<byte[], byte[]>
                {
                    Key = keySerdes.Serialize(k, new SerializationContext()),
                    Value = serdes.Serialize(
                        new Order { order_id = k, price = 123.5F, product_id = k },
                        new SerializationContext())
                }, (d) =>
                {
                    if (d.Status == PersistenceStatus.Persisted)
                    {
                        Console.WriteLine("Order Message sent !");
                    }
                });
            }

            await Task.Delay(50);  // was Thread.Sleep
        }
    }
}
/// <summary>
/// Builds and starts the join topology: streams incoming Order records,
/// joins each against the Product table materialized from the external topic,
/// and publishes the enriched OrderProduct to the output destination.
/// Waits up to 10 seconds for the stream to reach RUNNING.
/// </summary>
/// <param name="config">Application configuration (topic names, brokers, registry URL).</param>
public async Task process(IConfiguration config)
{
    Console.WriteLine("Process");

    var sConfig = new StreamConfig<StringSerDes, StringSerDes>();
    sConfig.ApplicationId = config["SPRING_CLOUD_APPLICATION_GROUP"];
    sConfig.BootstrapServers = config["SPRING_CLOUD_STREAM_KAFKA_BINDER_BROKERS"];
    sConfig.AutoOffsetReset = AutoOffsetReset.Earliest;
    sConfig.SchemaRegistryUrl = config["SchemaRegistryUrl"];
    sConfig.AutoRegisterSchemas = true;
    sConfig.NumStreamThreads = 1;
    sConfig.Acks = Acks.All;
    // Uncomment for verbose broker/consumer diagnostics:
    //sConfig.Debug = "consumer,cgrp,topic,fetch";
    sConfig.AddConsumerConfig("allow.auto.create.topics", "true");
    sConfig.MaxTaskIdleMs = 50;
    sConfig.InnerExceptionHandler = (e) => ExceptionHandlerResponse.CONTINUE;

    var timeout = TimeSpan.FromSeconds(10);
    // UtcNow: elapsed-time measurement must not be distorted by DST/clock shifts.
    DateTime dt = DateTime.UtcNow;

    StreamBuilder builder = new StreamBuilder();

    // Product reference data, materialized as an in-memory table.
    var table = builder.Table(
        config["simpleNetcoreProcessor.externaltopic"],
        new Int32SerDes(),
        new SchemaAvroSerDes<Product>(),
        InMemory<int, Product>.As(config["simpleNetcoreProcessor.table"]));

    // Order stream joined with the product table -> enriched OrderProduct.
    builder.Stream<int, Order, Int32SerDes, SchemaAvroSerDes<Order>>(
            config["spring.cloud.stream.bindings.input.destination"])
        .Join(table, (order, product) =>
        {
            Console.WriteLine("Order: " + order?.order_id);
            Console.WriteLine("Product: " + product?.product_id);
            // Returned directly; the old closure-mutated 'op' local was unused.
            return new OrderProduct
            {
                order_id = order.order_id,
                price = order.price,
                product_id = product.product_id,
                product_name = product.name,
                product_price = product.price
            };
        })
        .To<Int32SerDes, SchemaAvroSerDes<OrderProduct>>(
            config["spring.cloud.stream.bindings.output.destination"]);

    Topology t = builder.Build();
    Console.WriteLine(t.Describe());
    KafkaStream stream = new KafkaStream(t, sConfig);

    bool isRunningState = false;
    stream.StateChanged += (old, @new) =>
    {
        if (@new.Equals(KafkaStream.State.RUNNING))
        {
            isRunningState = true;
        }
    };
    await stream.StartAsync();

    // Poll until RUNNING or the timeout window elapses.
    while (!isRunningState)
    {
        await Task.Delay(250);  // was Thread.Sleep: never block inside async
        if (DateTime.UtcNow > dt + timeout)
        {
            break;
        }
    }

    if (isRunningState)
    {
        Console.WriteLine("Stream running state is " + isRunningState.ToString());
    }
}