public static ProducerSubmission WithTradingName(string tradingName)
            {
                var builder = new ProducerBuilder();
                builder.tradingName = tradingName;

                return builder.Build();
            }
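These WithXxx helpers, which recur throughout this page, wrap a test-data ProducerBuilder. A hypothetical call site inside the same helper class might look like the sketch below; the TradingName property and the assertion style are assumptions, not taken from the original project.
            // Hypothetical usage of the helper above; the property name is assumed.
            [Fact]
            public void WithTradingName_sets_the_trading_name()
            {
                var submission = WithTradingName("Example Trading Ltd");

                submission.TradingName.Should().Be("Example Trading Ltd");
            }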
Example #2
        public void Test1(string codigo, double valor)
        {
            var broker = Configuration["ApacheKafka:Broker"];

            Logger.Information($"Broker Kafka: {broker}");

            var topic = Configuration["ApacheKafka:Topic"];

            Logger.Information($"Tópico: {topic}");

            var cotacaoAcao = new Acao()
            {
                Codigo        = codigo,
                Valor         = valor,
                CodCorretora  = COD_CORRETORA,
                NomeCorretora = NOME_CORRETORA
            };
            var conteudoAcao = JsonSerializer.Serialize(cotacaoAcao);

            Logger.Information($"Dados: {conteudoAcao}");

            var configKafka = new ProducerConfig
            {
                BootstrapServers = broker
            };

            using (var producer = new ProducerBuilder <Null, string>(configKafka).Build())
            {
                var result = producer.ProduceAsync(
                    topic,
                    new Message <Null, string>
                {
                    Value = conteudoAcao
                }).Result;

                Logger.Information(
                    $"Apache Kafka - Send to topic {topic} completed | " +
                    $"{conteudoAcao} | Status: {result.Status}");
            }

            Logger.Information("Aguardando o processamento do Worker...");
            Thread.Sleep(
                Convert.ToInt32(Configuration["IntervaloProcessamento"]));

            var mongoDBConnection = Configuration["MongoDBConnection"];

            Logger.Information($"MongoDB Connection: {mongoDBConnection}");

            var mongoDatabase = Configuration["MongoDatabase"];

            Logger.Information($"MongoDB Database: {mongoDatabase}");

            var mongoCollection = Configuration["MongoCollection"];

            Logger.Information($"MongoDB Collection: {mongoCollection}");

            var acaoDocument = new MongoClient(mongoDBConnection)
                               .GetDatabase(mongoDatabase)
                               .GetCollection <AcaoDocument>(mongoCollection)
                               .Find(h => h.Codigo == codigo).SingleOrDefault();

            acaoDocument.Should().NotBeNull();
            acaoDocument.Codigo.Should().Be(codigo);
            acaoDocument.Valor.Should().Be(valor);
            acaoDocument.CodCorretora.Should().Be(COD_CORRETORA);
            acaoDocument.NomeCorretora.Should().Be(NOME_CORRETORA);
            acaoDocument.HistLancamento.Should().NotBeNullOrWhiteSpace();
            acaoDocument.DataReferencia.Should().NotBeNullOrWhiteSpace();
        }
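The test above blocks on ProduceAsync(...).Result inside the using block. Where the test framework supports async test methods, the produce step can be awaited instead; a minimal sketch of such a helper (names are illustrative, not from the original test; requires using Confluent.Kafka and System.Threading.Tasks):
        // Sketch only: an awaited variant of the produce step in the test above.
        private static async Task<PersistenceStatus> ProduceQuoteAsync(
            string broker, string topic, string payload)
        {
            var configKafka = new ProducerConfig { BootstrapServers = broker };

            using (var producer = new ProducerBuilder<Null, string>(configKafka).Build())
            {
                // Awaiting avoids the sync-over-async .Result call used above.
                var result = await producer.ProduceAsync(
                    topic, new Message<Null, string> { Value = payload });

                return result.Status;
            }
        }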
Example #3
        private static async Task GenerateMessage(List <Clickstream> clickstreamData, string brokerList,
                                                  string connStr, string topic, string caCertLocation)
        {
            System.Console.WriteLine("Starting clickstream generator...");

            try
            {
                var conf = new ProducerConfig
                {
                    BootstrapServers = brokerList,
                    SecurityProtocol = SecurityProtocol.SaslSsl,
                    SaslMechanism    = SaslMechanism.Plain,
                    SaslUsername     = "******",
                    SaslPassword     = connStr,
                    SslCaLocation    = caCertLocation,
                    LingerMs         = 5
                };

                // If serializers are not specified, default serializers from
                // `Confluent.Kafka.Serializers` will be automatically used where
                // available. Note: by default strings are encoded as UTF8.
                //using (var p = new ProducerBuilder<Null, string>(conf).Build())
                //{
                //    foreach (var clickstream in clickstreamData)
                //    {
                //        if (_token.IsCancellationRequested)
                //        {
                //            return;
                //        }

                //        try
                //        {
                //            var serializedString = JsonConvert.SerializeObject(clickstream);
                //            var dr = await p.ProduceAsync(topic, new Message<Null, string> { Value = serializedString });
                //            Console.WriteLine($"{DateTime.Now} > Delivered '{dr.Value}' to '{dr.TopicPartitionOffset}'");
                //        }
                //        catch (ProduceException<Null, string> e)
                //        {
                //            Console.WriteLine($"Delivery failed: {e.Error.Reason}");
                //        }
                //    }
                //}

                Action <DeliveryReport <Null, string> > handler = r =>
                                                                  Console.WriteLine(!r.Error.IsError
                        ? $"Delivered message to {r.TopicPartitionOffset}"
                        : $"Delivery Error: {r.Error.Reason}");

                using (var p = new ProducerBuilder <Null, string>(conf).Build())
                {
                    int sent = 0;
                    foreach (var clickstream in clickstreamData)
                    {
                        if (_token.IsCancellationRequested)
                        {
                            return;
                        }

                        var serializedString = JsonConvert.SerializeObject(clickstream);
                        p.Produce(topic, new Message <Null, string> {
                            Value = serializedString
                        }, handler);
                        sent++;
                        System.Console.WriteLine($"{DateTime.Now} > Sent #{sent}: {serializedString}");

                        await Task.Delay(20);
                    }

                    // Wait for up to 10 seconds for any inflight messages to be delivered.
                    p.Flush(TimeSpan.FromSeconds(10));
                }
            }
            catch (Exception e)
            {
                Console.WriteLine($"Exception Occurred - {e.Message}");
                throw;
            }
        }
Example #4
 public KafkaPublisherBuilderExtensionsTest()
 {
     _ps = new ProducerSettings();
     _pb = new ProducerBuilder <SomeMessage>(_ps);
 }
Example #5
        static async System.Threading.Tasks.Task Main(string[] args)
        {
            int           count  = 10;
            var           key    = string.Empty;
            List <string> topics = new List <string>();

            topics.Add("test");
            topics.Add("test2");

            var config = new ProducerConfig
            {
                BootstrapServers = "localhost:9092",
                ClientId         = Dns.GetHostName()
            };

            //Create a topic with 3 partitions and a replication factor of 1
            //var createTopic = TopicHelper.CreateTopic(config, topic, 3, 1);

            using (var producer = new ProducerBuilder <string, string>(config).Build())
            {
                try
                {
                    // List<T>.ForEach with an async lambda creates async void delegates:
                    // exceptions bypass this try/catch and Flush can run before the
                    // messages are produced. A plain foreach keeps the awaits on Main.
                    foreach (var topic in topics)
                    {
                        for (var i = 0; i <= count; i++)
                        {
                            if (i % 2 == 0)
                            {
                                key = "ev";
                            }
                            else
                            {
                                key = "od";
                            }

                            var randomModel = new RandomModel
                            {
                                Key          = key,
                                Message      = $"Message with count: {i}",
                                RandomNumber = new Random().Next()
                            };

                            await producer.ProduceAsync(topic, new Message <string, string> {
                                Key = key, Value = JsonConvert.SerializeObject(randomModel)
                            });

                            Console.WriteLine($"Message {i} was produced to topic {topic}");
                        }

                        producer.Flush(TimeSpan.FromSeconds(35));
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine($"Error has occurred: {e.Message}");
                }

                Console.Read();
            }
        }
            public static ProducerSubmission WithEEEPlacedOnMarketBandType(EEEPlacedOnMarketBandType eeePlacedOnMarketBandType)
            {
                var builder = new ProducerBuilder();
                builder.eeePlacedOnMarketBandType = eeePlacedOnMarketBandType;

                return builder.Build();
            }
            public static ProducerSubmission WithSICCodes(List<SICCode> sicCodes)
            {
                var builder = new ProducerBuilder();
                builder.sicCodes = sicCodes;

                return builder.Build();
            }
Example #8
        /// <summary>Initializes the transport by ensuring that the input queue has been created</summary>
        public void Initialize()
        {
            // ToDo: Allow configuring transport options via Rebus
            var producerConfig = new ProducerConfig
            {
                BootstrapServers        = _brokerList,
                ApiVersionRequest       = true,
                QueueBufferingMaxKbytes = 10240,
                //{ "socket.blocking.max.ms", 1 }, // **DEPRECATED * *No longer used.
#if DEBUG
                Debug = "msg",
#endif
                MessageTimeoutMs = 3000,
            };

            producerConfig.Set("request.required.acks", "-1");
            producerConfig.Set("queue.buffering.max.ms", "5");

            var builder = new ProducerBuilder <Ignore, TransportMessage>(producerConfig)
                          .SetKeySerializer(new IgnoreSerializer())
                          .SetValueSerializer(new TransportMessageSerializer())
                          .SetLogHandler(ProducerOnLog)
                          .SetStatisticsHandler(ProducerOnStatistics)
                          .SetErrorHandler(ProducerOnError);

            try
            {
                _producer = builder.Build();
            }
            catch (DllNotFoundException)
            {               // Try loading librdkafka.dll
                if (!Library.IsLoaded)
                {
                    string directory   = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().GetName().CodeBase.Substring(8));
                    var    pathToLibrd = System.IO.Path.Combine(directory, $"librdkafka/{(Environment.Is64BitOperatingSystem ? "x64" : "x86")}/librdkafka.dll");
                    _log.Info($"librdkafka is not loaded. Trying to load {pathToLibrd}");
                    Confluent.Kafka.Library.Load(pathToLibrd);
                    _log.Info($"Using librdkafka version: {Library.Version}");
                }
                _producer = builder.Build();
            }
            // ToDo: Allow configuring transport options
            var config = new ConsumerConfig
            {
                BootstrapServers    = _brokerList,
                ApiVersionRequest   = true,
                GroupId             = !string.IsNullOrEmpty(_groupId) ? _groupId : Guid.NewGuid().ToString("N"),
                EnableAutoCommit    = false,
                FetchWaitMaxMs      = 5,
                FetchErrorBackoffMs = 5,
                QueuedMinMessages   = 1000,
                SessionTimeoutMs    = 6000,
                //StatisticsIntervalMs = 5000,
#if DEBUG
                Debug = "msg",
#endif
                AutoOffsetReset    = AutoOffsetReset.Latest,
                EnablePartitionEof = true
            };

            config.Set("fetch.message.max.bytes", "10240");

            // Note: If a key or value deserializer is not set, the deserializer corresponding
            // to the appropriate type from Confluent.Kafka.Serdes is used automatically (where
            // available). The default deserializer for string is UTF8; the default deserializer
            // for Ignore returns null for all input data (including non-null data). Here both
            // deserializers are set explicitly.
            _consumer = new ConsumerBuilder <Ignore, TransportMessage>(config)
                        .SetKeyDeserializer(Deserializers.Ignore)
                        .SetValueDeserializer(new TransportMessageDeserializer())
                        .SetLogHandler(ConsumerOnLog)
                        .SetErrorHandler(ConsumerOnError)
                        .SetStatisticsHandler(ConsumerOnStatistics)
                        .SetRebalanceHandler(ConsumerOnRebalance)
                        .Build();
            _consumer.Subscribe(_knownRoutes.Values);
        }
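The IgnoreSerializer and TransportMessageSerializer used in this transport are custom types that are not shown here. As an assumption about the key serializer only, a minimal ISerializer<Ignore> that writes no key bytes could look like this; the real implementation may differ.
        // Hypothetical sketch of the key serializer referenced above.
        public class IgnoreSerializer : ISerializer<Ignore>
        {
            // Returning null means no key bytes are written for the message.
            public byte[] Serialize(Ignore data, SerializationContext context) => null;
        }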
        public void NativeKafkaIConsumerSeekShouldNotFail()
        {
            var topic         = "topic-1";
            var group         = "group-1";
            var totalMessages = 105;

            var producerConfig = new ProducerConfig
            {
                BootstrapServers = Fixture.KafkaServer
            };

            var producer = new ProducerBuilder <string, string>(producerConfig).Build();

            foreach (var i in Enumerable.Range(1, totalMessages))
            {
                producer.Produce(topic, new Message <string, string>
                {
                    Key   = i.ToString(),
                    Value = i.ToString()
                });
            }

            producer.Flush();

            var consumerConfig = new ConsumerConfig
            {
                BootstrapServers = Fixture.KafkaServer,
                GroupId          = group,
                AutoOffsetReset  = AutoOffsetReset.Earliest,
                EnableAutoCommit = false
            };

            var consumer = new ConsumerBuilder <string, string>(consumerConfig)
                           .Build();

            consumer.Subscribe(topic);

            const int consumeTimeout = 50;
            const int testTimeout    = 10_000;

            var consumeCount = 0;

            var offsets = new List <TopicPartitionOffset>
            {
                new TopicPartitionOffset(new TopicPartition(topic, 0), 0),
                new TopicPartitionOffset(new TopicPartition(topic, 1), 0),
                new TopicPartitionOffset(new TopicPartition(topic, 2), 0),
            };

            var watch = Stopwatch.StartNew();

            while (consumeCount < totalMessages && watch.ElapsedMilliseconds < testTimeout)
            {
                var consumed = ConsumeAllMessages(consumer, totalMessages, offsets, consumeTimeout);
                //Log.Info($"Polled {consumed.consumed} messages");

                if (consumed.consumed != 0)
                {
                    for (var i = 0; i < 3; i++)
                    {
                        var seekResult = CheckForSeek(consumed.messages[i]);
                        consumeCount += seekResult.count;
                        offsets[i]    = seekResult.seekOffset;
                    }

                    foreach (var offset in offsets)
                    {
                        if (offset != null)
                        {
                            Log.Info($"Seeking {offset.TopicPartition} to {offset.Offset}");
                            consumer.Seek(offset);
                        }
                    }
                }

                Thread.Sleep(consumeTimeout);
            }
            watch.Stop();

            consumeCount.Should().Be(totalMessages);
        }
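The ConsumeAllMessages and CheckForSeek helpers used by this test are not shown on this page. Purely as an assumption about its shape, ConsumeAllMessages might poll until the consumer returns null and group the results per partition, roughly like the sketch below; the real helper in the test suite may differ.
        // Hypothetical sketch of the ConsumeAllMessages helper used above.
        private static (int consumed, List<ConsumeResult<string, string>>[] messages) ConsumeAllMessages(
            IConsumer<string, string> consumer, int maxMessages,
            List<TopicPartitionOffset> offsets, int timeoutMs)
        {
            // One bucket per partition (the topic is assumed to have offsets.Count partitions).
            var perPartition = new List<ConsumeResult<string, string>>[offsets.Count];
            for (var i = 0; i < perPartition.Length; i++)
            {
                perPartition[i] = new List<ConsumeResult<string, string>>();
            }

            var total = 0;
            while (total < maxMessages)
            {
                var result = consumer.Consume(TimeSpan.FromMilliseconds(timeoutMs));
                if (result == null)
                {
                    break;
                }

                perPartition[result.Partition.Value].Add(result);
                total++;
            }

            return (total, perPartition);
        }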
Example #10
        /// <summary>
        /// Gets data from the BMKG open data service for Jawa Barat, cleans up the format, and sends it to Kafka.
        /// </summary>
        /// <returns></returns>
        private static async Task UpdateWeather()
        {
            XmlTextReader reader = new XmlTextReader("http://data.bmkg.go.id/datamkg/MEWS/DigitalForecast/DigitalForecast-JawaBarat.xml");
            XmlDocument   doc    = new XmlDocument();

            doc.Load(reader);
            var m        = JsonConvert.SerializeXmlNode(doc);
            var data     = (JObject)JsonConvert.DeserializeObject <object>(m);
            var forecast = data["data"].Value <JObject>("forecast");
            var issue    = forecast["issue"].Value <string>("timestamp");
            var areas    = forecast["area"];
            var weather  = new Weather();

            foreach (var area in areas)
            {
                var name    = area["name"][0].Value <string>("#text");
                var areaObj = new Area
                {
                    City = name
                };
                if (area["parameter"] != null)
                {
                    var parameter   = area["parameter"][5];
                    var description = parameter.Value <string>("Description");
                    var timeRanges  = parameter["timerange"];
                    foreach (var time in timeRanges)
                    {
                        var datetime  = time["@datetime"].Value <string>();
                        var value     = time["value"][0].Value <string>("#text");
                        var timeRange = new TimeRange();
                        timeRange.Time  = datetime;
                        timeRange.Value = value;
                        areaObj.TimeRanges.Add(timeRange);
                    }
                }

                weather.Areas.Add(areaObj);
            }
            var config = new ProducerConfig {
                BootstrapServers = "localhost:9092"
            };

            Action <DeliveryReport <Null, string> > handler = r =>
                                                              Console.WriteLine(!r.Error.IsError
                ? $"Delivered message to {r.TopicPartitionOffset}"
                : $"Delivery Error: {r.Error.Reason}");

            using (var producer = new ProducerBuilder <Null, string>(config).Build())
            {
                await DeleteTopics(config.BootstrapServers, new string[] { "weather-topic" });

                foreach (var area in weather.Areas)
                {
                    var jsonPayload = JsonConvert.SerializeObject(area);
                    Console.WriteLine("send to kafka: ");
                    Console.WriteLine(jsonPayload);
                    await producer.ProduceAsync("weather-topic", new Message <Null, string> {
                        Value = jsonPayload
                    });

                    producer.Flush(TimeSpan.FromSeconds(10));
                    await Task.Delay(5000);
                }
            }
        }
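The DeleteTopics helper awaited above is not shown in this example. A minimal sketch of such a helper using Confluent.Kafka's AdminClient (types from Confluent.Kafka.Admin), offered as an assumption rather than the original code:
        // Hypothetical sketch of the DeleteTopics helper used above.
        private static async Task DeleteTopics(string bootstrapServers, string[] topics)
        {
            var adminConfig = new AdminClientConfig { BootstrapServers = bootstrapServers };

            using (var adminClient = new AdminClientBuilder(adminConfig).Build())
            {
                try
                {
                    await adminClient.DeleteTopicsAsync(topics);
                }
                catch (DeleteTopicsException e)
                {
                    // Deleting a topic that does not exist is reported here; that is
                    // acceptable for a "reset before producing" helper.
                    Console.WriteLine($"Delete topics failed: {e.Results[0].Error.Reason}");
                }
            }
        }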
Example #11
        public static void AssignPastEnd(string bootstrapServers, string singlePartitionTopic, string partitionedTopic)
        {
            LogToFile("start AssignPastEnd");

            var consumerConfig = new ConsumerConfig
            {
                GroupId          = Guid.NewGuid().ToString(),
                BootstrapServers = bootstrapServers,
                SessionTimeoutMs = 6000
            };
            var producerConfig = new ProducerConfig {
                BootstrapServers = bootstrapServers
            };

            var testString = "hello world";

            DeliveryResult <Null, byte[]> dr;

            using (var producer = new ProducerBuilder <Null, byte[]>(producerConfig).Build())
            {
                dr = producer.ProduceAsync(singlePartitionTopic, new Message <Null, byte[]> {
                    Value = Serializers.Utf8.Serialize(testString, SerializationContext.Empty)
                }).Result;
                Assert.True(dr.Offset >= 0);
                producer.Flush(TimeSpan.FromSeconds(10));
            }

            consumerConfig.AutoOffsetReset = AutoOffsetReset.Latest;
            using (var consumer = new ConsumerBuilder <Null, byte[]>(consumerConfig).Build())
            {
                ConsumeResult <Null, byte[]> record;

                // Consume API
                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 1)
                });
                record = consumer.Consume(TimeSpan.FromSeconds(10));
                Assert.Null(record);
                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 2)
                });
                record = consumer.Consume(TimeSpan.FromSeconds(10));
                Assert.Null(record);
            }

            consumerConfig.AutoOffsetReset = AutoOffsetReset.Earliest;
            using (var consumer = new ConsumerBuilder <Null, byte[]>(consumerConfig).Build())
            {
                ConsumeResult <Null, byte[]> record;
                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 1)
                });
                record = consumer.Consume(TimeSpan.FromSeconds(10));
                Assert.Null(record);
                // Note: dr.Offset+2 is an invalid offset (cf. dr.Offset+1, which is valid), so auto.offset.reset
                // will come into play here to determine which offset to start from (earliest). Due to the produce
                // call above, there is guaranteed to be a message on the topic, so consumer.Consume will return it.
                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 2)
                });
                record = consumer.Consume(TimeSpan.FromSeconds(10));
                Assert.NotNull(record?.Message);
            }

            Assert.Equal(0, Library.HandleCount);
            LogToFile("end   AssignPastEnd");
        }
Example #12
        public override async Task Run(ProducerConfig config, string kafkaTopic)
        {
            // Create the producer
            using (var producer = new ProducerBuilder <string, string>(config).Build())
            {
                Console.WriteLine("\n-----------------------------------------------------------------------");
                Console.WriteLine($"Producer {producer.Name} producing on topic {kafkaTopic}.");
                Console.WriteLine("-----------------------------------------------------------------------");
                Console.WriteLine("To create a kafka message with UTF-8 encoded key and value:");
                Console.WriteLine("> key value<Enter>");
                Console.WriteLine("To create a kafka message with a null key and UTF-8 encoded value:");
                Console.WriteLine("> value<enter>");
                Console.WriteLine("Ctrl-C to quit.\n");

                var cancelled = false;
                Console.CancelKeyPress += (_, e) =>
                {
                    e.Cancel  = true; // prevent the process from terminating.
                    cancelled = true;
                };

                while (!cancelled)
                {
                    Console.Write("> ");

                    string text;
                    try
                    {
                        text = Console.ReadLine();
                    }
                    catch (IOException)
                    {
                        // IO exception is thrown when ConsoleCancelEventArgs.Cancel == true.
                        break;
                    }
                    if (text == null)
                    {
                        // Console returned null before
                        // the CancelKeyPress was treated
                        break;
                    }

                    string key = null;
                    string val = text;

                    // split line if both key and value specified.
                    int index = text.IndexOf(" ");
                    if (index != -1)
                    {
                        key = text.Substring(0, index);
                        val = text.Substring(index + 1);
                    }

                    try
                    {
                        // Note: Awaiting the asynchronous produce request below prevents flow of execution
                        // from proceeding until the acknowledgement from the broker is received (at the
                        // expense of low throughput).
                        var deliveryReport = await producer.ProduceAsync(
                            kafkaTopic, new Message <string, string> {
                            Key = key, Value = val
                        });

                        Console.WriteLine($"Delivered message to: {deliveryReport.TopicPartitionOffset}");
                    }
                    catch (ProduceException <string, string> e)
                    {
                        Console.WriteLine($"Failed to deliver message: {e.Message} [{e.Error.Code}]");
                    }
                }
            }
        }
        public static void ConsumePartitionEOF(string bootstrapServers, string schemaRegistryServers)
        {
            var producerConfig = new ProducerConfig
            {
                BootstrapServers = bootstrapServers
            };

            var schemaRegistryConfig = new SchemaRegistryConfig
            {
                Url = schemaRegistryServers
            };

            using (var topic = new TemporaryTopic(bootstrapServers, 1))
                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var producer =
                               new ProducerBuilder <Null, User>(producerConfig)
                               .SetKeySerializer(Serializers.Null)
                               .SetValueSerializer(new AvroSerializer <User>(schemaRegistry))
                               .Build())
                    {
                        producer.ProduceAsync(topic.Name, new Message <Null, User> {
                            Value = new User {
                                name = "test"
                            }
                        });

                        var consumerConfig = new ConsumerConfig
                        {
                            BootstrapServers   = bootstrapServers,
                            GroupId            = Guid.NewGuid().ToString(),
                            SessionTimeoutMs   = 6000,
                            AutoOffsetReset    = AutoOffsetReset.Earliest,
                            EnablePartitionEof = true
                        };

                        using (var consumer =
                                   new ConsumerBuilder <Null, User>(consumerConfig)
                                   .SetKeyDeserializer(Deserializers.Null)
                                   .SetValueDeserializer(new AvroDeserializer <User>(schemaRegistry).AsSyncOverAsync())
                                   .SetPartitionsAssignedHandler((c, partitions)
                                                                 => partitions.Select(tp => new TopicPartitionOffset(tp, Offset.Beginning)))
                                   .Build())
                        {
                            consumer.Subscribe(topic.Name);

                            var cr1 = consumer.Consume();
                            Assert.NotNull(cr1);
                            Assert.NotNull(cr1.Message);
                            Assert.False(cr1.IsPartitionEOF);
                            var cr2 = consumer.Consume();
                            Assert.NotNull(cr2);
                            Assert.Null(cr2.Message);
                            Assert.True(cr2.IsPartitionEOF);
                        }

                        consumerConfig = new ConsumerConfig
                        {
                            BootstrapServers   = bootstrapServers,
                            GroupId            = Guid.NewGuid().ToString(),
                            SessionTimeoutMs   = 6000,
                            AutoOffsetReset    = AutoOffsetReset.Earliest,
                            EnablePartitionEof = false
                        };

                        using (var consumer =
                                   new ConsumerBuilder <Null, User>(consumerConfig)
                                   .SetKeyDeserializer(Deserializers.Null)
                                   .SetValueDeserializer(new AvroDeserializer <User>(schemaRegistry).AsSyncOverAsync())
                                   .SetPartitionsAssignedHandler((c, partitions)
                                                                 => partitions.Select(tp => new TopicPartitionOffset(tp, Offset.Beginning)))
                                   .Build())
                        {
                            consumer.Subscribe(topic.Name);

                            var cr1 = consumer.Consume();
                            Assert.NotNull(cr1);
                            Assert.NotNull(cr1.Message);
                            Assert.False(cr1.IsPartitionEOF);
                            var cr2 = consumer.Consume(TimeSpan.FromSeconds(2));
                            Assert.Null(cr2);
                        }
                    }
        }
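TemporaryTopic is a small disposable helper from the integration test suite and is not shown on this page. As an assumption about its shape, it might create a uniquely named topic on construction and delete it on dispose, roughly as sketched below (types from Confluent.Kafka.Admin; the real helper may differ):
        // Hypothetical sketch of the TemporaryTopic helper used in these tests.
        public class TemporaryTopic : IDisposable
        {
            private readonly string bootstrapServers;

            public string Name { get; }

            public TemporaryTopic(string bootstrapServers, int numPartitions)
            {
                this.bootstrapServers = bootstrapServers;
                Name = "dotnet_test_" + Guid.NewGuid().ToString();

                using (var adminClient = new AdminClientBuilder(
                    new AdminClientConfig { BootstrapServers = bootstrapServers }).Build())
                {
                    adminClient.CreateTopicsAsync(new[]
                    {
                        new TopicSpecification { Name = Name, NumPartitions = numPartitions, ReplicationFactor = 1 }
                    }).Wait();
                }
            }

            public void Dispose()
            {
                using (var adminClient = new AdminClientBuilder(
                    new AdminClientConfig { BootstrapServers = bootstrapServers }).Build())
                {
                    adminClient.DeleteTopicsAsync(new[] { Name }).Wait();
                }
            }
        }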
Example #14
        public static void AutoRegisterSchemaDisabled(string bootstrapServers, string schemaRegistryServers)
        {
            using (var topic = new TemporaryTopic(bootstrapServers, 1))
            {
                var producerConfig = new ProducerConfig
                {
                    BootstrapServers = bootstrapServers
                };

                var consumerConfig = new ConsumerConfig
                {
                    BootstrapServers = bootstrapServers,
                    GroupId          = Guid.NewGuid().ToString(),
                    SessionTimeoutMs = 6000,
                    AutoOffsetReset  = AutoOffsetReset.Earliest
                };

                var schemaRegistryConfig = new SchemaRegistryConfig
                {
                    SchemaRegistryUrl = schemaRegistryServers
                };

                // First, a quick check that the value case fails.

                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var producer =
                               new ProducerBuilder <string, int>(producerConfig)
                               .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
                               .SetValueSerializer(new AvroSerializer <int>(schemaRegistry, new AvroSerializerConfig {
                        AutoRegisterSchemas = false
                    }))
                               .Build())
                    {
                        Assert.Throws <SerializationException>(() =>
                        {
                            try
                            {
                                producer
                                .ProduceAsync(new Guid().ToString(), new Message <string, int> {
                                    Key = "test", Value = 112
                                })
                                .Wait();
                            }
                            catch (AggregateException e)
                            {
                                throw e.InnerException;
                            }
                        });
                    }

                // the following tests all check behavior in the key case.

                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var producer =
                               new ProducerBuilder <string, int>(producerConfig)
                               .SetKeySerializer(new AvroSerializer <string>(schemaRegistry, new AvroSerializerConfig {
                        AutoRegisterSchemas = false
                    }))
                               .SetValueSerializer(new AvroSerializer <int>(schemaRegistry))
                               .Build())
                    {
                        Assert.Throws <SerializationException>(() =>
                        {
                            try
                            {
                                producer.ProduceAsync(topic.Name, new Message <string, int> {
                                    Key = "test", Value = 112
                                }).Wait();
                            }
                            catch (AggregateException e)
                            {
                                throw e.InnerException;
                            }
                        });
                    }

                // allow auto register..
                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var producer =
                               new ProducerBuilder <string, int>(producerConfig)
                               .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
                               .SetValueSerializer(new AvroSerializer <int>(schemaRegistry))
                               .Build())
                    {
                        producer.ProduceAsync(topic.Name, new Message <string, int> {
                            Key = "test", Value = 112
                        }).Wait();
                    }

                // config with avro.serializer.auto.register.schemas == false should work now.
                using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig {
                    SchemaRegistryUrl = schemaRegistryServers
                }))
                    using (var producer =
                               new ProducerBuilder <string, int>(producerConfig)
                               .SetKeySerializer(new AvroSerializer <string>(schemaRegistry, new AvroSerializerConfig {
                        AutoRegisterSchemas = false
                    }))
                               .SetValueSerializer(new AvroSerializer <int>(schemaRegistry))
                               .Build())
                    {
                        producer.ProduceAsync(topic.Name, new Message <string, int> {
                            Key = "test", Value = 112
                        }).Wait();
                    }
            }
        }
            public static ProducerSubmission WithAnnualTurnover(int annualTurnover)
            {
                var builder = new ProducerBuilder();
                builder.annualTurnover = annualTurnover;

                return builder.Build();
            }
        public void Producer_MultiPartitioner(string bootstrapServers)
        {
            LogToFile("start Producer_MultiPartitioner");

            const int PARTITION_COUNT = 33;

            var producerConfig = new ProducerConfig
            {
                BootstrapServers = bootstrapServers
            };

            using (var topic1 = new TemporaryTopic(bootstrapServers, PARTITION_COUNT))
                using (var topic2 = new TemporaryTopic(bootstrapServers, PARTITION_COUNT))
                    using (var topic3 = new TemporaryTopic(bootstrapServers, 1))
                        using (var producer = new ProducerBuilder <string, Null>(producerConfig)
                                              .SetPartitioner(topic1.Name, (string topicName, int partitionCount, ReadOnlySpan <byte> keyData, bool keyIsNull) =>
                        {
                            Assert.Equal(topic1.Name, topicName);
                            var keyString = System.Text.UTF8Encoding.UTF8.GetString(keyData.ToArray());
                            Assert.Equal("hello", keyString);
                            return(8);
                        })
                                              .SetDefaultPartitioner((string topicName, int partitionCount, ReadOnlySpan <byte> keyData, bool keyIsNull) =>
                        {
                            Assert.True(topic2.Name == topicName || topic3.Name == topicName);
                            var keyString = System.Text.UTF8Encoding.UTF8.GetString(keyData.ToArray());
                            Assert.True(keyString == "world" || keyString == "kafka");
                            return(13);
                        })
                                              .Build()
                               ) {
                            Action <DeliveryReport <string, Null> > dh = (DeliveryReport <string, Null> dr) =>
                            {
                                Assert.Equal(ErrorCode.NoError, dr.Error.Code);
                                Assert.Equal(PersistenceStatus.Persisted, dr.Status);
                                Assert.True(Math.Abs((DateTime.UtcNow - dr.Message.Timestamp.UtcDateTime).TotalMinutes) < 1.0);
                                if (dr.Topic == topic1.Name)
                                {
                                    Assert.Equal("hello", dr.Message.Key);
                                }
                                else
                                {
                                    Assert.Equal("world", dr.Message.Key);
                                }
                            };

                            producer.Produce(topic1.Name, new Message <string, Null> {
                                Key = "hello"
                            }, dh);
                            producer.Produce(topic2.Name, new Message <string, Null> {
                                Key = "world"
                            }, dh);
                            // both default and topic-specific partitioners return a fixed value > number of partitions
                            // in topic 3. If either of these partitioners is errantly used in producing this message,
                            // this test will fail most of the time.
                            producer.Produce(topic3.Name, new Message <string, Null> {
                                Key = "kafka"
                            }, dh);
                            producer.Flush(TimeSpan.FromSeconds(10));
                        }

            Assert.Equal(0, Library.HandleCount);
            LogToFile("end   Producer_MultiPartitioner");
        }
            public static ProducerSubmission WithAnnualTurnOverBandType(AnnualTurnOverBandType annualTurnOverBandType)
            {
                var builder = new ProducerBuilder();
                builder.annualTurnOverBandType = annualTurnOverBandType;

                return builder.Build();
            }
        public static void AutoRegisterSchemaDisabled(string bootstrapServers, string schemaRegistryServers)
        {
            using (var topic = new TemporaryTopic(bootstrapServers, 1))
            {
                var producerConfig = new ProducerConfig
                {
                    BootstrapServers = bootstrapServers
                };

                var consumerConfig = new ConsumerConfig
                {
                    BootstrapServers = bootstrapServers,
                    GroupId          = Guid.NewGuid().ToString(),
                    SessionTimeoutMs = 6000,
                    AutoOffsetReset  = AutoOffsetReset.Earliest
                };

                var schemaRegistryConfig = new SchemaRegistryConfig
                {
                    SchemaRegistryUrl = schemaRegistryServers
                };

                // First, a quick check that the value case fails.

                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var producer =
                               new ProducerBuilder <string, int>(producerConfig)
                               .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
                               .SetValueSerializer(new AvroSerializer <int>(schemaRegistry, new AvroSerializerConfig {
                        AutoRegisterSchemas = false
                    }))
                               .Build())
                    {
                        Assert.Throws <SerializationException>(() =>
                        {
                            string guidTopic = new Guid().ToString();
                            try
                            {
                                producer
                                .ProduceAsync(guidTopic, new Message <string, int> {
                                    Key = "test", Value = 112
                                })
                                .GetAwaiter()
                                .GetResult();
                            }
                            catch (Exception e)
                            {
                                Assert.True(e is ProduceException <string, int>);
                                Assert.Equal(ErrorCode.Local_ValueSerialization, ((ProduceException <string, int>)e).Error.Code);

                                // Test message fields are appropriately set in the case of a serialization error.
                                Assert.Equal("test", ((ProduceException <string, int>)e).DeliveryResult.Key);
                                Assert.Equal(112, ((ProduceException <string, int>)e).DeliveryResult.Value);
                                Assert.Equal(Offset.Invalid, ((ProduceException <string, int>)e).DeliveryResult.Offset);
                                Assert.Equal(Partition.Any, ((ProduceException <string, int>)e).DeliveryResult.Partition);
                                Assert.Equal(guidTopic, ((ProduceException <string, int>)e).DeliveryResult.Topic);
                                Assert.Equal(PersistenceStatus.NotPersisted, ((ProduceException <string, int>)e).DeliveryResult.PersistenceStatus);
                                Assert.Equal(Timestamp.Default, ((ProduceException <string, int>)e).DeliveryResult.Timestamp);
                                Assert.Null(((ProduceException <string, int>)e).DeliveryResult.Headers);

                                // should be SerializationException.
                                throw e.InnerException;
                            }
                        });
                    }

                // the following tests all check behavior in the key case.

                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var producer =
                               new ProducerBuilder <string, int>(producerConfig)
                               .SetKeySerializer(new AvroSerializer <string>(schemaRegistry, new AvroSerializerConfig {
                        AutoRegisterSchemas = false
                    }))
                               .SetValueSerializer(new AvroSerializer <int>(schemaRegistry))
                               .Build())
                    {
                        Assert.Throws <SerializationException>(() =>
                        {
                            try
                            {
                                producer.ProduceAsync(topic.Name, new Message <string, int> {
                                    Key = "test", Value = 112
                                })
                                .GetAwaiter()
                                .GetResult();
                            }
                            catch (Exception e)
                            {
                                Assert.True(e is ProduceException <string, int>);
                                throw e.InnerException;
                            }
                        });
                    }

                // allow auto register..
                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var producer =
                               new ProducerBuilder <string, int>(producerConfig)
                               .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
                               .SetValueSerializer(new AvroSerializer <int>(schemaRegistry))
                               .Build())
                    {
                        producer.ProduceAsync(topic.Name, new Message <string, int> {
                            Key = "test", Value = 112
                        }).Wait();
                    }

                // config with avro.serializer.auto.register.schemas == false should work now.
                using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig {
                    SchemaRegistryUrl = schemaRegistryServers
                }))
                    using (var producer =
                               new ProducerBuilder <string, int>(producerConfig)
                               .SetKeySerializer(new AvroSerializer <string>(schemaRegistry, new AvroSerializerConfig {
                        AutoRegisterSchemas = false
                    }))
                               .SetValueSerializer(new AvroSerializer <int>(schemaRegistry))
                               .Build())
                    {
                        producer.ProduceAsync(topic.Name, new Message <string, int> {
                            Key = "test", Value = 112
                        }).Wait();
                    }
            }
        }
            public static ProducerSubmission WithProducerBusiness(ProducerBusiness producerBusiness)
            {
                var builder = new ProducerBuilder();
                builder.producerBusiness = producerBusiness;

                return builder.Build();
            }
        public static void ProduceConsumeGoogleRefProtobuf(string bootstrapServers, string schemaRegistryServers)
        {
            var producerConfig = new ProducerConfig {
                BootstrapServers = bootstrapServers
            };
            var schemaRegistryConfig = new SchemaRegistryConfig {
                Url = schemaRegistryServers
            };

            using (var topic = new TemporaryTopic(bootstrapServers, 1))
                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var producer =
                               new ProducerBuilder <string, WithGoogleRefs.TheRecord>(producerConfig)
                               .SetValueSerializer(new ProtobufSerializer <WithGoogleRefs.TheRecord>(schemaRegistry))
                               .Build())
                    {
                        var u = new WithGoogleRefs.TheRecord();
                        u.ListType             = 41;
                        u.ReceivedTime         = new Google.Protobuf.WellKnownTypes.Timestamp();
                        u.ReceivedTime.Seconds = 1591364591;

                        producer.ProduceAsync(topic.Name, new Message <string, WithGoogleRefs.TheRecord> {
                            Key = "test1", Value = u
                        }).Wait();

                        var consumerConfig = new ConsumerConfig
                        {
                            BootstrapServers = bootstrapServers,
                            GroupId          = Guid.NewGuid().ToString(),
                            AutoOffsetReset  = AutoOffsetReset.Earliest
                        };

                        // Test the protobuf deserializer can read this message
                        using (var consumer =
                                   new ConsumerBuilder <string, WithGoogleRefs.TheRecord>(consumerConfig)
                                   .SetValueDeserializer(new ProtobufDeserializer <WithGoogleRefs.TheRecord>().AsSyncOverAsync())
                                   .Build())
                        {
                            consumer.Subscribe(topic.Name);
                            var cr = consumer.Consume();
                            Assert.Equal(u.ListType.Value, cr.Message.Value.ListType.Value);
                            Assert.Equal(u.ReceivedTime.Seconds, cr.Message.Value.ReceivedTime.Seconds);
                        }

                        // Check the pre-data bytes are as expected.
                        using (var consumer = new ConsumerBuilder <string, byte[]>(consumerConfig).Build())
                        {
                            consumer.Subscribe(topic.Name);
                            var cr = consumer.Consume();
                            // magic byte + schema id + expected array index length + at least one data byte.
                            Assert.True(cr.Message.Value.Length >= 1 + 4 + 1 + 1);
                            // magic byte
                            Assert.Equal(0, cr.Message.Value[0]);
                            // array index (special value as an optimization)
                            Assert.Equal(0, cr.Message.Value[5]);
                        }

                        // Check the referenced schemas are in schema registry.
                        var subjects = schemaRegistry.GetAllSubjectsAsync().Result;
                        Assert.Contains("google/protobuf/timestamp.proto", subjects);
                        Assert.Contains("google/protobuf/wrappers.proto", subjects);
                    }
        }
Example #21
        public static void Main(string[] args)
        {
            var provider = new RSACryptoServiceProvider();

            provider.FromXmlString(File.ReadAllText("C:\\Projects\\keys\\key.xml"));


            var conf = new ProducerConfig
            {
                BootstrapServers                 = "localhost:9093",
                LingerMs                         = 10,
                QueueBufferingMaxKbytes          = 1048576,
                MessageMaxBytes                  = 50 * 1024,
                EnableSslCertificateVerification = true,
                SecurityProtocol                 = SecurityProtocol.Ssl,
                SslCaLocation                    = "C:\\Projects\\keys\\cert-signed",
                Partitioner                      = Partitioner.Consistent,
                //SslCertificateLocation = "C:\\Projects\\kafka_repo\\keys\\pub.pem",
                //SslKeyLocation= "C:\\Projects\\kafka_repo\\keys\\pri.key",
                //SslKeyPassword = "******",
                //   Debug="msg"
                //QueueBufferingMaxMessages = 10000,
                // BatchNumMessages = 1,
                //  Acks = Acks.All
            };
            var filePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "data.dat");

            //byte[] info = Encoding.ASCII.GetBytes("Ajeesh B Nair");
            //byte[] info = Encoding.UTF8.GetBytes("Ajeesh B Nair asdddddddddddddddddddddddddddddddddddddddddddd asddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddassssssssqwdqw-Ajeesh B Nair asdddddddddddddddddddddddddddddddddddddddddddd asddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddassssssssqwdqw");//  File.ReadAllBytes(filePath);
            byte[] info              = File.ReadAllBytes(filePath);
            var    startMessage      = "---Start of Test---";
            var    endingMessage     = "---End of Test---";
            var    startMessageBytes = Encoding.ASCII.GetBytes(startMessage);
            var    endMessageBytes   = Encoding.ASCII.GetBytes(endingMessage);

            var b64StartMessage = Convert.ToBase64String(startMessageBytes);
            var b64EndMessage   = Convert.ToBase64String(endMessageBytes);

            var base64String = Convert.ToBase64String(info);

            rijAlg         = new RijndaelManaged();
            rijAlg.Mode    = CipherMode.ECB;
            rijAlg.Padding = PaddingMode.ISO10126;

            encryptor = rijAlg.CreateEncryptor(rijAlg.Key, rijAlg.IV);

            var key = rijAlg.Key;
            var iv  = rijAlg.IV;


            // Encrypt key and iv using RSA
            var encryptedK  = Convert.ToBase64String(Encrypt(key, provider), System.Base64FormattingOptions.None);
            var encryptedIV = Convert.ToBase64String(Encrypt(iv, provider), System.Base64FormattingOptions.None);

            var messageKey = string.Format("{0}:{1}", encryptedK, encryptedIV);



            Action <DeliveryReport <Null, string> > handler = r =>
                                                              Console.WriteLine(!r.Error.IsError
               ? $"Delivered message to {r.TopicPartitionOffset}"
               : $"Delivery Error: {r.Error.Reason}");

            using (var p = new ProducerBuilder <Null, string>(conf).Build())
            {
                var startTime = DateTime.Now.Ticks;

                for (int i = 0; i < 10000; ++i)
                {
                    try
                    {
                        // Encrypt data using AES

                        string dataToEncrypt;
                        if (i == 0)
                        {
                            dataToEncrypt = b64StartMessage;
                        }
                        else if (i == 9999)
                        {
                            dataToEncrypt = b64EndMessage;
                        }
                        else
                        {
                            dataToEncrypt = base64String;
                        }
                        var encryptedValue = EncryptData(dataToEncrypt);
                        var message        = String.Format("{0}#{1}", messageKey, Convert.ToBase64String(encryptedValue));
                        p.Produce("my-topic", new Message <Null, string> {
                            Value = message
                        }, null);

                        if (i % 49 == 0)
                        {
                            p.Flush();
                            Thread.Sleep(100);
                        }
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex.ToString());
                        Thread.Sleep(5000);
                    }
                }

                var endTime        = DateTime.Now.Ticks;
                var totalTimeTaken = (endTime - startTime) / TimeSpan.TicksPerMillisecond;
                Console.WriteLine("Total time took to produce 10000 messages is {0}ms\r\n", totalTimeTaken);

                // wait for up to 10 seconds for any inflight messages to be delivered.
                p.Flush(TimeSpan.FromSeconds(10));
            }

            Console.ReadLine();
        }
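The Encrypt and EncryptData helpers called in this example are not shown. The sketches below are assumptions about what they might do with the RSA provider and the Rijndael encryptor created in Main, not the original implementations:
        // Hypothetical sketch: RSA-encrypt the AES key/IV with the provider loaded above.
        private static byte[] Encrypt(byte[] data, RSACryptoServiceProvider provider)
        {
            // OAEP padding assumed; the original may use PKCS#1 v1.5 instead.
            return provider.Encrypt(data, fOAEP: true);
        }

        // Hypothetical sketch: run the base64 payload through the shared AES encryptor.
        private static byte[] EncryptData(string plainText)
        {
            using (var memoryStream = new MemoryStream())
            {
                using (var cryptoStream = new CryptoStream(memoryStream, encryptor, CryptoStreamMode.Write))
                using (var writer = new StreamWriter(cryptoStream))
                {
                    writer.Write(plainText);
                }

                // ToArray is still valid after the streams are disposed.
                return memoryStream.ToArray();
            }
        }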
        /// <summary>
        /// Main method for console app.
        /// </summary>
        /// <param name="args">No arguments used.</param>
        public static void Main(string[] args)
        {
            Console.WriteLine("Starting .net producer.");

            // Configure the location of the bootstrap server, and Confluent interceptors
            // and a partitioner compatible with Java - see https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
            var producerConfig = new ProducerConfig
            {
                // TODO: configure the location of the bootstrap server
                BootstrapServers   = ???,
                PluginLibraryPaths = "monitoring-interceptor",
                Partitioner = Partitioner.Murmur2Random,
            };

            // Load a driver id from an environment variable
            // if it isn't present use "driver-2"
            string driverId = System.Environment.GetEnvironmentVariable("DRIVER_ID");

            driverId = (!string.IsNullOrEmpty(driverId)) ? driverId : "driver-2";

            Action <DeliveryReport <string, string> > handler = r =>
                                                                Console.WriteLine(!r.Error.IsError
                    ? $"Sent Key:{r.Message.Key} Value:{r.Message.Value}"
                    : $"Delivery Error: {r.Error.Reason}");

            using (var producer = new ProducerBuilder <string, string>(producerConfig).Build())
            {
                Console.CancelKeyPress += (sender, e) =>
                {
                    // wait for up to 10 seconds for any inflight messages to be delivered.
                    Console.WriteLine("Flushing producer and exiting.");
                    producer.Flush(TimeSpan.FromSeconds(10));
                };

                var lines = File.ReadAllLines(Path.Combine(DriverFilePrefix, driverId + ".csv"));
                int i     = 0;
                // Loop forever over the driver CSV file.
                while (true)
                {
                    string line = lines[i];
                    try
                    {
                        // Populate the message: key it by driver id and use the raw CSV line (lat/long) as the value.
                        // These completions are assumptions, mirroring the Avro producer example further below.
                        var message = new Message <string, string> {
                            Key = driverId, Value = line
                        };
                        // Write the lat/long position to the Kafka topic (KafkaTopic is assumed to be the
                        // same topic constant used by the Avro producer below), with `handler` as the
                        // delivery-report callback that prints the key and value.
                        producer.Produce(KafkaTopic, message, handler);
                    }
                    catch (ProduceException <string, string> e)
                    {
                        Console.WriteLine($"Delivery failed: {e.Error.Reason}");
                        break;
                    }

                    Thread.Sleep(1000);
                    i = (i + 1) % lines.Length;
                }
            }
        }
Example #23
        static async Task Main(string[] args)
        {
            if (args.Length != 3)
            {
                Console.WriteLine("Usage: .. bootstrapServers schemaRegistryUrl topicName");
                return;
            }

            string bootstrapServers  = args[0];
            string schemaRegistryUrl = args[1];
            string topicName         = args[2];

            var producerConfig = new ProducerConfig
            {
                BootstrapServers = bootstrapServers
            };

            var schemaRegistryConfig = new SchemaRegistryConfig
            {
                // Note: you can specify more than one schema registry url using the
                // schema.registry.url property for redundancy (comma separated list).
                // The property name is not plural to follow the convention set by
                // the Java implementation.
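                // e.g. SchemaRegistryUrl = "http://sr-host-1:8081,http://sr-host-2:8081"
                // (hypothetical hosts, for illustration only).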
                SchemaRegistryUrl = schemaRegistryUrl,
                // optional schema registry client properties:
                SchemaRegistryRequestTimeoutMs = 5000,
                SchemaRegistryMaxCachedSchemas = 10
            };

            var consumerConfig = new ConsumerConfig
            {
                BootstrapServers = bootstrapServers,
                GroupId          = "avro-specific-example-group"
            };

            var avroSerializerConfig = new AvroSerializerConfig
            {
                // optional Avro serializer properties:
                BufferBytes         = 100,
                AutoRegisterSchemas = true
            };

            // Note: The User class in this project was generated using the Confluent fork of the avrogen.exe tool
        // (available from: https://github.com/confluentinc/avro/tree/confluent-fork) which includes modifications
            // that prevent namespace clashes with user namespaces that include the identifier 'Avro'. AvroSerializer
            // and AvroDeserializer are also compatible with classes generated by the official avrogen.exe tool
            // (available from: https://github.com/apache/avro), with the above limitation.

            CancellationTokenSource cts = new CancellationTokenSource();
            var consumeTask             = Task.Run(() =>
            {
                using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                    using (var consumer =
                               new ConsumerBuilder <string, User>(consumerConfig)
                               .SetKeyDeserializer(new AvroDeserializer <string>(schemaRegistry).AsSyncOverAsync())
                               .SetValueDeserializer(new AvroDeserializer <User>(schemaRegistry).AsSyncOverAsync())
                               .SetErrorHandler((_, e) => Console.WriteLine($"Error: {e.Reason}"))
                               .Build())
                    {
                        consumer.Subscribe(topicName);

                        try
                        {
                            while (true)
                            {
                                try
                                {
                                    var consumeResult = consumer.Consume(cts.Token);

                                    Console.WriteLine($"user name: {consumeResult.Message.Key}, favorite color: {consumeResult.Value.favorite_color}");
                                }
                                catch (ConsumeException e)
                                {
                                    Console.WriteLine($"Consume error: {e.Error.Reason}");
                                }
                            }
                        }
                        catch (OperationCanceledException)
                        {
                            consumer.Close();
                        }
                    }
            });

            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                using (var producer =
                           new ProducerBuilder <string, User>(producerConfig)
                           .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
                           .SetValueSerializer(new AvroSerializer <User>(schemaRegistry))
                           .Build())
                {
                    Console.WriteLine($"{producer.Name} producing on {topicName}. Enter user names, q to exit.");

                    int    i = 0;
                    string text;
                    while ((text = Console.ReadLine()) != "q")
                    {
                        User user = new User {
                            name = text, favorite_color = "green", favorite_number = i++
                        };
                        await producer
                        .ProduceAsync(topicName, new Message <string, User> {
                            Key = text, Value = user
                        })
                        .ContinueWith(task => task.IsFaulted
                            ? $"error producing message: {task.Exception.Message}"
                            : $"produced to: {task.Result.TopicPartitionOffset}");
                    }
                }

            cts.Cancel();
        }
        public static void ProduceConsume(string bootstrapServers, string schemaRegistryServers)
        {
            var producerConfig = new ProducerConfig
            {
                BootstrapServers = bootstrapServers
            };

            var consumerConfig = new ConsumerConfig
            {
                BootstrapServers   = bootstrapServers,
                GroupId            = Guid.NewGuid().ToString(),
                SessionTimeoutMs   = 6000,
                AutoOffsetReset    = AutoOffsetReset.Earliest,
                EnablePartitionEof = true
            };

            var schemaRegistryConfig = new SchemaRegistryConfig
            {
                SchemaRegistryUrl = schemaRegistryServers
            };

            var adminClientConfig = new AdminClientConfig
            {
                BootstrapServers = bootstrapServers
            };

            string topic = Guid.NewGuid().ToString();

            using (var adminClient = new AdminClientBuilder(adminClientConfig).Build())
            {
                adminClient.CreateTopicsAsync(
                    new List <TopicSpecification> {
                    new TopicSpecification {
                        Name = topic, NumPartitions = 1, ReplicationFactor = 1
                    }
                }).Wait();
            }

            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                using (var producer =
                           new ProducerBuilder <string, User>(producerConfig)
                           .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
                           .SetValueSerializer(new AvroSerializer <User>(schemaRegistry))
                           .Build())
                {
                    for (int i = 0; i < 100; ++i)
                    {
                        var user = new User
                        {
                            name            = i.ToString(),
                            favorite_number = i,
                            favorite_color  = "blue"
                        };

                        producer
                        .ProduceAsync(topic, new Message <string, User> {
                            Key = user.name, Value = user
                        })
                        .Wait();
                    }
                    Assert.Equal(0, producer.Flush(TimeSpan.FromSeconds(10)));
                }

            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                using (var consumer =
                           new ConsumerBuilder <string, User>(consumerConfig)
                           .SetKeyDeserializer(new AvroDeserializer <string>(schemaRegistry).AsSyncOverAsync())
                           .SetValueDeserializer(new AvroDeserializer <User>(schemaRegistry).AsSyncOverAsync())
                           .SetErrorHandler((_, e) => Assert.True(false, e.Reason))
                           .Build())
                {
                    consumer.Subscribe(topic);

                    int i = 0;
                    while (true)
                    {
                        var record = consumer.Consume(TimeSpan.FromMilliseconds(100));
                        if (record == null)
                        {
                            continue;
                        }
                        if (record.IsPartitionEOF)
                        {
                            break;
                        }

                        Assert.Equal(i.ToString(), record.Message.Key);
                        Assert.Equal(i.ToString(), record.Message.Value.name);
                        Assert.Equal(i, record.Message.Value.favorite_number);
                        Assert.Equal("blue", record.Message.Value.favorite_color);
                        i += 1;
                    }

                    Assert.Equal(100, i);

                    consumer.Close();
                }
        }
Example #25
        /// <summary>
        /// Main method for console app.
        /// </summary>
        /// <param name="args">No arguments used.</param>
        public static void Main(string[] args)
        {
            Console.WriteLine("Starting .net Avro producer.");
            var producerConfig = new ProducerConfig {
                BootstrapServers = "kafka:9092", PluginLibraryPaths = "monitoring-interceptor"
            };
            var schemaRegistryConfig = new SchemaRegistryConfig {
                Url = "http://schema-registry:8081"
            };
            string driverId = System.Environment.GetEnvironmentVariable("DRIVER_ID");

            driverId = (!string.IsNullOrEmpty(driverId)) ? driverId : "driver-2";

            Action <DeliveryReport <string, PositionValue> > handler = r =>
                                                                       Console.WriteLine(!r.Error.IsError
                    ? $"Sent Key:{r.Message.Key} Latitude:{r.Message.Value.latitude} Longitude:{r.Message.Value.longitude}"
                    : $"Delivery Error: {r.Error.Reason}");

            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                using (var producer = new ProducerBuilder <string, PositionValue>(producerConfig)
                                      .SetValueSerializer(new AvroSerializer <PositionValue>(schemaRegistry).AsSyncOverAsync())
                                      .Build())
                {
                    Console.CancelKeyPress += (sender, e) =>
                    {
                        // wait for up to 10 seconds for any inflight messages to be delivered.
                        Console.WriteLine("Flushing producer and exiting.");
                        producer.Flush(TimeSpan.FromSeconds(10));
                    };

                    var lines = File.ReadAllLines(Path.Combine(DriverFilePrefix, "driver-1" + ".csv"));
                    int i     = 0;
                    while (true)
                    {
                        string line       = lines[i];
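                        // Each CSV line is assumed to be "latitude,longitude".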
                        double latitude1  = double.Parse(line.Split(",")[0]);
                        double longitude1 = double.Parse(line.Split(",")[1]);
                        var    position   = new PositionValue {
                            latitude = latitude1, longitude = longitude1
                        };

                        try
                        {
                            producer.Produce(
                                KafkaTopic,
                                new Message <string, PositionValue> {
                                Key = driverId, Value = position
                            },
                                handler);
                        }
                        catch (ProduceException <string, PositionValue> e)
                        {
                            Console.WriteLine($"Delivery failed: {e.Error.Reason}");
                            break;
                        }

                        Thread.Sleep(1000);
                        i = (i + 1) % lines.Length;
                    }
                }
        }
Example #26
        // Topic TestPartition => 3 partitions
        public static async Task Main(string[] args)
        {
            if (args.Length != 2)
            {
                Console.WriteLine("Usage: .. brokerList topicName");
                Console.WriteLine("Ex:  Ubuntu-x64:9092 Toto / TestPartition");
                return;
            }

            string brokerList = args[0];
            string topicName  = args[1];

            var config = new ProducerConfig {
                BootstrapServers = brokerList
            };

            using (var producer = new ProducerBuilder <string, string>(config).Build())
            {
                Console.WriteLine("\n-----------------------------------------------------------------------");
                Console.WriteLine($"Producer {producer.Name} producing on topic {topicName}.");
                Console.WriteLine("-----------------------------------------------------------------------");
                Console.WriteLine("To create a kafka message with UTF-8 encoded key and value:");
                Console.WriteLine("> key value<Enter>");
                Console.WriteLine("To create a kafka message with a null key and UTF-8 encoded value:");
                Console.WriteLine("> value<enter>");
                Console.WriteLine("Ctrl-C to quit.\n");

                var cancelled = false;
                Console.CancelKeyPress += (_, e) => {
                    e.Cancel  = true; // prevent the process from terminating.
                    cancelled = true;
                };

                while (!cancelled)
                {
                    Console.Write("> ");

                    string text;
                    try
                    {
                        text = Console.ReadLine();
                    }
                    catch (IOException)
                    {
                        // IO exception is thrown when ConsoleCancelEventArgs.Cancel == true.
                        break;
                    }
                    if (text == null)
                    {
                        // Console.ReadLine returned null before
                        // the CancelKeyPress event was handled.
                        break;
                    }

                    string key = null;
                    string val = text;

                    // split line if both key and value specified.
                    int index = text.IndexOf(" ");
                    if (index != -1)
                    {
                        key = text.Substring(0, index);
                        val = text.Substring(index + 1);
                    }

                    try
                    {
                        // Note: Awaiting the asynchronous produce request below prevents flow of execution
                        // from proceeding until the acknowledgement from the broker is received (at the
                        // expense of low throughput).
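                        // For higher throughput, Produce with a delivery-report callback (as in the
                        // other examples in this document) avoids blocking on each acknowledgement.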
                        Console.WriteLine($"{key}|{val}");
                        var deliveryReport = await producer.ProduceAsync(
                            topicName, new Message <string, string> {
                            Key = key, Value = val
                        });

                        Console.WriteLine($"delivered to: {deliveryReport.TopicPartitionOffset}");
                    }
                    catch (ProduceException <string, string> e)
                    {
                        Console.WriteLine($"failed to deliver message: {e.Message} [{e.Error.Code}]");
                    }
                }

                // Since we are producing synchronously, at this point there will be no messages
                // in-flight and no delivery reports waiting to be acknowledged, so there is no
                // need to call producer.Flush before disposing the producer.
            }
        }
Example #27
        public void AssignOverloads(string bootstrapServers)
        {
            LogToFile("start AssignOverloads");

            var consumerConfig = new ConsumerConfig
            {
                GroupId          = Guid.NewGuid().ToString(),
                BootstrapServers = bootstrapServers,
                SessionTimeoutMs = 6000,
                EnableAutoCommit = false
            };
            var producerConfig = new ProducerConfig {
                BootstrapServers = bootstrapServers
            };

            var testString  = "hello world";
            var testString2 = "hello world 2";
            var testString3 = "hello world 3";
            var testString4 = "hello world 4";

            DeliveryResult <Null, string> dr, dr3;

            using (var producer = new ProducerBuilder <Null, string>(producerConfig).Build())
            {
                dr = producer.ProduceAsync(singlePartitionTopic, new Message <Null, string> {
                    Value = testString
                }).Result;
                producer.ProduceAsync(singlePartitionTopic, new Message <Null, string> {
                    Value = testString2
                }).Wait();
                dr3 = producer.ProduceAsync(singlePartitionTopic, new Message <Null, string> {
                    Value = testString3
                }).Result;
                producer.ProduceAsync(singlePartitionTopic, new Message <Null, string> {
                    Value = testString4
                }).Wait();
                producer.Flush(TimeSpan.FromSeconds(10));
            }

            using (var consumer = new ConsumerBuilder <Null, string>(consumerConfig).Build())
            {
                // Explicitly specify partition offset.
                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    new TopicPartitionOffset(dr.TopicPartition, dr.Offset)
                });
                var cr = consumer.Consume(TimeSpan.FromSeconds(10));
                consumer.Commit();
                Assert.Equal(cr.Value, testString);

                // Determine offset to consume from automatically.
                consumer.Assign(new List <TopicPartition>()
                {
                    dr.TopicPartition
                });
                cr = consumer.Consume(TimeSpan.FromSeconds(10));
                consumer.Commit();
                Assert.NotNull(cr.Message);
                Assert.Equal(cr.Message.Value, testString2);

                // Explicitly specify partition offset.
                consumer.Assign(new TopicPartitionOffset(dr.TopicPartition, dr3.Offset));
                cr = consumer.Consume(TimeSpan.FromSeconds(10));
                consumer.Commit();
                Assert.Equal(cr.Value, testString3);

                // Determine offset to consume from automatically.
                consumer.Assign(dr.TopicPartition);
                cr = consumer.Consume(TimeSpan.FromSeconds(10));
                consumer.Commit();
                Assert.NotNull(cr.Message);
                Assert.Equal(cr.Message.Value, testString4);
            }

            Assert.Equal(0, Library.HandleCount);
            LogToFile("end   AssignOverloads");
        }
Example #28
        static void Main(string[] args)
        {
            var config = new ConsumerConfig
            {
                GroupId          = "re",
                BootstrapServers = "192.168.3.10:9092",
                AutoOffsetReset  = AutoOffsetReset.Earliest
            };

            using (var consumer = new ConsumerBuilder <Ignore, string>(config).Build())
            {
                consumer.Subscribe("reserva");

                CancellationTokenSource cts = new CancellationTokenSource();
                Console.CancelKeyPress += (_, e) => {
                    e.Cancel = true;
                    cts.Cancel();
                };

                try
                {
                    while (true)
                    {
                        try
                        {
                            var cr = consumer.Consume(cts.Token);
                            Console.WriteLine(cr.Value);

                            OrdenCompraModel ordenCompra = JsonConvert.DeserializeObject <OrdenCompraModel>(cr.Value);
                            ordenCompra.sEstado = "reservado";

                            ProductoDAO productoDAO = new ProductoDAO();
                            productoDAO.actualizarStock(ordenCompra.lDetalleCompra);

                            OrdenCompraDAO ordenCompraDAO = new OrdenCompraDAO();
                            ordenCompraDAO.registrarOrdenCompra(ordenCompra);

                            var config2 = new ProducerConfig {
                                BootstrapServers = "192.168.3.10:9092"
                            };

                            Action <DeliveryReport <Null, string> > handler = r =>
                                                                              Console.WriteLine(!r.Error.IsError
                                ? $"Delivered message to {r.TopicPartitionOffset}"
                                : $"Delivery Error: {r.Error.Reason}");

                            using (var producer = new ProducerBuilder <Null, string>(config2).Build())
                            {
                                producer.ProduceAsync("factura", new Message <Null, string> {
                                    Value = JsonConvert.SerializeObject(ordenCompra)
                                });
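                                // Note: the returned task is not awaited here; the Flush call below
                                // blocks until the message is delivered (or the timeout elapses).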

                                producer.Flush(TimeSpan.FromSeconds(10));
                            }
                        }
                        catch (ConsumeException e)
                        {
                            Console.WriteLine($"Error occured: {e.Error.Reason}");
                        }
                    }
                }
                catch (OperationCanceledException)
                {
                    consumer.Close();
                }
            }
        }
Example #29
        public static async Task Producer(string brokerList, string connStr, string topic, string cacertlocation)
        {
            try
            {
                var config = new ProducerConfig
                {
                    BootstrapServers = brokerList,
                    SecurityProtocol = SecurityProtocol.SaslSsl,
                    SaslMechanism    = SaslMechanism.Plain,
                    SaslUsername     = "******",
                    SaslPassword     = connStr,
                    SslCaLocation    = "",
                    //Debug = "security,broker,protocol"        //Uncomment for librdkafka debugging information
                };
                using (var producer = new ProducerBuilder <long, string>(config).SetKeySerializer(Serializers.Int64).SetValueSerializer(Serializers.Utf8).Build())
                {
                    Console.WriteLine("Sending 10 messages to topic: " + topic + ", broker(s): " + brokerList);

                    //Data Creation
                    CatalogAvailability availability = new CatalogAvailability
                    {
                        UW = "Yes"
                    };

                    List <string> CategoryList = new List <string>
                    {
                        "Cat1", "Cat2", "Cat3", "Cat4"
                    };

                    CatalogContractPrices contractPrices = new CatalogContractPrices
                    {
                        ContractA         = "Yes",
                        ContractC         = "C",
                        ContractCategoryA = "CatA"
                    };

                    CatalogVisibility visibility = new CatalogVisibility
                    {
                        ContractA = "A level Visibility",
                        ContractC = "C Visible",
                        Default   = "true"
                    };

                    CatalogFields fields = new CatalogFields {
                        Availability   = availability, Brand = "New", CategoryEn = CategoryList, CategoryFr = CategoryList,
                        ContractPrices = contractPrices, NetPrice = "20.2", ProductID = "99115", SaleRank = "First",
                        TitleEn        = "Good Product", TitleFr = "French Product", Visibility = visibility
                    };

                    CatalogModel catalogModel = new CatalogModel
                    {
                        Fields = fields
                    };

                    var CatalogJsonString = JsonConvert.SerializeObject(catalogModel);

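                    // Send 10 timestamped copies of the catalog payload, keyed by the current UTC tick count.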
                    for (int x = 0; x < 10; x++)
                    {
                        var msg            = string.Format("Sample Catalog #{0} sent at {1}", CatalogJsonString, DateTime.Now.ToString("yyyy-MM-dd_HH:mm:ss.ffff"));
                        var deliveryReport = await producer.ProduceAsync(topic, new Message <long, string> {
                            Key = DateTime.UtcNow.Ticks, Value = msg
                        });

                        Console.WriteLine(string.Format("Message {0} sent (value: '{1}')", x, msg));
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(string.Format("Exception Occurred - {0}", e.Message));
            }
        }
Example #30
        public void Consumer_Poll_Error(string bootstrapServers)
        {
            LogToFile("start Consumer_Poll_Error");

            var producerConfig = new ProducerConfig {
                BootstrapServers = bootstrapServers
            };

            TopicPartitionOffset firstProduced = null;

            using (var producer = new ProducerBuilder <byte[], byte[]>(producerConfig).Build())
            {
                var keyData = Encoding.UTF8.GetBytes("key");
                firstProduced = producer.ProduceAsync(singlePartitionTopic, new Message <byte[], byte[]> {
                    Key = keyData
                }).Result.TopicPartitionOffset;
                var valData = Encoding.UTF8.GetBytes("val");
                producer.ProduceAsync(singlePartitionTopic, new Message <byte[], byte[]> {
                    Value = valData
                });
                Assert.True(producer.Flush(TimeSpan.FromSeconds(10)) == 0);
            }

            var consumerConfig = new ConsumerConfig
            {
                GroupId            = Guid.NewGuid().ToString(),
                BootstrapServers   = bootstrapServers,
                SessionTimeoutMs   = 6000,
                EnablePartitionEof = true
            };

            // test key deserialization error behavior
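            // The first message was produced with a non-empty byte[] key, so the Null key
            // deserializer below fails, surfacing as a ConsumeException with Local_KeyDeserialization.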
            using (var consumer =
                       new ConsumerBuilder <Null, string>(consumerConfig)
                       .SetRebalanceHandler((c, e) =>
            {
                if (e.IsAssignment)
                {
                    Assert.Single(e.Partitions);
                    Assert.Equal(firstProduced.TopicPartition, e.Partitions[0]);
                    c.Assign(e.Partitions.Select(p => new TopicPartitionOffset(p, firstProduced.Offset)));
                }
                else
                {
                    c.Unassign();
                }
            })
                       .Build())
            {
                consumer.Subscribe(singlePartitionTopic);

                int msgCnt = 0;
                int errCnt = 0;
                while (true)
                {
                    var s = consumer.Subscription;
                    try
                    {
                        var record = consumer.Consume(TimeSpan.FromSeconds(10));
                        if (record == null)
                        {
                            continue;
                        }
                        if (record.IsPartitionEOF)
                        {
                            break;
                        }

                        msgCnt += 1;
                    }
                    catch (ConsumeException e)
                    {
                        errCnt += 1;
                        Assert.Equal(ErrorCode.Local_KeyDeserialization, e.Error.Code);
                        Assert.Equal(firstProduced.Offset.Value, e.ConsumerRecord.Offset.Value);
                    }
                }

                Assert.Equal(1, msgCnt);
                Assert.Equal(1, errCnt);

                consumer.Close();
            }

            // test value deserialization error behavior.
            using (var consumer =
                       new ConsumerBuilder <string, Null>(consumerConfig)
                       .SetRebalanceHandler((c, e) =>
            {
                if (e.IsAssignment)
                {
                    Assert.Single(e.Partitions);
                    Assert.Equal(firstProduced.TopicPartition, e.Partitions[0]);
                    c.Assign(e.Partitions.Select(p => new TopicPartitionOffset(p, firstProduced.Offset)));
                }
                else
                {
                    c.Unassign();
                }
            })
                       .Build())
            {
                consumer.Subscribe(singlePartitionTopic);

                int msgCnt = 0;
                int errCnt = 0;
                while (true)
                {
                    try
                    {
                        var record = consumer.Consume(TimeSpan.FromSeconds(10));
                        if (record == null)
                        {
                            continue;
                        }
                        if (record.IsPartitionEOF)
                        {
                            break;
                        }

                        msgCnt += 1;
                    }
                    catch (ConsumeException e)
                    {
                        errCnt += 1;
                        Assert.Equal(ErrorCode.Local_ValueDeserialization, e.Error.Code);
                        Assert.Equal(firstProduced.Offset.Value + 1, e.ConsumerRecord.Offset.Value);
                    }
                }

                Assert.Equal(1, msgCnt);
                Assert.Equal(1, errCnt);

                consumer.Close();
            }

            Assert.Equal(0, Library.HandleCount);
            LogToFile("end   Consumer_Poll_Error");
        }
            public static ProducerSubmission VatRegistered(bool vatRegistered)
            {
                var builder = new ProducerBuilder();
                builder.vatRegistered = vatRegistered;

                return builder.Build();
            }
Example #32
        public void Producer_BeginProduce_Error(string bootstrapServers)
        {
            LogToFile("start Producer_BeginProduce_Error");

            var producerConfig = new ProducerConfig {
                BootstrapServers = bootstrapServers
            };


            // serializer case.
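            // Producing to partition 1 of a single-partition topic cannot succeed, so the
            // delivery report is expected to carry ErrorCode.Local_UnknownPartition.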

            int count = 0;
            Action <DeliveryReport <Null, String> > dh = (DeliveryReport <Null, String> dr) =>
            {
                Assert.Equal(ErrorCode.Local_UnknownPartition, dr.Error.Code);
                Assert.False(dr.Error.IsFatal);
                Assert.Equal((Partition)1, dr.Partition);
                Assert.Equal(singlePartitionTopic, dr.Topic);
                Assert.Equal(Offset.Invalid, dr.Offset);
                Assert.Null(dr.Message.Key);
                Assert.Equal("test", dr.Message.Value);
                Assert.Equal(PersistenceStatus.NotPersisted, dr.Status);
                Assert.Equal(TimestampType.NotAvailable, dr.Message.Timestamp.Type);
                count += 1;
            };

            using (var producer =
                       new ProducerBuilder <Null, String>(producerConfig)
                       .SetKeySerializer(Serializers.Null)
                       .SetValueSerializer(Serializers.Utf8)
                       .Build())
            {
                producer.BeginProduce(new TopicPartition(singlePartitionTopic, 1), new Message <Null, String> {
                    Value = "test"
                }, dh);
                producer.Flush(TimeSpan.FromSeconds(10));
            }

            Assert.Equal(1, count);


            // byte[] case.

            count = 0;
            Action <DeliveryReport <byte[], byte[]> > dh2 = (DeliveryReport <byte[], byte[]> dr) =>
            {
                Assert.Equal(ErrorCode.Local_UnknownPartition, dr.Error.Code);
                Assert.Equal((Partition)42, dr.Partition);
                Assert.Equal(singlePartitionTopic, dr.Topic);
                Assert.Equal(Offset.Invalid, dr.Offset);
                Assert.Equal(new byte[] { 11 }, dr.Message.Key);
                Assert.Null(dr.Message.Value);
                Assert.Equal(TimestampType.NotAvailable, dr.Message.Timestamp.Type);
                count += 1;
            };

            using (var producer = new ProducerBuilder <byte[], byte[]>(producerConfig).Build())
            {
                producer.BeginProduce(new TopicPartition(singlePartitionTopic, 42), new Message <byte[], byte[]> {
                    Key = new byte[] { 11 }
                }, dh2);
                producer.Flush(TimeSpan.FromSeconds(10));
            }

            Assert.Equal(1, count);

            Assert.Equal(0, Library.HandleCount);
            LogToFile("end   Producer_BeginProduce_Error");
        }
            public static ProducerSubmission WithObligationType(ObligationType obligationType)
            {
                var builder = new ProducerBuilder();
                builder.obligationType = obligationType;

                return builder.Build();
            }
        static async Task Main(string[] args)
        {
            if (args.Length != 3)
            {
                Console.WriteLine("Usage: .. bootstrapServers schemaRegistryUrl topicName");
                return;
            }

            string bootstrapServers  = args[0];
            string schemaRegistryUrl = args[1];
            string topicName         = args[2];

            var producerConfig = new ProducerConfig
            {
                BootstrapServers = bootstrapServers
            };

            var schemaRegistryConfig = new SchemaRegistryConfig
            {
                // Note: you can specify more than one schema registry url using the
                // schema.registry.url property for redundancy (comma separated list).
                // The property name is not plural to follow the convention set by
                // the Java implementation.
                Url = schemaRegistryUrl
            };

            var consumerConfig = new ConsumerConfig
            {
                BootstrapServers = bootstrapServers,
                GroupId          = "json-example-consumer-group"
            };

            // Note: Specifying json serializer configuration is optional.
            var jsonSerializerConfig = new JsonSerializerConfig
            {
                BufferBytes = 100
            };

            CancellationTokenSource cts = new CancellationTokenSource();
            var consumeTask             = Task.Run(() =>
            {
                using (var consumer =
                           new ConsumerBuilder <Null, Person>(consumerConfig)
                           .SetValueDeserializer(new JsonDeserializer <Person>().AsSyncOverAsync())
                           .SetErrorHandler((_, e) => Console.WriteLine($"Error: {e.Reason}"))
                           .Build())
                {
                    consumer.Subscribe(topicName);

                    try
                    {
                        while (true)
                        {
                            try
                            {
                                var cr = consumer.Consume(cts.Token);
                                Console.WriteLine($"Name: {cr.Message.Value.FirstName} {cr.Message.Value.LastName}, age: {cr.Message.Value.Age}");
                            }
                            catch (ConsumeException e)
                            {
                                Console.WriteLine($"Consume error: {e.Error.Reason}");
                            }
                        }
                    }
                    catch (OperationCanceledException)
                    {
                        consumer.Close();
                    }
                }
            });

            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                using (var producer =
                           new ProducerBuilder <Null, Person>(producerConfig)
                           .SetValueSerializer(new JsonSerializer <Person>(schemaRegistry, jsonSerializerConfig))
                           .Build())
                {
                    Console.WriteLine($"{producer.Name} producing on {topicName}. Enter first names, q to exit.");

                    int    i = 0;
                    string text;
                    while ((text = Console.ReadLine()) != "q")
                    {
                        Person person = new Person {
                            FirstName = text, LastName = "lastname", Age = i++ % 150
                        };
                        await producer
                        .ProduceAsync(topicName, new Message <Null, Person> {
                            Value = person
                        })
                        .ContinueWith(task => task.IsFaulted
                            ? $"error producing message: {task.Exception.Message}"
                            : $"produced to: {task.Result.TopicPartitionOffset}");
                    }
                }

            cts.Cancel();

            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
            {
                // Note: a subject name strategy was not configured, so the default "Topic" was used.
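                // With the Topic strategy, ConstructValueSubjectName(topicName) resolves to "<topicName>-value".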
                var schema = await schemaRegistry.GetLatestSchemaAsync(SubjectNameStrategy.Topic.ConstructValueSubjectName(topicName));

                Console.WriteLine("\nThe JSON schema corresponding to the written data:");
                Console.WriteLine(schema.SchemaString);
            }
        }
            public static ProducerSubmission WithSellingTechniqueType(SellingTechniqueType sellingTechniqueType)
            {
                var builder = new ProducerBuilder();
                builder.sellingTechniqueType = sellingTechniqueType;

                return builder.Build();
            }
        private static void ProduceConsume(string bootstrapServers, string schemaRegistryServers, SubjectNameStrategy nameStrategy)
        {
            var producerConfig = new ProducerConfig
            {
                BootstrapServers = bootstrapServers
            };

            var consumerConfig = new ConsumerConfig
            {
                BootstrapServers   = bootstrapServers,
                GroupId            = Guid.NewGuid().ToString(),
                SessionTimeoutMs   = 6000,
                AutoOffsetReset    = AutoOffsetReset.Earliest,
                EnablePartitionEof = true
            };

            var schemaRegistryConfig = new SchemaRegistryConfig
            {
                Url = schemaRegistryServers,
            };

            var avroSerializerConfig = new AvroSerializerConfig
            {
                SubjectNameStrategy = nameStrategy
            };

            var adminClientConfig = new AdminClientConfig
            {
                BootstrapServers = bootstrapServers
            };

            string topic = Guid.NewGuid().ToString();

            using (var adminClient = new AdminClientBuilder(adminClientConfig).Build())
            {
                adminClient.CreateTopicsAsync(
                    new List <TopicSpecification> {
                    new TopicSpecification {
                        Name = topic, NumPartitions = 1, ReplicationFactor = 1
                    }
                }).Wait();
            }

            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                using (var producer =
                           new ProducerBuilder <string, ProduceConsumeUser>(producerConfig)
                           .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
                           // Test ValueSubjectNameStrategy here,
                           // and KeySubjectNameStrategy in ProduceConsumeGeneric.
                           .SetValueSerializer(new AvroSerializer <ProduceConsumeUser>(schemaRegistry, avroSerializerConfig))
                           .Build())
                {
                    for (int i = 0; i < 100; ++i)
                    {
                        var user = new ProduceConsumeUser
                        {
                            name            = i.ToString(),
                            favorite_number = i,
                            favorite_color  = "blue"
                        };

                        producer
                        .ProduceAsync(topic, new Message <string, ProduceConsumeUser> {
                            Key = user.name, Value = user
                        })
                        .Wait();
                    }
                    Assert.Equal(0, producer.Flush(TimeSpan.FromSeconds(10)));
                }

            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
                using (var consumer =
                           new ConsumerBuilder <string, ProduceConsumeUser>(consumerConfig)
                           .SetKeyDeserializer(new AvroDeserializer <string>(schemaRegistry).AsSyncOverAsync())
                           .SetValueDeserializer(new AvroDeserializer <ProduceConsumeUser>(schemaRegistry).AsSyncOverAsync())
                           .SetErrorHandler((_, e) => Assert.True(false, e.Reason))
                           .Build())
                {
                    consumer.Subscribe(topic);

                    int i = 0;
                    while (true)
                    {
                        var record = consumer.Consume(TimeSpan.FromMilliseconds(100));
                        if (record == null)
                        {
                            continue;
                        }
                        if (record.IsPartitionEOF)
                        {
                            break;
                        }

                        Assert.Equal(i.ToString(), record.Message.Key);
                        Assert.Equal(i.ToString(), record.Message.Value.name);
                        Assert.Equal(i, record.Message.Value.favorite_number);
                        Assert.Equal("blue", record.Message.Value.favorite_color);
                        i += 1;
                    }

                    Assert.Equal(100, i);

                    consumer.Close();
                }

            // Check that what's in schema registry is what's expected.
            using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
            {
                var subjects = schemaRegistry.GetAllSubjectsAsync().Result;

                if (nameStrategy == SubjectNameStrategy.TopicRecord)
                {
                    Assert.Equal(2, (int)subjects.Where(s => s.Contains(topic)).Count());
                    Assert.Single(subjects.Where(s => s == $"{topic}-key"));
                    Assert.Single(subjects.Where(s => s == $"{topic}-{((Avro.RecordSchema)ProduceConsumeUser._SCHEMA).Fullname}"));
                }

                if (nameStrategy == SubjectNameStrategy.Topic)
                {
                    Assert.Equal(2, (int)subjects.Where(s => s.Contains(topic)).Count());
                    Assert.Single(subjects.Where(s => s == $"{topic}-key"));
                    Assert.Single(subjects.Where(s => s == $"{topic}-value"));
                }

                if (nameStrategy == SubjectNameStrategy.Record)
                {
                    Assert.Single(subjects.Where(s => s.Contains(topic))); // the string key.
                    Assert.Single(subjects.Where(s => s == $"{topic}-key"));
                    Assert.Single(subjects.Where(s => s == $"{((Avro.RecordSchema)ProduceConsumeUser._SCHEMA).Fullname}"));
                }
            }
        }
            public static ProducerSubmission WithAuthorisedRepresentative(AuthorisedRepresentative authorisedRepresentative)
            {
                var builder = new ProducerBuilder();
                builder.authorisedRepresentative = authorisedRepresentative;

                return builder.Build();
            }
Example #38
        static async Task Main(string[] args)
        {
            if (args.Length != 3)
            {
                Console.WriteLine("Usage: .. bootstrapServers schemaRegistryUrl topicName");
                return;
            }

            string bootstrapServers  = args[0];
            string schemaRegistryUrl = args[1];
            string topicName         = args[2];
            string groupName         = "avro-generic-example-group";

            // var s = (RecordSchema)RecordSchema.Parse(File.ReadAllText("my-schema.json"));
            var s = (RecordSchema)RecordSchema.Parse(
                @"{
                    ""namespace"": ""Confluent.Kafka.Examples.AvroSpecific"",
                    ""type"": ""record"",
                    ""name"": ""User"",
                    ""fields"": [
                        {""name"": ""name"", ""type"": ""string""},
                        {""name"": ""favorite_number"",  ""type"": [""int"", ""null""]},
                        {""name"": ""favorite_color"", ""type"": [""string"", ""null""]}
                    ]
                  }"
                );

            CancellationTokenSource cts = new CancellationTokenSource();
            var consumeTask             = Task.Run(() =>
            {
                using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig {
                    SchemaRegistryUrl = schemaRegistryUrl
                }))
                    using (var consumer =
                               new ConsumerBuilder <string, GenericRecord>(new ConsumerConfig {
                        BootstrapServers = bootstrapServers, GroupId = groupName
                    })
                               .SetKeyDeserializer(new AvroDeserializer <string>(schemaRegistry))
                               .SetValueDeserializer(new AvroDeserializer <GenericRecord>(schemaRegistry))
                               .SetErrorHandler((_, e) => Console.WriteLine($"Error: {e.Reason}"))
                               .Build())
                    {
                        consumer.Subscribe(topicName);

                        while (!cts.Token.IsCancellationRequested)
                        {
                            try
                            {
                                var consumeResult = consumer.Consume(cts.Token);

                                Console.WriteLine($"Key: {consumeResult.Message.Key}\nValue: {consumeResult.Value}");
                            }
                            catch (ConsumeException e)
                            {
                                Console.WriteLine("Consume error: " + e.Error.Reason);
                            }
                        }

                        consumer.Close();
                    }
            }, cts.Token);

            using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig {
                SchemaRegistryUrl = schemaRegistryUrl
            }))
                using (var producer =
                           new ProducerBuilder <string, GenericRecord>(new ProducerConfig {
                    BootstrapServers = bootstrapServers
                })
                           .SetKeySerializer(new AvroSerializer <string>(schemaRegistry))
                           .SetValueSerializer(new AvroSerializer <GenericRecord>(schemaRegistry))
                           .Build())
                {
                    Console.WriteLine($"{producer.Name} producing on {topicName}. Enter user names, q to exit.");

                    int    i = 0;
                    string text;
                    while ((text = Console.ReadLine()) != "q")
                    {
                        var record = new GenericRecord(s);
                        record.Add("name", text);
                        record.Add("favorite_number", i++);
                        record.Add("favorite_color", "blue");

                        await producer
                        .ProduceAsync(topicName, new Message <string, GenericRecord> {
                            Key = text, Value = record
                        })
                        .ContinueWith(task => task.IsFaulted
                            ? $"error producing message: {task.Exception.Message}"
                            : $"produced to: {task.Result.TopicPartitionOffset}");
                    }
                }

            cts.Cancel();
        }
            public static ProducerSubmission WithBrandNames(List<BrandName> brandNames)
            {
                var builder = new ProducerBuilder();
                builder.brandNames = brandNames;

                return builder.Build();
            }
        public void SendMessage()
        {
            var random = new Random();
            var config = new ProducerConfig {
                BootstrapServers   = "omnibus-01.srvs.cloudkafka.com:9094,omnibus-02.srvs.cloudkafka.com:9094,omnibus-03.srvs.cloudkafka.com:9094"
                , SaslUsername     = "******",
                SaslPassword       = "******"
                , SecurityProtocol = SecurityProtocol.SaslSsl,
                SaslMechanism      = SaslMechanism.ScramSha256
            };

            var user = new User {
                Id = 100, Email = "*****@*****.**", Nome = "Teste"
            };
            var produto = new Produto {
                Id = 1, Nome = "Televisao", Valor = 10
            };

            using (var p = new ProducerBuilder <Null, User>(config).
                           SetValueSerializer(new CustomSerializer <User>()).
                           Build())
            {
                try
                {
                    var header = new Headers()
                    {
                    };
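                    // The TYPE header carries the CLR type name, presumably so a consumer can
                    // choose the matching deserializer for the payload.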
                    header.Add("TYPE", Encoding.UTF8.GetBytes(typeof(User).Name));
                    for (int i = 0; i < 10; i++)
                    {
                        var dr = p.ProduceAsync(Topicos[random.Next(2)], new Message <Null, User> {
                            Headers = header, Value = user
                        }).GetAwaiter().GetResult();
                        Console.WriteLine($"Delivered '{dr.Value}' to '{dr.TopicPartitionOffset}'");
                        user.Id++;
                    }
                }
                catch (ProduceException <Null, User> e)
                {
                    Console.WriteLine($"Delivery failed: {e.Error.Reason}");
                }
            }

            using (var p = new ProducerBuilder <Null, Produto>(config).
                           SetValueSerializer(new CustomSerializer <Produto>()).
                           Build())
            {
                try
                {
                    var header = new Headers()
                    {
                    };
                    header.Add("TYPE", Encoding.UTF8.GetBytes(typeof(Produto).Name));
                    for (int i = 0; i < 10; i++)
                    {
                        var dr = p.ProduceAsync(Topicos[random.Next(2)], new Message <Null, Produto> {
                            Headers = header, Value = produto
                        }).GetAwaiter().GetResult();
                        Console.WriteLine($"Delivered '{dr.Value}' to '{dr.TopicPartitionOffset}'");
                        produto.Id++;
                    }
                }
                catch (ProduceException <Null, Produto> e)
                {
                    Console.WriteLine($"Delivery failed: {e.Error.Reason}");
                }
            }
        }
            public static ProducerSubmission WithCeaseToExist(DateTime? ceaseToExist)
            {
                var builder = new ProducerBuilder();
                builder.ceaseToExist = ceaseToExist;

                return builder.Build();
            }
        static async Task Main(string[] args)
        {
            var logger = new LoggerConfiguration()
                         .WriteTo.Console()
                         .CreateLogger();

            logger.Information("Testando o envio de mensagens com Kafka");

            if (args.Length < 3)
            {
                logger.Error(
                    "Provide at least 3 parameters: " +
                    "the first is the IP/port of the Kafka broker to test against, " +
                    "the second is the Topic that will receive the message, " +
                    "and from the third onwards, the messages to be " +
                    "sent to a Topic in Kafka...");
                return;
            }

            string bootstrapServers = args[0];
            string nomeTopic        = args[1];

            logger.Information($"BootstrapServers = {bootstrapServers}");
            logger.Information($"Topic = {nomeTopic}");

            try
            {
                var config = new ProducerConfig
                {
                    BootstrapServers = bootstrapServers,
                    Partitioner      = Partitioner.ConsistentRandom
                };

                using (var producer = new ProducerBuilder <Null, string>(config).Build())
                {
                    while (true)
                    {
                        for (int i = 2; i < args.Length; i++)
                        {
                            var result = await producer.ProduceAsync(
                                nomeTopic,
                                new Message <Null, string>
                            {
                                Value = args[i] + ": " + DateTime.Now.ToString("HH:mm:ss")
                            });

                            logger.Information(
                                $"Message: {args[i]} | " +
                                $"Status: { result.Status.ToString()}");
                        }
                        logger.Information("Concluído o envio de mensagens");
                        Thread.Sleep(5000);
                    }
                }
            }
            catch (Exception ex)
            {
                logger.Error($"Exceção: {ex.GetType().FullName} | " +
                             $"Mensagem: {ex.Message}");
            }
        }