public KafkaEventBus(Dictionary<string, object> config, ISerialization serializer, RouteTable routes)
{
    _serializer = serializer;
    _routes = routes;
    _encoding = Encoding.UTF8;
    _config = config;

    var topics = routes.InboundCommands.Select(c => c.Name)
        .Union(routes.InboundEvents.Select(c => c.Name));

    var seri = new StringSerializer(_encoding);
    var deseri = new StringDeserializer(_encoding);
    var noop = new NullDeserializer();

    _outbound = new Producer<Null, string>(_config, new NullSerializer(), seri);

    var cfg = new Dictionary<string, object>(_config);
    cfg.Add("group.id", Environment.MachineName); // TODO: not right.

    _inbound = new Consumer<Null, string>(cfg, noop, deseri);
    _inbound.OnMessage += (key, val) => { OnMessageReceived(val); };
    _inbound.Subscribe(topics);

    _thread = new ConsumerPollerThread(_inbound);
    _thread.Start();
}
public void TestChangeDefaultSerialization()
{
    var config = new SerializationConfig();
    ISerializer ser = new StringSerializer();
    IDeserializer deser = new StringDeserializer();

    var s = config.GetSerializersForTopic("topicnotfound");
    Assert.That(s, Is.EqualTo(Tuple.Create(ByteArraySerialization.DefaultSerializer, ByteArraySerialization.DefaultSerializer)));
    var d = config.GetDeserializersForTopic("topicnotfound");
    Assert.That(d, Is.EqualTo(Tuple.Create(ByteArraySerialization.DefaultDeserializer, ByteArraySerialization.DefaultDeserializer)));

    config.SetDefaultSerializers(ser, ser);
    config.SetDefaultDeserializers(deser, deser);

    s = config.GetSerializersForTopic("topicnotfound");
    Assert.That(s, Is.EqualTo(Tuple.Create(ser, ser)));
    d = config.GetDeserializersForTopic("topicnotfound");
    Assert.That(d, Is.EqualTo(Tuple.Create(deser, deser)));
}
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
    services.Configure<MongoConfigurationOptions>(Configuration.GetSection("MongoDb"));
    services.Configure<KafkaSettings>(Configuration.GetSection("Kafka"));

    services.AddMvc().SetCompatibilityVersion(CompatibilityVersion.Version_2_1);

    services.AddAuthentication("Bearer").AddIdentityServerAuthentication(options =>
    {
        options.Authority = Configuration.GetSection("IdentityUrlExternal").Value;
        options.RequireHttpsMetadata = false;
        options.ApiName = "appointment";
    });

    services.AddSingleton<ISubscriptionEventBus, EventBusKafka>(sp =>
    {
        var iLifetimeScope = sp.GetRequiredService<ILifetimeScope>();
        var consumerFactory = sp.GetRequiredService<IKafkaConsumerFactory>();
        var kafkaSettings = sp.GetRequiredService<IOptions<KafkaSettings>>().Value;
        var kafkaConfig = new KafkaConsumerConfiguration(kafkaSettings.BrokerAddresses, kafkaSettings.GroupId,
            kafkaSettings.ClientId, kafkaSettings.SubscribedTopics);
        var valueDeserializer = new StringDeserializer();
        var topics = kafkaSettings.SubscribedTopics;
        var eventBusSubscriptionsManager = sp.GetRequiredService<IEventBusSubscriptionsManager>();
        return new EventBusKafka(eventBusSubscriptionsManager, consumerFactory, iLifetimeScope, kafkaConfig, valueDeserializer);
    });

    services.AddSwaggerDocumentation(Configuration);
    services.AddTransient<IntegrationTestEventHandler>();
}
private Consumer<Null, string> GetConsumer(string groupName = "default-group-name", bool autoCommit = true)
{
    var conf = new ConfigurationBuilder()
        .AddJsonFile(Path.Combine(PlatformServices.Default.Application.ApplicationBasePath, "sensitive.json"), false)
        .Build();
    var brokerList = conf["Kafka:Server:Hosts:0"];

    var advancedConsumerConfig = new Dictionary<string, object>
    {
        ["group.id"] = groupName,
        // Automatically commit offsets in the background
        ["enable.auto.commit"] = autoCommit,
        ["auto.commit.interval.ms"] = 5000,
        ["statistics.interval.ms"] = 60000,
        ["bootstrap.servers"] = brokerList,
        ["default.topic.config"] = new Dictionary<string, object>
        {
            // Start from the earliest offset when no committed offset exists
            ["auto.offset.reset"] = "earliest"
        }
    };

    var valDeserializer = new StringDeserializer(Encoding.UTF8);
    var consumer = new Consumer<Null, string>(advancedConsumerConfig, null, valDeserializer);
    ConfigureEvent(consumer);
    return consumer;
}
static void Main(string[] args)
{
    Console.WriteLine("Starting Consumer!");

    var config = new Dictionary<string, object>
    {
        { "group.id", "dotnet-consumer-group" },
        { "bootstrap.servers", "kafka-1:9092" },
        { "auto.commit.interval.ms", 5000 },
        { "auto.offset.reset", "earliest" }
    };

    // Use separate instances for the key and value deserializers: the Consumer constructor
    // tests later in this section show that passing the same object for both throws.
    var keyDeserializer = new StringDeserializer(Encoding.UTF8);
    var valueDeserializer = new StringDeserializer(Encoding.UTF8);

    using (var consumer = new Consumer<string, string>(config, keyDeserializer, valueDeserializer))
    {
        consumer.OnMessage += (_, msg) =>
            Console.WriteLine($"Read ('{msg.Key}', '{msg.Value}') from: {msg.TopicPartitionOffset}");
        consumer.OnError += (_, error) =>
            Console.WriteLine($"Error: {error}");
        consumer.OnConsumeError += (_, msg) =>
            Console.WriteLine($"Consume error ({msg.TopicPartitionOffset}): {msg.Error}");

        consumer.Subscribe("hello_world_topic");

        while (true)
        {
            consumer.Poll(TimeSpan.FromMilliseconds(100));
        }
    }
}
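The Kafka snippets in this section use the pre-1.0 Confluent.Kafka API, where a StringDeserializer instance is passed to the Consumer constructor. For comparison, here is a minimal sketch of the same loop on the 1.x ConsumerBuilder API, with the broker, group id, and topic carried over from the example above; in 1.x, UTF-8 deserialization is the default for string keys and values, so no explicit deserializer is needed.

// Sketch only: the equivalent consumer loop on Confluent.Kafka 1.x.
// Broker address, group id, and topic name are taken from the example above.
using System;
using Confluent.Kafka;

class HelloWorldConsumer
{
    static void Main()
    {
        var config = new ConsumerConfig
        {
            GroupId = "dotnet-consumer-group",
            BootstrapServers = "kafka-1:9092",
            AutoOffsetReset = AutoOffsetReset.Earliest
        };

        // string keys and values are deserialized as UTF-8 by default in 1.x.
        using (var consumer = new ConsumerBuilder<string, string>(config).Build())
        {
            consumer.Subscribe("hello_world_topic");
            while (true)
            {
                var result = consumer.Consume(); // blocks until a message arrives
                Console.WriteLine($"Read ('{result.Message.Key}', '{result.Message.Value}') from: {result.TopicPartitionOffset}");
            }
        }
    }
}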
public void should_deserialize_Empty_xml_to_empty_object()
{
    var deserializer = new StringDeserializer();
    var testObject = deserializer.DeserializeApiResponse<TestEmptyObject>(EmptyXmlResponse);
    Assert.That(testObject, Is.Not.Null);
}
public KafkaConsumerClient(string groupId, KafkaOptions options)
{
    _groupId = groupId;
    _kafkaOptions = options ?? throw new ArgumentNullException(nameof(options));
    StringDeserializer = new StringDeserializer(Encoding.UTF8);
    InitKafkaClient();
}
public void should_deserialize_Empty_xml_to_empty_object()
{
    var deserializer = new StringDeserializer<TestEmptyObject>();
    var testObject = deserializer.Deserialize(EmptyXmlResponse);
    Assert.That(testObject, Is.Not.Null);
}
private static Consumer<string, string> CreateKafkaConsumer(string groupId)
{
    var consumerConfig = GetKafkaConsumerConfig(groupId);
    var stringDeserializer = new StringDeserializer(Encoding.UTF8);
    var keyDeserializer = new StringDeserializer(Encoding.UTF8);
    var consumer = new Consumer<string, string>(consumerConfig, keyDeserializer, stringDeserializer);
    return consumer;
}
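The consumer returned here still has to be subscribed and polled. A minimal usage sketch in the same pre-1.0 event/poll style follows; the topic name and group id are placeholders, and GetKafkaConsumerConfig is assumed to supply at least bootstrap.servers alongside the group id.

// Sketch only: typical use of the consumer returned by CreateKafkaConsumer.
// "example-group" and "example-topic" are placeholders.
private static void RunConsumerLoop()
{
    using (var consumer = CreateKafkaConsumer("example-group"))
    {
        consumer.OnMessage += (_, msg) =>
            Console.WriteLine($"{msg.TopicPartitionOffset}: {msg.Key} -> {msg.Value}");
        consumer.OnError += (_, error) =>
            Console.WriteLine($"Broker error: {error}");

        consumer.Subscribe("example-topic");

        while (true)
        {
            // Poll dispatches the OnMessage/OnError callbacks for any queued events.
            consumer.Poll(TimeSpan.FromMilliseconds(100));
        }
    }
}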
public void TestSerializationConfig()
{
    var config = new SerializationConfig();
    ISerializer ser = new StringSerializer();
    IDeserializer deser = new StringDeserializer();

    var t1 = Tuple.Create(ser, ser);
    var t2 = Tuple.Create(ser, ser);
    Assert.That(t1, Is.EqualTo(t2));

    var s = config.GetSerializersForTopic("topicnotfound");
    Assert.That(s, Is.EqualTo(Tuple.Create(ByteArraySerialization.DefaultSerializer, ByteArraySerialization.DefaultSerializer)));
    var d = config.GetDeserializersForTopic("topicnotfound");
    Assert.That(d, Is.EqualTo(Tuple.Create(ByteArraySerialization.DefaultDeserializer, ByteArraySerialization.DefaultDeserializer)));

    config.SetSerializersForTopic("topicnotfound", ser, ser);
    config.SetDeserializersForTopic("topicnotfound", deser, deser);
    s = config.GetSerializersForTopic("topicnotfound");
    Assert.That(s, Is.EqualTo(Tuple.Create(ser, ser)));
    d = config.GetDeserializersForTopic("topicnotfound");
    Assert.That(d, Is.EqualTo(Tuple.Create(deser, deser)));

    var config2 = new SerializationConfig(config);
    s = config2.GetSerializersForTopic("topicnotfound");
    Assert.That(s, Is.EqualTo(Tuple.Create(ser, ser)));
    d = config2.GetDeserializersForTopic("topicnotfound");
    Assert.That(d, Is.EqualTo(Tuple.Create(deser, deser)));

    config2.SetSerializersForTopic("topicnotfound", null, ser);
    config2.SetDeserializersForTopic("topicnotfound", null, deser);
    s = config2.GetSerializersForTopic("topicnotfound");
    Assert.That(s, Is.EqualTo(Tuple.Create(ByteArraySerialization.DefaultSerializer, ser)));
    d = config2.GetDeserializersForTopic("topicnotfound");
    Assert.That(d, Is.EqualTo(Tuple.Create(ByteArraySerialization.DefaultDeserializer, deser)));

    config2.SetSerializersForTopic("topicnotfound", ser, null);
    config2.SetDeserializersForTopic("topicnotfound", deser, null);
    s = config2.GetSerializersForTopic("topicnotfound");
    Assert.That(s, Is.EqualTo(Tuple.Create(ser, ByteArraySerialization.DefaultSerializer)));
    d = config2.GetDeserializersForTopic("topicnotfound");
    Assert.That(d, Is.EqualTo(Tuple.Create(deser, ByteArraySerialization.DefaultDeserializer)));
}
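These SerializationConfig tests exercise a cluster-level client API (the same one used by the command-line Main near the end of this section), where per-topic serializers are attached to the cluster configuration rather than to a consumer instance. A minimal sketch of that wiring, assuming a broker at localhost:9092 and a topic named "example":

// Sketch only: attaching a StringSerializer/StringDeserializer pair to one topic
// on a ClusterClient. The seed address and topic name are placeholders.
var serializationConfig = new SerializationConfig { SerializeOnProduce = true };
serializationConfig.SetSerializersForTopic("example", new StringSerializer(), new StringSerializer());
serializationConfig.SetDeserializersForTopic("example", new StringDeserializer(), new StringDeserializer());

var configuration = new Configuration
{
    Seeds = "localhost:9092",
    SerializationConfig = serializationConfig
};

var cluster = new ClusterClient(configuration, new ConsoleLogger());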
/// <summary>
/// Initializes a new instance of the <see cref="StoreNotificationStoreProvider"/> class.
/// </summary>
public StoreNotificationStoreProvider()
{
    _timeout = TimeSpan.FromMilliseconds(KafkaSettings.PollingIntervalInMilliseconds);
    _deserializer = new StringDeserializer(Encoding.UTF8);
    _config = new Dictionary<string, object>
    {
        { KafkaSettings.DebugKey, KafkaSettings.DebugContexts },
        { KafkaSettings.BrokerListKey, KafkaSettings.BrokerList },
        { KafkaSettings.EnableAutoCommitKey, KafkaSettings.EnableAutoCommit }
    };
}
public void DeserializerConstructValueViaConfig()
{
    string testString = "hello world";
    var serialized = new StringSerializer(Encoding.UTF8).Serialize("mytopic", testString);

    var config = new Dictionary<string, object>();
    config.Add("dotnet.string.deserializer.encoding.value", "utf-8");

    var deserializer = new StringDeserializer();
    var newConfig = deserializer.Configure(config, false);

    Assert.Equal(0, newConfig.Count());
    Assert.Equal(testString, deserializer.Deserialize("mytopic", serialized));
}
public virtual Task Run()
{
    var kafkaConfig = new Dictionary<string, object>
    {
        { "group.id", _config.GroupId },
        { "bootstrap.servers", _config.BrokerList }
    };

    return Task.Run(() =>
    {
        var typeName = typeof(TPayload).Name;
        _logger.LogInformation($"{typeName} Event Subscriber Starting");

        // Use separate key and value deserializer instances; the Consumer constructor
        // tests in this section show that passing the same object for both throws.
        var keyDeserializer = new StringDeserializer(Encoding.UTF8);
        var valueDeserializer = new StringDeserializer(Encoding.UTF8);

        using (var consumer = new Consumer<string, string>(kafkaConfig, keyDeserializer, valueDeserializer))
        {
            consumer.Assign(new List<TopicPartitionOffset> { new TopicPartitionOffset(_config.Topic, 0, 0) });
            while (true)
            {
                Message<string, string> msg;
                if (consumer.Consume(out msg, TimeSpan.FromSeconds(1)))
                {
                    try
                    {
                        _logger.LogInformation($"{typeName} Event Received: {msg.Value}");
                        OnRawEventReceived(new ReceivedRawEventArgs(msg.Key, msg.Value));
                        var evt = _eventSerializer.Deserialize(msg.Value);
                        if (evt?.Validate() == true)
                        {
                            OnValidEventReceived(new ReceivedEventArgs<TPayload>(evt));
                        }
                    }
                    catch (JsonSerializationException exc)
                    {
                        _logger.LogWarning($"{typeName} Event Error: {exc}");
                        // 'return null' does not compile inside this Action lambda;
                        // a plain return stops the subscriber on a serialization error.
                        return;
                    }
                }
            }
        }

        _logger.LogInformation($"{typeName} Event Subscriber Ending");
    });
}
private static void RegisterConfluentKafkaConsumer(ContainerBuilder containerBuilder, ConfluentConsumerSettings settings)
{
    var consumerConfig = settings.AsDictionary();
    var keyDeserializer = new StringDeserializer(Encoding.UTF8);
    var valueDeserializer = new JsonDeserializer<string>();
    var consumer = new Consumer<string, string>(consumerConfig, keyDeserializer, valueDeserializer);

    containerBuilder.RegisterInstance(consumer)
        .As<Consumer<string, string>>();
}
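Because the consumer is registered as a single instance, consuming code resolves it from the Autofac container at runtime. A minimal sketch, assuming the container has already been built from this ContainerBuilder and that the subscribed topic is configured elsewhere:

// Sketch only: resolving and using the registered consumer elsewhere in the application.
// "example-topic" is a placeholder; the real topics would come from ConfluentConsumerSettings.
private static void ConsumeFromContainer(IContainer container)
{
    using (var scope = container.BeginLifetimeScope())
    {
        var consumer = scope.Resolve<Consumer<string, string>>();
        consumer.OnMessage += (_, msg) => Console.WriteLine($"{msg.Topic}: {msg.Value}");
        consumer.Subscribe("example-topic");

        while (true)
        {
            consumer.Poll(TimeSpan.FromMilliseconds(100));
        }
    }
}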
public void DeserializeNoConfigValue()
{
    try
    {
        var deserializer = new StringDeserializer();
        deserializer.Configure(new Dictionary<string, object>(), false);
    }
    catch (Exception)
    {
        return;
    }

    Assert.True(false, "Exception expected");
}
public void should_deserialize_well_formed_xml()
{
    var deserializer = new StringDeserializer();
    var testObject = deserializer.DeserializeApiResponse<TestObject>(TestObjectXmlResponse);

    Assert.That(testObject, Is.Not.Null);
    Assert.That(testObject.Id, Is.EqualTo(1));
    Assert.That(testObject.Name, Is.EqualTo("A big test object"));
    Assert.That(testObject.StringList, Is.Not.Null);
    Assert.That(testObject.StringList.Count, Is.GreaterThan(0));
    Assert.That(testObject.ObjectList, Is.Not.Null);
    Assert.That(testObject.ObjectList.Count, Is.GreaterThan(0));
}
public void should_deserialize_well_formed_xml()
{
    var deserializer = new StringDeserializer<TestObject>();
    var testObject = deserializer.Deserialize(TestObjectXmlResponse);

    Assert.That(testObject, Is.Not.Null);
    Assert.That(testObject.Id, Is.EqualTo(1));
    Assert.That(testObject.Name, Is.EqualTo("A big test object"));
    Assert.That(testObject.StringList, Is.Not.Null);
    Assert.That(testObject.StringList.Count, Is.GreaterThan(0));
    Assert.That(testObject.ObjectList, Is.Not.Null);
    Assert.That(testObject.ObjectList.Count, Is.GreaterThan(0));
}
public void TestStringSerialization()
{
    using (var stream = new MemoryStream())
    {
        var serializer = new StringSerializer();
        Assert.That(() => serializer.Serialize(Value, stream), Throws.TypeOf<ArgumentException>());
        Assert.That(() => serializer.Serialize(TheValue, stream), Is.EqualTo(Value.Length));
        Assert.That(stream.Length, Is.EqualTo(Value.Length));
        CompareArrays(Value, stream.GetBuffer(), 0);

        var deserializer = new StringDeserializer();
        stream.Position = 0;
        var output = deserializer.Deserialize(stream, Value.Length) as string;
        Assert.That(output, Is.EqualTo(TheValue));
    }
}
public void DeserializeInvalidConfigValue()
{
    var config = new Dictionary<string, object>();
    config.Add("dotnet.string.deserializer.encoding.value", "invalid-encoding");

    try
    {
        var deserializer = new StringDeserializer();
        deserializer.Configure(config, false);
    }
    catch (Exception)
    {
        return;
    }

    Assert.True(false, "Exception expected");
}
public void DeserializeDoubleConfigValue()
{
    var config = new Dictionary<string, object>();
    config.Add("dotnet.string.deserializer.encoding.value", "utf-8");

    try
    {
        var deserializer = new StringDeserializer(Encoding.UTF32);
        deserializer.Configure(config, false);
    }
    catch (ArgumentException)
    {
        return;
    }

    Assert.True(false, "Exception expected");
}
public void Constuctor()
{
    // Throw exception if 'group.id' is not set in config and ensure that exception
    // mentions 'group.id'.
    var config = new Dictionary<string, object>();
    var e = Assert.Throws<ArgumentException>(() => { var c = new Consumer(config); });
    Assert.True(e.Message.Contains("group.id"));

    e = Assert.Throws<ArgumentException>(() =>
    {
        var c = new Consumer<Null, string>(config, null, new StringDeserializer(Encoding.UTF8));
    });
    Assert.True(e.Message.Contains("group.id"));

    // Throw exception if a config value is null and ensure that exception mentions the
    // respective config key.
    var configWithNullValue = CreateValidConfiguration();
    configWithNullValue["sasl.password"] = null;
    e = Assert.Throws<ArgumentException>(() =>
    {
        var c = new Consumer<byte[], byte[]>(configWithNullValue, new ByteArrayDeserializer(), new ByteArrayDeserializer());
    });
    Assert.Contains("sasl.password", e.Message);

    // Throw exception if a config value within default.topic.config is null and
    // ensure that exception mentions the respective config key.
    var configWithDefaultTopicNullValue = CreateValidConfiguration();
    configWithDefaultTopicNullValue["default.topic.config"] = new Dictionary<string, object>()
    {
        { "auto.offset.reset", null }
    };
    e = Assert.Throws<ArgumentException>(() =>
    {
        var c = new Consumer<byte[], byte[]>(configWithDefaultTopicNullValue, new ByteArrayDeserializer(), new ByteArrayDeserializer());
    });
    Assert.Contains("default.topic.config", e.Message);
    Assert.Contains("auto.offset.reset", e.Message);

    // Throw exception when the same deserializer instance is used for both keys and values
    // and ensure that the exception message indicates the issue.
    e = Assert.Throws<ArgumentException>(() =>
    {
        var validConfig = CreateValidConfiguration();
        var deserializer = new StringDeserializer(Encoding.UTF8);
        var c = new Consumer<string, string>(validConfig, deserializer, deserializer);
    });
    Assert.True(e.Message.Contains("must not be the same object"));

    // Positive case covered by integration tests. Here, we avoid creating a rd_kafka_t instance.
}
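CreateValidConfiguration is not shown with this test. Judging from the inline configuration used in the shorter variant of this test later in the section, it presumably builds a minimal dictionary along these lines (broker address and group id are placeholders):

// Sketch only: a minimal CreateValidConfiguration, mirroring the inline config used
// in the later variant of this constructor test.
private static Dictionary<string, object> CreateValidConfiguration()
{
    return new Dictionary<string, object>
    {
        { "bootstrap.servers", "localhost:9092" },
        { "group.id", "my-group" }
    };
}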
private void RegisterEventBus(IServiceCollection services)
{
    services.Configure<KafkaSettings>(Configuration.GetSection("Kafka"));

    services.AddSingleton<ISubscriptionEventBus, EventBusKafka>(sp =>
    {
        var iLifetimeScope = sp.GetRequiredService<ILifetimeScope>();
        var consumerFactory = sp.GetRequiredService<IKafkaConsumerFactory>();
        var kafkaSettings = sp.GetRequiredService<IOptions<KafkaSettings>>().Value;
        var kafkaConfig = new KafkaConsumerConfiguration(kafkaSettings.BrokerAddresses, kafkaSettings.GroupId,
            kafkaSettings.ClientId, kafkaSettings.SubscribedTopics);
        var valueDeserializer = new StringDeserializer();
        var topics = kafkaSettings.SubscribedTopics;
        var eventBusSubscriptionsManager = sp.GetRequiredService<IEventBusSubscriptionsManager>();
        return new EventBusKafka(eventBusSubscriptionsManager, consumerFactory, iLifetimeScope, kafkaConfig, valueDeserializer);
    });

    services.AddSingleton<IEventBusSubscriptionsManager, InMemoryEventBusSubscriptionsManager>();
}
public KafkaTopicConsumer(string brokerList, List<string> topics, SslConfig sslConfig)
{
    var config = new Dictionary<string, object>
    {
        { "bootstrap.servers", brokerList },
        { "group.id", "euler" },
        { "enable.auto.commit", false },
        { "auto.commit.interval.ms", 5000 },
        { "statistics.interval.ms", 60000 },
        { "session.timeout.ms", 6000 },
        { "auto.offset.reset", "earliest" }
    };

    SetSslConfig(sslConfig, config);

    var keyDeserialiser = new StringDeserializer(Encoding.UTF8);
    var valueDeserialiser = new StringDeserializer(Encoding.UTF8);
    this.consumer = new Consumer<string, string>(config, keyDeserialiser, valueDeserialiser);
    this.topics = topics;
}
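SetSslConfig is referenced but not shown here. A plausible sketch, assuming SslConfig simply carries certificate file paths, maps it onto librdkafka's standard SSL settings:

// Sketch only: one possible SetSslConfig, assuming SslConfig exposes these properties.
// The config keys (security.protocol, ssl.ca.location, ...) are standard librdkafka settings;
// the SslConfig property names are assumptions.
private static void SetSslConfig(SslConfig sslConfig, Dictionary<string, object> config)
{
    if (sslConfig == null)
    {
        return; // plaintext connection
    }

    config["security.protocol"] = "ssl";
    config["ssl.ca.location"] = sslConfig.CaCertificatePath;               // assumed property
    config["ssl.certificate.location"] = sslConfig.ClientCertificatePath;  // assumed property
    config["ssl.key.location"] = sslConfig.ClientKeyPath;                  // assumed property
}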
public void Constuctor()
{
    var config = new Dictionary<string, object>();
    var e = Assert.Throws<ArgumentException>(() => { var c = new Consumer(config); });
    Assert.True(e.Message.Contains("group.id"));

    e = Assert.Throws<ArgumentException>(() =>
    {
        var c = new Consumer<Null, string>(config, null, new StringDeserializer(Encoding.UTF8));
    });
    Assert.True(e.Message.Contains("group.id"));

    e = Assert.Throws<ArgumentException>(() =>
    {
        var validConfig = new Dictionary<string, object>
        {
            { "bootstrap.servers", "localhost:9092" },
            { "group.id", "my-group" }
        };
        var deserializer = new StringDeserializer(Encoding.UTF8);
        var c = new Consumer<string, string>(validConfig, deserializer, deserializer);
    });
    Assert.True(e.Message.Contains("must not be the same object"));

    // Positive case covered by integration tests. Here, we avoid creating a rd_kafka_t instance.
}
private static async Task Execute(string authorization, string hostname, string organization)
{
    var labelQueryProvider = new TextResourceProvider();
    var stringSerializer = new StringSerializer();
    var stringDeserializer = new StringDeserializer();
    var mapper = new DataToOwnerMapper();

    var configurationProvider = new ConfigurationProvider();
    configurationProvider.SetConfigurationValue("authorization", authorization);

    var webClient = new WebClient(stringSerializer, stringDeserializer, configurationProvider);
    var factory = new RepositoryQueryFactory(labelQueryProvider, stringSerializer);
    var queryExecutor = new RepositoryQueryExecutor(stringDeserializer, mapper, webClient);

    var query = factory.GetQuery(organization);
    var response = await queryExecutor.ExecuteQuery(hostname, query, WriteProgress);
    var responseAsString = stringSerializer.Serialize(response);

    var filePath = System.IO.Path.GetTempPath() + $"LabelRetrievalOutput_{DateTime.UtcNow.ToBinary()}.json";
    System.IO.File.WriteAllText(filePath, responseAsString);
    Console.WriteLine(filePath);
    Console.WriteLine();

    var uniquelyNamedLabels = response.Repositories
        .SelectMany(repo => repo.Labels)
        .Distinct((x, y) => x.Name == y.Name)
        .ToList();
    Console.WriteLine($"Printing All Uniquely Named Labels (Total: {uniquelyNamedLabels.Count})");
    uniquelyNamedLabels.ForEach(label => Console.WriteLine(label.Name));
}
public void should_throw_exception_when_deserialize_into_wrong_type()
{
    var deserializer = new StringDeserializer<Status>();
    Assert.Throws<InvalidOperationException>(() => deserializer.Deserialize(TestObjectXmlResponse));
}
public void should_throw_exception_when_deserialize_into_wrong_type_such_as_one_that_is_not_wrapped_in_a_response_tag()
{
    var deserializer = new StringDeserializer<Status>();
    Assert.Throws<UnexpectedXmlContentException>(() =>
        deserializer.Deserialize(TestObjectXmlResponse.Replace("response", "rexponse")));
}
public void should_throw_exception_when_deserialize_into_wrong_type_such_as_one_that_is_not_wrapped_in_a_response_tag()
{
    var deserializer = new StringDeserializer();
    Assert.Throws<UnexpectedXmlContentException>(() =>
        deserializer.DeserializeApiResponse<Status>(TestObjectXmlResponse.Replace("response", "rexponse")));
}
public void should_throw_exception_when_deserialize_into_wrong_type()
{
    var deserializer = new StringDeserializer();
    Assert.Throws<UnexpectedXmlContentException>(() =>
        deserializer.DeserializeApiResponse<Status>(TestObjectXmlResponse));
}
private static void Main(string[] args)
{
    Mode mode = Mode.Profile;
    bool mix = false;
    var configuration = new Configuration { ConsumeBatchSize = 100 };

    // Ugly command line parsing
    string curOpt = "";
    try
    {
        bool seeds = false;
        bool topics = false;
        for (int i = 0; i < args.Length; ++i)
        {
            curOpt = args[i];
            switch (args[i])
            {
                case "--global": configuration.BatchStrategy = BatchStrategy.Global; break;
                case "--mix": mix = true; break;
                case "--stress": mode = Mode.Stress; break;
                case "--hard": mode = Mode.StressHard; break;
                case "--discard": configuration.ErrorStrategy = ErrorStrategy.Discard; break;
                case "--retry": configuration.ErrorStrategy = ErrorStrategy.Retry; break;
                case "--gzip": configuration.CompressionCodec = CompressionCodec.Gzip; break;
                case "--snappy": configuration.CompressionCodec = CompressionCodec.Snappy; break;
                case "--no-ack": configuration.RequiredAcks = RequiredAcks.None; break;
                case "--all-sync-ack": configuration.RequiredAcks = RequiredAcks.AllInSyncReplicas; break;
                case "--ttl": configuration.MessageTtl = TimeSpan.FromSeconds(int.Parse(args[++i])); break;
                case "--batch": configuration.ProduceBatchSize = int.Parse(args[++i]); break;
                case "--time": configuration.ProduceBufferingTime = TimeSpan.FromMilliseconds(int.Parse(args[++i])); break;
                case "--max-messages": configuration.MaxBufferedMessages = int.Parse(args[++i]); break;
                case "--topics": topics = true; _topics = args[++i].Split(','); break;
                case "--seeds": seeds = true; configuration.Seeds = args[++i]; break;
                case "--clientid": configuration.ClientId = args[++i]; break;
                case "--concurrency": configuration.MaximumConcurrency = int.Parse(args[++i]); break;
                case "--send-buffer": configuration.SendBufferSize = int.Parse(args[++i]); break;
                case "--receive-buffer": configuration.ReceiveBufferSize = int.Parse(args[++i]); break;
                case "--timeout": configuration.RequestTimeoutMs = int.Parse(args[++i]); break;
                case "--min-bytes": configuration.FetchMinBytes = int.Parse(args[++i]); break;
                case "--max-wait": configuration.FetchMaxWaitTime = int.Parse(args[++i]); break;
                case "--max-bytes": configuration.FetchMessageMaxBytes = int.Parse(args[++i]); break;
                case "--delay": configuration.NumberOfMessagesBeforeRoundRobin = int.Parse(args[++i]); break;
                case "--consume":
                {
                    _consumeFrom = long.Parse(args[++i]);
                    var p = args[++i].Split(';');
                    _partitions = new int[p.Length][];
                    for (int j = 0; j < _partitions.Length; ++j)
                    {
                        _partitions[j] = p[j].Split(',').Select(int.Parse).ToArray();
                    }
                }
                break;
            }
        }

        // Minimal error management
        if (args.Length < 1 || !seeds || !topics)
        {
            throw new ArgumentException();
        }
    }
    catch
    {
        // Minimal error management
        Console.WriteLine("Syntax error in option {0}", curOpt);
        Usage();
        Environment.Exit(-1);
    }

    var serializer = new StringSerializer();
    var deserializer = new StringDeserializer();
    var serializationConfig = new SerializationConfig() { SerializeOnProduce = true };
    foreach (var topic in _topics)
    {
        serializationConfig.SetSerializersForTopic(topic, serializer, serializer);
        serializationConfig.SetDeserializersForTopic(topic, deserializer, deserializer);
    }
    configuration.SerializationConfig = serializationConfig;

    var cluster = new ClusterClient(configuration, new ConsoleLogger());

    if (_partitions == null)
    {
        var task = Start(mode, cluster);
        Console.ReadKey();
        _running = false;
        Console.ReadKey();
        task.Wait();
    }
    else
    {
        int i = 0;
        foreach (var topic in _topics)
        {
            var capturedTopic = topic;
            cluster.Messages
                .Where(kr => kr.Topic == capturedTopic)
                .Sample(TimeSpan.FromMilliseconds(15))
                .Subscribe(kr => Console.WriteLine("{0}/{1} {2}: {3}", kr.Topic, kr.Partition, kr.Offset, kr.Value as string));
            foreach (var p in _partitions[i])
            {
                cluster.Consume(topic, p, _consumeFrom);
            }
            ++i;
        }

        Task task = null;
        if (mix)
        {
            task = Start(mode, cluster);
        }
        Console.ReadKey();

        i = 0;
        foreach (var topic in _topics)
        {
            foreach (var p in _partitions[i])
            {
                if (p < 0)
                {
                    cluster.StopConsume(topic);
                }
                else
                {
                    cluster.StopConsume(topic, p);
                }
            }
            ++i;
        }

        if (task != null)
        {
            _running = false;
            task.Wait();
        }
    }

    Console.WriteLine(cluster.Statistics);
    Console.ReadKey();
    cluster.Dispose();
}
public KafkaConsumerClient(string groupId, KafkaSetting kafkaSetting)
{
    _groupId = groupId;
    _kafkaSetting = kafkaSetting ?? throw new ArgumentNullException(nameof(kafkaSetting));
    StringDeserializer = new StringDeserializer(Encoding.UTF8);
}