// Publishes a two-record batch of Avro-encoded Person values to "TestTopic".
// Fixed: was "async void" — exceptions thrown in an async void method are
// unobservable and crash the process; returning Task lets callers await and
// observe failures. Also replaced the pointless Convert.ToInt32(0) with 0.
private static async Task Produce()
{
    IConfluentClient client = new ConfluentClient(new MyConfluentClientSettings());

    var records = new[]
    {
        // First record pins partition 0; the second omits PartitionId so the
        // REST proxy chooses the partition.
        new AvroRecord<string, Person> { PartitionId = 0, Value = new Person { Name = Guid.NewGuid().ToString("N"), Age = 25 } },
        new AvroRecord<string, Person> { Value = new Person { Name = Guid.NewGuid().ToString("N"), Age = 26 } }
    };

    var recordSet = new AvroRecordSet<string, Person>(records)
    {
        //Creating schema using "Microsoft.Hadoop.Avro" - https://www.nuget.org/packages/Microsoft.Hadoop.Avro/
        ValueSchema = AvroSerializer.Create<Person>().ReaderSchema.ToString()
    };

    await client.PublishAsAvroAsync("TestTopic", recordSet);
}
/// <summary>
/// Creates an <see cref="IConfluentClient"/> targeting the given Kafka REST proxy.
/// </summary>
/// <param name="baseUrl">Base URL of the Kafka REST proxy (e.g. "http://localhost:8082").</param>
/// <returns>A configured client instance.</returns>
public static IConfluentClient CreateConfluentClient(string baseUrl)
{
    // "string" keyword preferred over "String" per C# naming conventions;
    // the redundant local was folded into the return.
    var settings = new KafkaClientSettings { KafkaBaseUrl = baseUrl };
    return new ConfluentClient(settings);
}
// Publishes a single ~5 KB unencoded message and asserts the REST proxy
// acknowledged it with at least one offset.
// Fixed: ".Result" blocks the calling thread (deadlock risk under a sync
// context) and wraps failures in AggregateException; awaiting keeps the
// original exception type. NUnit executes async Task test methods natively.
public async Task PublishMessage(string topic)
{
    var response = await new ConfluentClient(KafkaClientConfig.Uri).Publish(topic, new List<string> { FiveKiloByteUnEncodedMessage });
    var serializedRs = JsonConvert.DeserializeObject<KafkaRestResponse>(response);
    Assert.That(serializedRs.Offsets.Any());
}
/// <summary>
/// Creates an <see cref="IConfluentClient"/> targeting the given Kafka REST proxy.
/// </summary>
/// <param name="baseUrl">Base URL of the Kafka REST proxy (e.g. "http://localhost:8082").</param>
/// <returns>A configured client instance.</returns>
public static IConfluentClient CreateConfluentClient(string baseUrl)
{
    // "string" keyword preferred over "String"; the non-idiomatic
    // "return(client);" and the redundant local were folded into the return.
    var settings = new KafkaClientSettings { KafkaBaseUrl = baseUrl };
    return new ConfluentClient(settings);
}
// Load generator: publishes batches of random Person records to Kafka in an
// infinite loop while a timer prints throughput statistics once per second.
// The loop exits only when setup or Publish throws.
static void Main(string[] args)
{
    _stopWatch = Stopwatch.StartNew();
    // Fires PrintStatistics immediately, then every second on a pool thread.
    var timer = new Timer(PrintStatistics, null, TimeSpan.Zero, TimeSpan.FromSeconds(1));
    _client = new ConfluentClient(ConfigurationManager.AppSettings["Confluent.BaseUrl"]);
    try
    {
        int batchSize = Convert.ToInt32(ConfigurationManager.AppSettings["Confluent.BatchSize"]);
        var random = new Random();
        if (batchSize <= 0)
        {
            batchSize = 1; // guard against zero/negative configuration
        }
        while (true)
        {
            var people = new Person[batchSize];
            for (int i = 0; i < batchSize; i++)
            {
                // Tag each name with the running total so individual messages
                // can be traced on the consumer side.
                string name = string.Format("[{0}] {1}", _totalLogs, Guid.NewGuid().ToString("N"));
                people[i] = new Person { Name = name, Age = random.Next(20, 100) };
                Interlocked.Increment(ref _totalLogs);
            }
            // NOTE(review): sync-over-async (.Result) is tolerable in a console
            // Main, but failures surface wrapped in AggregateException.
            string response = _client.Publish(Topic, people).Result;
        }
    }
    catch (Exception ex)
    {
        // Stop the statistics timer before reporting the failure.
        timer.Change(Timeout.Infinite, Timeout.Infinite);
        Console.WriteLine(ex);
    }
    finally
    {
        // Fixed: the timer is IDisposable and was never disposed.
        timer.Dispose();
        Console.WriteLine("Done!");
        Console.ReadLine();
    }
}
// Creates (or re-attaches to) an Avro consumer instance, reads a batch of
// Person messages from "TestTopic", prints them, and commits the offsets so
// the next consume call does not re-deliver the same messages.
// Fixed: was "async void" — exceptions were unobservable and completion could
// not be awaited; returning Task fixes both. Modifier order normalized to
// "static async".
private static async Task Consume()
{
    IConfluentClient client = new ConfluentClient(new MyConfluentClientSettings());

    var request = new CreateConsumerRequest
    {
        // Confluent API will create a new InstanceId if not supplied
        InstanceId = "TestConsumerInstance",
        MessageFormat = MessageFormat.Avro
    };

    ConfluentResponse<ConsumerInstance> response = await client.CreateConsumerAsync("TestConsumerGroup", request);
    ConsumerInstance consumerInstance = response.Payload;

    ConfluentResponse<List<AvroMessage<string, Person>>> response2 = await client.ConsumeAsAvroAsync<string, Person>(consumerInstance, "TestTopic");
    foreach (AvroMessage<string, Person> message in response2.Payload)
    {
        Person person = message.Value;
        Console.WriteLine("Name: {0}, Age: {1}", person.Name, person.Age);
    }

    await client.CommitOffsetAsync(consumerInstance);
}
// Initializes the UI and connects the client to the REST proxy whose base
// URL is read from application settings.
public ConsumerApp()
{
    InitializeComponent();
    string baseUrl = ConfigurationManager.AppSettings["Confluent.BaseUrl"];
    _client = new ConfluentClient(baseUrl);
}
// Registers a consumer instance in the test consumer group, then synchronously
// reads the next batch of messages from the given topic.
// NOTE(review): .Result blocks the calling thread and surfaces failures as
// AggregateException; kept as-is to preserve the synchronous signature.
private static string ConsumeMessages(string topic)
{
    var client = new ConfluentClient(KafkaClientConfig.Uri);
    string createResponse = client.CreateConsumer(TestConstants.ConsumerGroup).Result;
    var consumer = JsonConvert.DeserializeObject<ConsumerResponse>(createResponse);
    return client.Consume(topic, TestConstants.ConsumerGroup, consumer.InstanceId).Result;
}
// Connects the consumer to the Kafka REST proxy configured for the test run.
public ConsumerClient() => _client = new ConfluentClient(KafkaClientConfig.Uri);