static Productor()
{
    options = new KafkaOptions(new Uri("http://localhost:9092"));
    router = new BrokerRouter(options);
    client = new KafkaNet.Producer(router);
    client.BatchDelayTime = TimeSpan.FromSeconds(2);
}
public async Task FetchMessagesCacheContainsAllRequestTest()
{
    // Creating a broker router and a protocol gateway for the producer and consumer
    var brokerRouter = new BrokerRouter(_options);
    var protocolGateway = new ProtocolGateway(_kafkaUri);

    Producer producer = new Producer(brokerRouter);
    ManualConsumer consumer = new ManualConsumer(_partitionId, _topic, protocolGateway, "TestClient", DefaultMaxMessageSetSize);

    var offset = await consumer.FetchLastOffset();

    // Creating 10 messages
    List<Message> messages = CreateTestMessages(10, 1);

    await producer.SendMessageAsync(_topic, messages, partition: _partitionId, timeout: TimeSpan.FromSeconds(3));

    // Now let's consume the first five
    var result = (await consumer.FetchMessages(5, offset)).ToList();
    CheckMessages(messages.Take(5).ToList(), result);

    // Now let's consume the rest
    result = (await consumer.FetchMessages(5, offset + 5)).ToList();
    CheckMessages(messages.Skip(5).ToList(), result);
}
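For context, a minimal sketch of the same ManualConsumer/ProtocolGateway pattern outside a test, assuming only the API shown above; the broker URI, topic, client id, and buffer size are placeholders, not values from the original code:

static async Task ReadFromCurrentEndSketch()
{
    // Assumed placeholders: localhost broker, partition 0, topic "TestTopic", 8192-byte fetch buffer.
    var gateway = new ProtocolGateway(new Uri("http://localhost:9092"));
    var consumer = new ManualConsumer(0, "TestTopic", gateway, "SketchClient", 8192);

    // Start from the current end of the partition, then poll a batch from that offset.
    long offset = await consumer.FetchLastOffset();
    var batch = (await consumer.FetchMessages(100, offset)).ToList();

    foreach (var message in batch)
    {
        Console.WriteLine(message.Value.ToUtf8String());
    }
}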
public static void twitterInfo()
{
    var options = new KafkaOptions(new Uri("http://localhost:9092"));
    var router = new BrokerRouter(options);
    var client = new KafkaNet.Producer(router);

    const String topic = "newtest1";

    const String tokenSecret = "FbcP68UtZR8U6n0AIMkBZHxgx4hzv3ibLQrU35qQipQ7Y";
    const String consumerSecret = "xL20m2W34vg8dhxjxvBs0R9vvx3dZCw0fYAG9UjhVRBqgHTQ9d";
    const String token = "2710082868-swCbtRmmODBOB6TMbsDGIQNMCUKoATTVAwbCQwi";
    const String consumerKey = "XC6GdAJVYF9jMQqS68bQOu6kG";

    Auth.SetUserCredentials(consumerKey, consumerSecret, token, tokenSecret);

    var stream = Tweetinvi.Stream.CreateFilteredStream();
    stream.AddTrack("Politics");
    stream.MatchingTweetReceived += (sender, arguments) =>
    {
        Console.WriteLine(arguments.Tweet.Text);
        client.SendMessageAsync(topic, new[] { new KafkaNet.Protocol.Message(arguments.Tweet.Text) }).Wait();
    };
    stream.StartStreamMatchingAllConditions();
}
public void ConsumerShouldConsumeInSameOrderAsProduced()
{
    var expected = new List<string> { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19" };
    var testId = Guid.NewGuid().ToString();

    using (var router = new BrokerRouter(new KafkaOptions(IntegrationConfig.IntegrationUri)))
    using (var producer = new Producer(router))
    {
        var offsets = producer.GetTopicOffsetAsync(IntegrationConfig.IntegrationTopic).Result;

        using (var consumer = new Consumer(new ConsumerOptions(IntegrationConfig.IntegrationTopic, router),
            offsets.Select(x => new OffsetPosition(x.PartitionId, x.Offsets.Max())).ToArray()))
        {
            for (int i = 0; i < 20; i++)
            {
                producer.SendMessageAsync(IntegrationConfig.IntegrationTopic, new[] { new Message(i.ToString(), testId) }).Wait();
            }

            var results = consumer.Consume().Take(20).ToList();

            // ensure the produced messages arrived
            Console.WriteLine("Message order: {0}", string.Join(", ", results.Select(x => x.Value.ToUtf8String()).ToList()));

            Assert.That(results.Count, Is.EqualTo(20));
            Assert.That(results.Select(x => x.Value.ToUtf8String()).ToList(), Is.EqualTo(expected), "Expected the message list in the correct order.");
            Assert.That(results.Any(x => x.Key.ToUtf8String() != testId), Is.False);
        }
    }
}
public void Main(string[] args)
{
    var options = GetOptions(args);
    if (options == null) return;

    var count = 0;
    var lastCount = 0;
    var reporter = new Task(() =>
    {
        while (true)
        {
            var current = count;
            Console.WriteLine("{0} messages in last second.", current - lastCount);
            lastCount = current;
            Thread.Sleep(1000);
        }
    });

    var kafkaOptions = new KafkaOptions(options.KafkaNodeUri); // { Log = new ConsoleLog() };

    using (var router = new BrokerRouter(kafkaOptions))
    using (var client = new KafkaNet.Producer(router))
    {
        reporter.Start();

        while (true)
        {
            Thread.Sleep(100);
            client.SendMessageAsync("TestHarness", new[] { new Message() { Value = BitConverter.GetBytes(DateTime.Now.Ticks) } });
            count++;
        }
    }
}
public void Run()
{
    var options = new KafkaOptions(new Uri("http://localhost:9092"));
    var router = new BrokerRouter(options);
    _producer = new KafkaNet.Producer(router);
    Process();
}
public void ConsumerShouldBeAbleToSeekBackToEarlierOffset()
{
    var producer = new Producer(_router);
    var offsets = producer.GetTopicOffsetAsync("LoadTest").Result
        .Select(x => new OffsetPosition(x.PartitionId, x.Offsets.Max())).ToArray();

    var consumer = new Consumer(new ConsumerOptions("LoadTest", _router), offsets);

    var tasks = new List<Task<List<ProduceResponse>>>();
    for (int i = 0; i < 20; i++)
    {
        tasks.Add(producer.SendMessageAsync("LoadTest", new[] { new Message { Value = i.ToString(), Key = "1" } }));
    }
    Task.WaitAll(tasks.ToArray());

    var results = consumer.Consume().Take(20).ToList();

    // ensure the produced messages arrived
    for (int i = 0; i < 20; i++)
    {
        Assert.That(results[i].Value == i.ToString());
    }

    // seek back to the initial offset and consume again
    consumer.SetOffsetPosition(offsets);
    results = consumer.Consume().Take(20).ToList();

    // ensure all produced messages arrive again
    for (int i = 0; i < 20; i++)
    {
        Assert.That(results[i].Value == i.ToString());
    }
}
public virtual async Task UpsertDocument<TDocument>(IUpsertDocumentContext<TDocument> context) where TDocument : class
{
    var topicName = context.Document.GetType().Name;
    var config = this._producerConfigManager.GetConfiguration(x => (x.ConfigurationScope & ConfigurationScope.Producer) == ConfigurationScope.Producer);

    var options = new KafkaOptions(new Uri("http://localhost:9092"));
    var router = new BrokerRouter(options);
    var client = new KafkaNet.Producer(router);

    await client.SendMessageAsync(topicName,
        new[] { new Message("!!!Test message sent from alternative .Net => kafka provider!!!") },
        -1,
        TimeSpan.FromSeconds(5));

    //var valueSerialiser = new BinarySerializer<TDocument>();
    //var keySerialiser = new BinarySerializer<Guid>();
    //var deliveryHandler = new DeliveryHandler<Guid, TDocument>();

    //using (var producer = new Producer<Guid, TDocument>(config, keySerialiser, valueSerialiser))
    //{
    //    var deliveryReport = await producer.ProduceAsync(topicName, context.Id, context.Document);
    //    //producer.ProduceAsync(topicName, null, context.Document, deliveryHandler);
    //    //producer.Flush();
    //}
}
public void ConsumerShouldBeAbleToGetCurrentOffsetInformation()
{
    var producer = new Producer(_router);
    var startOffsets = producer.GetTopicOffsetAsync("LoadTest").Result
        .Select(x => new OffsetPosition(x.PartitionId, x.Offsets.Max())).ToArray();

    var consumer = new Consumer(new ConsumerOptions("LoadTest", _router), startOffsets);

    var tasks = new List<Task<List<ProduceResponse>>>();
    for (int i = 0; i < 20; i++)
    {
        tasks.Add(producer.SendMessageAsync("LoadTest", new[] { new Message { Value = i.ToString(), Key = "1" } }));
    }
    Task.WaitAll(tasks.ToArray());

    var results = consumer.Consume().Take(20).ToList();

    // ensure the produced messages arrived
    for (int i = 0; i < 20; i++)
    {
        Assert.That(results[i].Value == i.ToString());
    }

    // the current offsets should be 20 positions higher than start
    var currentOffsets = consumer.GetOffsetPosition();
    Assert.That(currentOffsets.Sum(x => x.Offset) - startOffsets.Sum(x => x.Offset), Is.EqualTo(20));
}
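The two offset APIs exercised in these tests pair naturally as a checkpoint-and-rewind mechanism. A small sketch, assuming the same Consumer instance and OffsetPosition type used above; the surrounding setup is omitted:

// Snapshot the consumer's current partition offsets...
OffsetPosition[] checkpoint = consumer.GetOffsetPosition().ToArray();

// ...consume some more messages...
var more = consumer.Consume().Take(10).ToList();

// ...then rewind to the snapshot so the same messages are delivered again.
consumer.SetOffsetPosition(checkpoint);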
public async Task ProtocolGateway()
{
    int partitionId = 0;
    var router = new BrokerRouter(Options);
    var producer = new Producer(router);

    string message1 = Guid.NewGuid().ToString();
    var response = await producer.SendMessageAsync(IntegrationConfig.IntegrationTopic, new[] { new Message(message1) }, 1, null, MessageCodec.CodecNone, partitionId);
    var offset = response.FirstOrDefault().Offset;

    ProtocolGateway protocolGateway = new ProtocolGateway(IntegrationConfig.IntegrationUri);
    var fetch = new Fetch
    {
        Topic = IntegrationConfig.IntegrationTopic,
        PartitionId = partitionId,
        Offset = offset,
        MaxBytes = 32000,
    };

    var fetches = new List<Fetch> { fetch };
    var fetchRequest = new FetchRequest
    {
        MaxWaitTime = 1000,
        MinBytes = 10,
        Fetches = fetches
    };

    var r = await protocolGateway.SendProtocolRequest(fetchRequest, IntegrationConfig.IntegrationTopic, partitionId);
    // var r1 = await protocolGateway.SendProtocolRequest(fetchRequest, IntegrationConfig.IntegrationTopic, partitionId);
    Assert.IsTrue(r.Messages.FirstOrDefault().Value.ToUtf8String() == message1);
}
public Producer()
{
    var options = new KafkaOptions(new Uri("http://localhost:9092"));
    var router = new BrokerRouter(options);
    client = new KafkaNet.Producer(router);
}
public void ProducerShouldReportCorrectAmountOfAsyncRequests()
{
    var semaphore = new SemaphoreSlim(0);
    var routerProxy = new FakeBrokerRouter();
    // block the second call returning from send message async
    routerProxy.BrokerConn0.ProduceResponseFunction = () => { semaphore.Wait(); return new ProduceResponse(); };

    var router = routerProxy.Create();
    using (var producer = new Producer(router, maximumAsyncRequests: 1) { BatchSize = 1 })
    {
        var messages = new[] { new Message("1") };

        Assert.That(producer.AsyncCount, Is.EqualTo(0));

        var sendTask = producer.SendMessageAsync(BrokerRouterProxy.TestTopic, messages);

        TaskTest.WaitFor(() => producer.AsyncCount > 0);
        Assert.That(producer.AsyncCount, Is.EqualTo(1), "One async operation should be sending.");

        semaphore.Release();
        sendTask.Wait(TimeSpan.FromMilliseconds(500));

        Assert.That(sendTask.IsCompleted, Is.True, "Send task should be marked as completed.");
        Assert.That(producer.AsyncCount, Is.EqualTo(0), "Async should now show zero count.");
    }
}
static void Main(string[] args) { const string topicName = "kafka.SimpleTopic"; TopicClient topicClient = new TopicClient(); TopicView topicView = topicClient.getTopic(topicName); List <Uri> brokers = getBrokerList(topicView); //create an options file that sets up driver preferences var options = new KafkaOptions() { Log = new ConsoleLog() }; options.KafkaServerUri = brokers; //start an out of process thread that runs a consumer that will write all received messages to the console Task.Run(() => { var consumer = new Consumer(new ConsumerOptions(topicName, new BrokerRouter(options)) { Log = new ConsoleLog() }); foreach (var data in consumer.Consume()) { Console.WriteLine("Response: P{0},O{1} : {2}", data.Meta.PartitionId, data.Meta.Offset, data.Value.ToUtf8String()); } }); //create a producer to send messages with var producer = new KafkaNet.Producer(new BrokerRouter(options)) { BatchSize = 100, BatchDelayTime = TimeSpan.FromMilliseconds(2000) }; //take in console read messages Console.WriteLine("Type a message and press enter..."); while (true) { var message = Console.ReadLine(); if (message == "quit") { break; } if (string.IsNullOrEmpty(message)) { //send a random batch of messages SendRandomBatch(producer, topicName, 200); } else { producer.SendMessageAsync(topicName, new[] { new Message(message) }); } } using (producer) { } }
public void EnsureProducerDisposesRouter()
{
    var router = _kernel.GetMock<IBrokerRouter>();

    var producer = new Producer(router.Object);
    using (producer) { }

    router.Verify(x => x.Dispose(), Times.Once());
}
static void Main(string[] args) { var options = new KafkaOptions(new Uri("http://CSDKAFKA01:9092"), new Uri("http://CSDKAFKA02:9092")) { Log = new ConsoleLog() }; var router = new BrokerRouter(options); var client = new Producer(router); Task.Factory.StartNew(() => { var consumer = new Consumer(new ConsumerOptions("TestHarness", router)); foreach (var data in consumer.Consume()) { Console.WriteLine("Response: P{0},O{1} : {2}", data.Meta.PartitionId, data.Meta.Offset, data.Value); } }); Console.WriteLine("Type a message and press enter..."); while (true) { var message = Console.ReadLine(); if (message == "quit") break; client.SendMessageAsync("TestHarness", new[] {new Message {Value = message}}); } using (client) using (router) { } }
public void EnsureGzipCanDecompressMessageFromKafka()
{
    var router = new BrokerRouter(_options);
    var producer = new Producer(router);

    var offsets = producer.GetTopicOffsetAsync(IntegrationConfig.IntegrationCompressionTopic).Result;

    var consumer = new Consumer(new ConsumerOptions(IntegrationConfig.IntegrationCompressionTopic, router) { PartitionWhitelist = new List<int>() { 0 } },
        offsets.Select(x => new OffsetPosition(x.PartitionId, x.Offsets.Max())).ToArray());

    int numberOfmessage = 3;
    for (int i = 0; i < numberOfmessage; i++)
    {
        producer.SendMessageAsync(IntegrationConfig.IntegrationCompressionTopic, new[] { new Message(i.ToString()) }, codec: MessageCodec.CodecGzip, partition: 0);
    }

    var results = consumer.Consume(new CancellationTokenSource(TimeSpan.FromMinutes(1)).Token).Take(numberOfmessage).ToList();

    for (int i = 0; i < numberOfmessage; i++)
    {
        Assert.That(results[i].Value.ToUtf8String(), Is.EqualTo(i.ToString()));
    }

    using (producer)
    using (consumer) { }
}
public DestinationKafka(params Uri[] servers)
{
    var options = new KafkaOptions(servers) { Log = new ConsoleLogger() };
    _router = new BrokerRouter(options);
    _producer = new Producer(_router, maximumMessageBuffer: 5000, maximumAsyncRequests: 10)
    {
        BatchSize = 1000,
        BatchDelayTime = TimeSpan.FromSeconds(1)
    };

    StatisticsTracker.OnStatisticsHeartbeat += StatisticsTracker_OnStatisticsHeartbeat;
}
public async Task ContarAnuncios(Anuncio anuncio)
{
    var options = new KafkaOptions(new Uri("http://localhost:9092"));
    var router = new BrokerRouter(options);
    var client = new KafkaNet.Producer(router);

    anuncio.Empresa = null;
    string jsonString = JsonConvert.SerializeObject(anuncio);

    await client.SendMessageAsync("anuncios", new[] { new Message(jsonString) });
}
static void Main(string[] args) { var options = new KafkaOptions(new Uri("http://evs01cpb592:9092")); var router = new BrokerRouter(options); var client = new KafkaNet.Producer(router); client.SendMessageAsync("testtopic", new[] { new Message("Hi Hello! Welcome to Kafka!") }).Wait(); Console.ReadLine(); }
public async void SendAsyncShouldGetOneResultForMessage()
{
    using (var router = new BrokerRouter(new KafkaOptions(IntegrationConfig.IntegrationUri)))
    using (var producer = new Producer(router))
    {
        var result = await producer.SendMessageAsync(IntegrationConfig.IntegrationTopic, new[] { new Message(Guid.NewGuid().ToString()) });

        Assert.That(result.Count, Is.EqualTo(1));
    }
}
public async void SendAsyncShouldGetAResultForEachPartitionSentTo()
{
    using (var router = new BrokerRouter(new KafkaOptions(IntegrationConfig.IntegrationUri)))
    using (var producer = new Producer(router))
    {
        var result = await producer.SendMessageAsync(IntegrationConfig.IntegrationTopic, new[] { new Message("1"), new Message("2"), new Message("3") });

        Assert.That(result.Count, Is.EqualTo(2));
    }
}
static void Main(string[] args) { var options = new KafkaOptions(new Uri("http://sdzyuban-mesos-01:31000")); var router = new BrokerRouter(options); var client = new KafkaNet.Producer(router); client.SendMessageAsync("test", new[] { new Message("hello world") }).Wait(); using (client) { } }
static void Main(string[] args) { const string topicName = "kafka.SimpleTopic"; TopicClient topicClient = new TopicClient(); TopicView topicView = topicClient.getTopic(topicName); List<Uri> brokers = getBrokerList(topicView); //create an options file that sets up driver preferences var options = new KafkaOptions() { Log = new ConsoleLog() }; options.KafkaServerUri = brokers; //start an out of process thread that runs a consumer that will write all received messages to the console Task.Run(() => { var consumer = new Consumer(new ConsumerOptions(topicName, new BrokerRouter(options)) { Log = new ConsoleLog() }); foreach (var data in consumer.Consume()) { Console.WriteLine("Response: P{0},O{1} : {2}", data.Meta.PartitionId, data.Meta.Offset, data.Value.ToUtf8String()); } }); //create a producer to send messages with var producer = new KafkaNet.Producer(new BrokerRouter(options)) { BatchSize = 100, BatchDelayTime = TimeSpan.FromMilliseconds(2000) }; //take in console read messages Console.WriteLine("Type a message and press enter..."); while (true) { var message = Console.ReadLine(); if (message == "quit") break; if (string.IsNullOrEmpty(message)) { //send a random batch of messages SendRandomBatch(producer, topicName, 200); } else { producer.SendMessageAsync(topicName, new[] { new Message(message) }); } } using (producer) { } }
public async Task ManualConsumerFailure()
{
    string topic = "TestTopicIssue13-3R-1P";
    var manualConsumer = new ManualConsumer(0, topic, new ProtocolGateway(_options), "test client", 10000);
    long offset = await manualConsumer.FetchLastOffset();

    var router = new BrokerRouter(_options);
    var producer = new Producer(router);
    SandMessageForever(producer, topic);

    await ReadMessageForever(manualConsumer, offset);
}
public Producer(string topic, string endpoint)
{
    this.topic = !string.IsNullOrEmpty(topic) ? topic : "test_topic";
    this.kafkaEndpoint = !string.IsNullOrEmpty(endpoint) ? endpoint : "http://localhost:9092";

    Uri uri = new Uri(kafkaEndpoint);
    var options = new KafkaOptions(uri);
    var router = new BrokerRouter(options);
    producer = new KafkaNet.Producer(router);
}
public async Task ProducerAckLevel()
{
    using (var router = new BrokerRouter(new KafkaOptions(IntegrationConfig.IntegrationUri) { Log = IntegrationConfig.NoDebugLog }))
    using (var producer = new Producer(router))
    {
        var responseAckLevel0 = await producer.SendMessageAsync(new Message("Ack Level 0"), IntegrationConfig.IntegrationTopic, acks: 0, partition: 0);
        Assert.AreEqual(responseAckLevel0.Offset, -1);

        var responseAckLevel1 = await producer.SendMessageAsync(new Message("Ack Level 1"), IntegrationConfig.IntegrationTopic, acks: 1, partition: 0);
        Assert.That(responseAckLevel1.Offset, Is.GreaterThan(-1));
    }
}
public static Producer GetProducer(this Kafka kafkaObj)
{
    if (_producer == null)
    {
        var addresses = from x in kafkaObj.brokers select new Uri(x.address);
        var router = new BrokerRouter(new KafkaOptions(addresses.ToArray()));
        _producer = new Producer(router);
    }
    return _producer;
}
public async Task ProducerAckLevel1ResponseOffsetShouldBeEqualToLastOffset()
{
    using (var router = new BrokerRouter(new KafkaOptions(IntegrationConfig.IntegrationUri) { Log = IntegrationConfig.NoDebugLog }))
    using (var producer = new Producer(router))
    {
        var responseAckLevel1 = await producer.SendMessageAsync(new Message("Ack Level 1"), IntegrationConfig.IntegrationTopic, acks: 1, partition: 0);
        var offsetResponse = await producer.GetTopicOffsetAsync(IntegrationConfig.IntegrationTopic);
        var maxOffset = offsetResponse.Find(x => x.PartitionId == 0);

        Assert.AreEqual(responseAckLevel1.Offset, maxOffset.Offsets.Max() - 1);
    }
}
public void ProducerShouldNotExpectResponseWhenAckIsZero()
{
    using (var router = new BrokerRouter(new KafkaOptions(IntegrationConfig.IntegrationUri)))
    using (var producer = new Producer(router))
    {
        var sendTask = producer.SendMessageAsync(IntegrationConfig.IntegrationTopic, new[] { new Message(Guid.NewGuid().ToString()) }, acks: 0);

        sendTask.Wait(TimeSpan.FromMinutes(2));

        Assert.That(sendTask.Status, Is.EqualTo(TaskStatus.RanToCompletion));
    }
}
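Taken together, these tests exercise the two acknowledgement levels the producer exposes: acks: 0 is fire-and-forget (the broker sends no response, so the reported offset stays -1, as ProducerAckLevel asserts), while acks: 1 waits for the partition leader and returns the offset the message was written at. A side-by-side sketch inside an async method, assuming the same overload used in ProducerAckLevel; the producer setup is omitted and the message text is illustrative:

// acks: 0 -- no broker acknowledgement; kafka-net reports Offset == -1
var noAck = await producer.SendMessageAsync(new Message("fire and forget"), IntegrationConfig.IntegrationTopic, acks: 0, partition: 0);

// acks: 1 -- wait for the partition leader; the response carries the real append offset
var acked = await producer.SendMessageAsync(new Message("acknowledged"), IntegrationConfig.IntegrationTopic, acks: 1, partition: 0);

Console.WriteLine("no-ack offset: {0}, acked offset: {1}", noAck.Offset, acked.Offset);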
public void Send(Point[] points)
{
    using (var client = new Producer(_router))
    {
        var messages = points.Select(x =>
        {
            var pointToString = _formatter.PointToString(x);
            return new Message(pointToString);
        }).ToArray();

        client.SendMessageAsync("InfluxCapacitor", messages).Wait();
    }
}
public async Task Send(object channel, Address address, TransportMessage message, object properties)
{
    var options = new KafkaOptions(new Uri(address.Machine));
    var router = new BrokerRouter(options);
    var topic = address.Queue;
    var messageString = System.Text.Encoding.Default.GetString(message.Body);

    using (var client = new Producer(router))
    {
        await client.SendMessageAsync(topic, new[] { new Message(messageString) });
    }
}
static void Main(string[] args) { var options = new KafkaOptions(new Uri("http://sjkap556:9092")); var router = new BrokerRouter(options); var client = new KafkaNet.Producer(router); for (int i = 0; i < 1; i++) { client.SendMessageAsync("testCockpit", new[] { new Message(DateTime.Now + " -- Teste: " + i) }).Wait(); } Console.ReadLine(); }
static void Main(string[] args) { const string topicName = "TestHarness"; //create an options file that sets up driver preferences var options = new KafkaOptions(new Uri("http://CSDKAFKA01:9092"), new Uri("http://CSDKAFKA02:9092")) { Log = new ConsoleLog() }; //start an out of process thread that runs a consumer that will write all received messages to the console Task.Run(() => { var consumer = new Consumer(new ConsumerOptions(topicName, new BrokerRouter(options)) { Log = new ConsoleLog() }); foreach (var data in consumer.Consume()) { Console.WriteLine("Response: P{0},O{1} : {2}", data.Meta.PartitionId, data.Meta.Offset, data.Value.ToUtf8String()); } }); //create a producer to send messages with var producer = new Producer(new BrokerRouter(options)) { BatchSize = 100, BatchDelayTime = TimeSpan.FromMilliseconds(2000) }; //take in console read messages Console.WriteLine("Type a message and press enter..."); while (true) { var message = Console.ReadLine(); if (message == "quit") break; if (string.IsNullOrEmpty(message)) { //send a random batch of messages SendRandomBatch(producer, topicName, 200); } else { producer.SendMessageAsync(topicName, new[] { new Message(message) }); } } using (producer) { } }
private static void Produce(string broker, string topic)
{
    var options = new KafkaOptions(new Uri(broker));
    var router = new BrokerRouter(options);
    var client = new Producer(router);

    var current_datetime = DateTime.Now;
    var key = current_datetime.Second.ToString();
    var events = new[] { new Message("Hello World " + current_datetime.ToString(), key) };

    client.SendMessageAsync(topic, events).Wait(1500);
    Console.WriteLine("Produced: Key: {0}. Message: {1}", key, events[0].Value.ToUtf8String());

    using (client) { }
}
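The snippet above passes a key alongside the value. Assuming kafka-net's default key-based partition selection (messages that share a key are routed to the same partition, which is what keeps them in order, as the ordering test earlier relies on), a hypothetical keyed batch could reuse the client and topic from the method above like this; the key and event texts are made-up placeholders:

// Hypothetical example: keep all events for one order on a single partition by reusing its id as the key.
var orderId = "order-42";
var orderEvents = new[]
{
    new Message("created", orderId),
    new Message("paid", orderId),
    new Message("shipped", orderId)
};
client.SendMessageAsync(topic, orderEvents).Wait(1500);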
public static void ProduceRequest(string subTopicAsPayload, string requestString)
{
    string topic = ServiceTopics.RequestTopic;
    string payload = topic + subTopicAsPayload + requestString;
    Message msg = new Message(payload);

    Uri uri = new Uri("http://localhost:9092");
    var options = new KafkaOptions(uri);
    var router = new BrokerRouter(options);
    var client = new KafkaNet.Producer(router);

    client.SendMessageAsync(topic, new List<Message> { msg }).Wait();
}
public Form1()
{
    InitializeComponent();

    // Initialize the Kafka client
    options = new KafkaOptions(new Uri("http://stampsnet.hashtagsource.com:9092"));
    router = new BrokerRouter(options);
    client = new KafkaNet.Producer(router);

    // Read the input file
    leArquivoDeEntradas();

    iniciaThreads();
}
public void ConsumerFailure()
{
    string topic = "TestTopicIssue13-2-3R-1P";
    using (var router = new BrokerRouter(_options))
    {
        var producer = new Producer(router);
        var offsets = producer.GetTopicOffsetAsync(topic).Result;
        var maxOffsets = offsets.Select(x => new OffsetPosition(x.PartitionId, x.Offsets.Max())).ToArray();
        var consumerOptions = new ConsumerOptions(topic, router) { PartitionWhitelist = new List<int>() { 0 }, MaxWaitTimeForMinimumBytes = TimeSpan.Zero };

        SandMessageForever(producer, topic);
        ReadMessageForever(consumerOptions, maxOffsets);
    }
}
private static async void SendRandomBatch(KafkaNet.Producer producer, string topicName, int count)
{
    // send multiple messages
    var sendTask = producer.SendMessageAsync(topicName, Enumerable.Range(0, count).Select(x => new Message(x.ToString())));

    Console.WriteLine("Posted #{0} messages. Buffered:{1} AsyncCount:{2}", count, producer.BufferCount, producer.AsyncCount);

    var response = await sendTask;

    Console.WriteLine("Completed send of batch: {0}. Buffered:{1} AsyncCount:{2}", count, producer.BufferCount, producer.AsyncCount);
    foreach (var result in response.OrderBy(x => x.PartitionId))
    {
        Console.WriteLine("Topic:{0} PartitionId:{1} Offset:{2}", result.Topic, result.PartitionId, result.Offset);
    }
}
/// <summary> /// Sends request and response raw data to Kafka /// </summary> /// <param name="data">Request and response metrics in raw form</param> /// <returns>A completed task</returns> public override async Task Write(RequestResponseData data) { try { KafkaOptions options = new KafkaOptions(new Uri("http://SERVER1:9092"), new Uri("http://SERVER2:9092")); BrokerRouter router = new BrokerRouter(options); Producer client = new Producer(router); client.SendMessageAsync("API-Meter Data", new[] { new Message { Value = "NULL for now" } }).Wait(); } catch (Exception ex) { Trace.Write(ex.Message); } }
public void EnsureGzipCanDecompressMessageFromKafka()
{
    var producer = new Producer(_router);

    var offsets = producer.GetTopicOffsetAsync(CompressTopic).Result;

    var consumer = new Consumer(new ConsumerOptions("Empty", _router),
        offsets.Select(x => new OffsetPosition(x.PartitionId, 0)).ToArray());

    var results = consumer.Consume().Take(3).ToList();

    for (int i = 0; i < 3; i++)
    {
        Assert.That(results[i].Value, Is.EqualTo(i.ToString()));
    }
}
public void ProducerShouldGroupMessagesByBroker()
{
    var router = _routerProxy.Create();
    var producer = new Producer(router);
    var messages = new List<Message>
    {
        new Message { Value = "1" },
        new Message { Value = "2" }
    };

    var response = producer.SendMessageAsync("UnitTest", messages).Result;

    Assert.That(response.Count, Is.EqualTo(2));
    Assert.That(_routerProxy.BrokerConn0.ProduceRequestCallCount, Is.EqualTo(1));
    Assert.That(_routerProxy.BrokerConn1.ProduceRequestCallCount, Is.EqualTo(1));
}
public Form1()
{
    InitializeComponent();

    options = new KafkaOptions(new Uri("http://stampsnet.hashtagsource.com:9092"));
    router = new BrokerRouter(options);
    client = new KafkaNet.Producer(router);

    string[] ports = SerialPort.GetPortNames();
    foreach (string port in ports)
    {
        comboBox1.Items.Add(port);
    }

    comboBox2.SelectedIndex = 10;

    timer1.Start();
    timer2.Start();
}
private static async void SendRandomBatch(Producer producer, string topicName, int count)
{
    // send multiple messages
    var sendTask = producer.SendMessageAsync(topicName, Enumerable.Range(0, count).Select(x => new Message(x.ToString())));

    Console.WriteLine("Posted #{0} messages. Buffered:{1} AsyncCount:{2}", count, producer.BufferCount, producer.AsyncCount);

    var response = await sendTask;

    Console.WriteLine("Completed send of batch: {0}. Buffered:{1} AsyncCount:{2}", count, producer.BufferCount, producer.AsyncCount);
    foreach (var result in response.OrderBy(x => x.PartitionId))
    {
        Console.WriteLine("Topic:{0} PartitionId:{1} Offset:{2}", result.Topic, result.PartitionId, result.Offset);
    }
}
public async Task ShouldSendAsyncToAllConnectionsEvenWhenExceptionOccursOnOne()
{
    var routerProxy = new FakeBrokerRouter();
    routerProxy.BrokerConn1.ProduceResponseFunction = () => { throw new KafkaApplicationException("some exception"); };

    var router = routerProxy.Create();
    using (var producer = new Producer(router))
    {
        var messages = new List<Message> { new Message("1"), new Message("2") };

        var sendTask = producer.SendMessageAsync("UnitTest", messages).ConfigureAwait(false);

        Assert.Throws<KafkaApplicationException>(async () => await sendTask);

        Assert.That(routerProxy.BrokerConn0.ProduceRequestCallCount, Is.EqualTo(1));
        Assert.That(routerProxy.BrokerConn1.ProduceRequestCallCount, Is.EqualTo(1));
    }
}
private void kafkanetSend_click(object sender, EventArgs e)
{
    // kafka-net library is used in this approach
    string payload = $"Welcome to Kafka {i++}";
    string topic = "test-topic";

    KafkaNet.Protocol.Message msg = new KafkaNet.Protocol.Message(payload);

    Uri uri = new Uri("http://localhost:9092");
    var options = new KafkaOptions(uri);
    var router = new BrokerRouter(options);
    KafkaNet.Producer client = new KafkaNet.Producer(router);

    client.SendMessageAsync(topic, new List<KafkaNet.Protocol.Message> { msg });
    Console.WriteLine("sent");
}
public Form1()
{
    InitializeComponent();

    foreach (var item in SerialPort.GetPortNames())
    {
        comboBox1.Items.Add(item);
    }
    comboBox2.SelectedIndex = 0;

    bloquear = true;

    options = new KafkaOptions(new Uri("http://35.202.217.21:9092"));
    //options = new KafkaOptions(new Uri("http://192.168.1.30:9092"));
    router = new BrokerRouter(options);
    client = new KafkaNet.Producer(router);
    consumer = new Consumer(new ConsumerOptions("Teste", router));

    Thread _lekafka = new Thread(leKafka);
    _lekafka.Start();
}
static Productor()
{
    options = new KafkaOptions(new Uri("http://localhost:9092"));
    router = new BrokerRouter(options);
    client = new KafkaNet.Producer(router);
}
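Several of the snippets above tune the producer's buffering: BatchSize and BatchDelayTime control when a buffered batch is flushed, while maximumMessageBuffer and maximumAsyncRequests bound memory use and in-flight requests. A consolidated sketch, assuming the same constructor parameters and properties used above; the broker URI, topic, and all numeric values are illustrative only:

var options = new KafkaOptions(new Uri("http://localhost:9092"));

using (var router = new BrokerRouter(options))
using (var producer = new Producer(router, maximumAsyncRequests: 10, maximumMessageBuffer: 5000)
{
    BatchSize = 100,                                 // flush once this many messages are buffered...
    BatchDelayTime = TimeSpan.FromMilliseconds(500)  // ...or after this much time, whichever comes first
})
{
    producer.SendMessageAsync("TestTopic", new[] { new Message("buffered message") }).Wait();
}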