/// <summary>
/// Sends a fixed batch of identical timestamp messages to <c>commandQueue</c>
/// and prints total cost (ms), throughput (TPS) and average per-send RT (ms).
/// </summary>
static void ProducerSendTPSTest()
{
    int batchCount = 100000;
    var queueClient = new KafkaProducer(commandQueue, zkConnectionString);
    var message = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffffff");
    var kafkaMessage = new Kafka.Client.Messages.Message(Encoding.UTF8.GetBytes(message));
    var data = new Kafka.Client.Producers.ProducerData<string, Kafka.Client.Messages.Message>(commandQueue, message, kafkaMessage);

    double totalSendRt = 0;
    var watch = Stopwatch.StartNew();
    // Per-send RT uses a Stopwatch instead of DateTime.Now: DateTime.Now has
    // ~15 ms resolution and is affected by system clock adjustments.
    var sendTimer = new Stopwatch();
    for (int i = 0; i < batchCount; i++)
    {
        try
        {
            sendTimer.Restart();
            queueClient.Send(data);
            totalSendRt += sendTimer.Elapsed.TotalMilliseconds;
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.GetBaseException().Message);
        }
    }
    watch.Stop();

    var elapsedMs = watch.ElapsedMilliseconds;
    // Guard against division by zero if the whole batch completes in < 1 ms.
    var tps = elapsedMs > 0 ? batchCount * 1000 / elapsedMs : batchCount;
    Console.WriteLine($"cost: {elapsedMs} tps: {tps} rt: {totalSendRt / batchCount}");
    Console.ReadLine();
}
/// <summary>
/// Smoke test: publishes one hard-coded JSON payload to the "fun-topic" topic
/// through a broker reached via an ngrok TCP tunnel. Failures are printed to
/// the console and swallowed.
/// </summary>
private static void MicrosoftKafkaTest()
{
    var broker = new BrokerConfiguration() { Host = "0.tcp.ngrok.io", Port = 15069 };
    var producerConfig = new ProducerConfiguration(new List<BrokerConfiguration> { broker });
    var kafkaProducer = new Kafka.Client.Producers.Producer(producerConfig);

    var payloadBytes = Encoding.ASCII.GetBytes(@" { 'method': 'POST', 'timeStamp': 1508164715, 'foo': { 'id': 123 } }");
    var kafkaMessage = new Kafka.Client.Messages.Message(payloadBytes);
    var producerData = new ProducerData<string, Kafka.Client.Messages.Message>("fun-topic", kafkaMessage);

    try
    {
        kafkaProducer.Send(producerData);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Sends a single timestamp message to <c>commandQueue</c>, then stops the
/// producer and disposes the shared static ZooKeeper client.
/// </summary>
public void ProducerTest()
{
    var producer = new KafkaProducer(commandQueue, _zkConnection);

    var payload = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffffff");
    var body = Encoding.UTF8.GetBytes(payload);
    var producerData = new Kafka.Client.Producers.ProducerData<string, Kafka.Client.Messages.Message>(
        commandQueue, payload, new Kafka.Client.Messages.Message(body));

    producer.Send(producerData);
    Console.WriteLine($"send message: {payload}");

    producer.Stop();
    // NOTE(review): this disposes a STATIC shared client — presumably this test
    // is meant to run last; confirm nothing else still needs the ZK connection.
    ZookeeperConsumerConnector.zkClientStatic.Dispose();
}
/// <summary>
/// Serializes the context's Kafka message as JSON and publishes it to the given
/// topic. Send failures are logged and swallowed (best-effort publish).
/// </summary>
/// <param name="messageContext">
/// Context wrapping the outgoing message; must be a <see cref="MessageContext"/>
/// (the cast below throws <see cref="InvalidCastException"/> otherwise).
/// </param>
/// <param name="topic">Logical topic name; normalized by the queue-name formatter before use.</param>
public void Publish(IMessageContext messageContext, string topic)
{
    topic = Configuration.Instance.FormatMessageQueueName(topic);
    var topicClient = GetTopicClient(topic);

    var jsonValue = ((MessageContext)messageContext).KafkaMessage.ToJson();
    var message = new Kafka.Client.Messages.Message(Encoding.UTF8.GetBytes(jsonValue));
    var producerData = new ProducerData<string, Kafka.Client.Messages.Message>(topic, messageContext.Key, message);

    try
    {
        topicClient.Send(producerData);
    }
    catch (Exception ex)
    {
        // Deliberate swallow: publishing is fire-and-forget; callers are not
        // expected to handle transport failures, so the error is only logged.
        _logger.Error($"send message failed: {jsonValue}.", ex);
    }
}
/// <summary>
/// Interactive console test: starts consumer task(s) on <c>commandQueue</c>,
/// then publishes a timestamp message for every line typed. Entering "q" (or
/// closing stdin) cancels the consumers and waits for them to complete.
/// </summary>
static void GroupConsuemrTest()
{
    var cancellationTokenSource = new CancellationTokenSource();
    var consumerTasks = new List<Task>();
    for (int i = 0; i < 1; i++)
    {
        consumerTasks.Add(CreateConsumerTask(commandQueue, i.ToString(), cancellationTokenSource));
    }

    var queueClient = new KafkaProducer(commandQueue, zkConnectionString);
    while (true)
    {
        var message = Console.ReadLine();
        // Console.ReadLine() returns null when stdin is closed; the original
        // message.Equals("q") would then throw NullReferenceException.
        // Treat end-of-input the same as an explicit quit.
        if (message == null || message == "q")
        {
            cancellationTokenSource.Cancel();
            Task.WaitAll(consumerTasks.ToArray());
            break;
        }

        // The typed text is only a trigger; the actual payload is a timestamp.
        message = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.ffffff");
        var kafkaMessage = new Kafka.Client.Messages.Message(Encoding.UTF8.GetBytes(message));
        var data = new Kafka.Client.Producers.ProducerData<string, Kafka.Client.Messages.Message>(commandQueue, message, kafkaMessage);
        try
        {
            queueClient.Send(data);
            Console.WriteLine($"send message: {message}");
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.GetBaseException().Message);
        }
    }
}