public AvroSerializerTask(ILogger logger, CountdownEvent countDownEvent, ThreadSafeQueue<User> usersQueue, Kafka kafka, int numExpected)
    : base(logger, countDownEvent)
{
    UsersQueue = usersQueue;
    KafkaConnection = kafka;
    NumExpected = numExpected;
    Log = logger as Logger;
}
public KafkaReadThread(ILogger logger, CountdownEvent countDownEvent, Kafka kafka, string topic, int numExpected)
    : base(logger, countDownEvent)
{
    KafkaConnection = kafka;
    Topic = topic;
    NumExpected = numExpected;
    Log = logger as Logger;
}
public KafkaWriteTask(ILogger logger, CountdownEvent countdownEvent, Kafka kafka, string topic, int batchSize, int numExpected)
    : base(logger, countdownEvent)
{
    Log = logger as Logger;
    KafkaConnection = kafka;
    Topic = topic;
    BatchSize = batchSize;
    NumExpected = numExpected;
}
static void SendMessagesToKafka(Kafka kafka, List<byte[]> msgs, string topic, Logger log, ref Stopwatch sw)
{
    sw.Reset();
    log.WriteLogInfo($"Producing {msgs.Count} messages to Kafka.");
    sw.Start();
    kafka.ProduceMessages(msgs, topic);
    sw.Stop();
    log.WriteLogInfo($"Messages produced in {sw.ElapsedMilliseconds} ms.");
}
public AvroDeserializeTask(ILogger logger, CountdownEvent countDownEvent, Kafka kafka, ThreadSafeQueue<User> deserializedQueue, AvroSerializer serializer, int numExpected)
    : base(logger, countDownEvent)
{
    Log = logger as Logger;
    KafkaConnection = kafka;
    DeserializedQueue = deserializedQueue;
    Serializer = serializer;
    NumExpected = numExpected;
}
static List<byte[]> ReadMessagesFromKafka(Kafka kafka, string topic, int numExpected, Logger log, ref Stopwatch sw)
{
    sw.Reset();
    log.WriteLogInfo("Consuming messages from Kafka.");
    var received = new List<byte[]>();
    // Keep a reference to the handler so it can be detached afterwards;
    // otherwise repeated calls would stack duplicate OnMessage subscriptions.
    EventHandler<Message> onMessage = (_, message) => { received.Add(message.Value); };
    kafka.Cons.OnMessage += onMessage;
    sw.Start();
    while (received.Count < numExpected)
    {
        kafka.Cons.Poll(100);
        Thread.Sleep(100);
    }
    sw.Stop();
    kafka.Cons.OnMessage -= onMessage;
    log.WriteLogInfo($"{received.Count} messages consumed in {sw.ElapsedMilliseconds} ms.");
    return received;
}
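// The load-test path in Main() fires ProduceAsync without inspecting delivery
// reports (see the TODO there). Below is a minimal sketch of a checked produce
// loop, assuming the 0.11-era Confluent.Kafka byte[] Producer API already used
// in this file; the helper name ProduceWithDeliveryCheck is illustrative, not
// part of the library.
static int ProduceWithDeliveryCheck(Producer producer, string topic, int num, Logger log)
{
    var deliveries = new List<Task<Message>>();
    for (int i = 0; i < num; i++)
    {
        deliveries.Add(producer.ProduceAsync(topic, null, Encoding.ASCII.GetBytes(DateTime.UtcNow.ToLongDateString())));
    }
    producer.Flush(TimeSpan.FromSeconds(10));
    var failed = 0;
    foreach (var delivery in deliveries)
    {
        // Each delivery report carries a per-message Error from the broker.
        if (delivery.Result.Error.Code != ErrorCode.NoError)
        {
            failed++;
            log.WriteLogError($"Delivery failed: {delivery.Result.Error.Reason}");
        }
    }
    return failed;
}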
static void Main(string[] args)
{
    System.Threading.ThreadPool.SetMinThreads(50, 30);
    var globalSw = new Stopwatch();
    var taskSw = new Stopwatch();
    var topic = "speed_test_dotnet";
    var multithreaded = false;
    var load = false;
    int num = 1000;

    #region parseCommandLineArgs
    if (args.Length > 0)
    {
        foreach (var arg in args)
        {
            if (arg.Contains("mt"))
            {
                multithreaded = true;
            }
            else if (arg.Contains("load"))
            {
                load = true;
            }
            else if (!int.TryParse(arg, out num))
            {
                GlobalLogger.WriteLogError($"Argument {arg} is not valid.");
                PrintUsage();
                Environment.Exit(2);
            }
        }
    }
    #endregion parseCommandLineArgs

    var serializer = new AvroSerializer(User._SCHEMA);
    var brokers = System.Environment.GetEnvironmentVariable("KAFKA_BROKERS");
#if DEBUG
    brokers = "104.198.16.33:10086";
#endif
    if (string.IsNullOrEmpty(brokers))
    {
        GlobalLogger.WriteLogError("KAFKA_BROKERS env var not set.");
        Environment.Exit(2);
    }

    var producerConfig = new Dictionary<string, object>()
    {
        { "bootstrap.servers", brokers },
        { "group.id", "dotnet_speed_test" },
        { "client.id", Environment.MachineName },
        { "acks", "all" },
        { "compression.codec", "gzip" },
        { "message.max.bytes", 500 * 1048576 }
    };
    var consumerConfig = new Dictionary<string, object>()
    {
        // Add extra config in an effort to make this thing consume messages. Grrrrr.
        //{ "group.id", Guid.NewGuid() },
        { "bootstrap.servers", brokers },
        { "group.id", "dotnet_speed_test" },
        { "enable.auto.commit", true },
        { "auto.commit.interval.ms", 5000 },
        { "default.topic.config", new Dictionary<string, object>() { { "auto.offset.reset", "smallest" } } }
    };

    var prodSw = new Stopwatch();
    if (load)
    {
        topic = "load_test_dotnet";
        GlobalLogger.WriteLogInfo($"Producing {num} messages to topic {topic}");
        var count = 0;
        prodSw.Start();
        using (var producer = new Producer(producerConfig))
        {
            Task.Run(() =>
            {
                while (count < num)
                {
                    producer.ProduceAsync(topic, null, Encoding.ASCII.GetBytes(DateTime.UtcNow.ToLongDateString()));
                    count++;
                }
                producer.Flush(TimeSpan.FromSeconds(10));
            }).Wait();
            // TODO: Add delivery callback. Consider sending in batches, checking returns to make sure no errors occurred.
        }
        prodSw.Stop();
        GlobalLogger.WriteLogInfo("Done producing.");
        var msgsPerMs = count / (double)prodSw.ElapsedMilliseconds;
        GlobalLogger.WriteLogInfo($"{count} messages produced in {prodSw.ElapsedMilliseconds} ms. Rate: {msgsPerMs:F2} msgs/ms.");

        using (var consumer = new Consumer(consumerConfig))
        {
            var keepGoing = true;
            var assigned = false;
            var messagesRead = 0;
            consumer.OnMessage += (_, message) =>
            {
                // GlobalLogger.WriteLogDebug($"Message received. That makes {messagesRead}.");
                messagesRead++;
            };
            consumer.OnPartitionEOF += (_, partition) => { keepGoing = false; };
            consumer.OnPartitionsAssigned += (_, partitions) =>
            {
                GlobalLogger.WriteLogInfo($"Assigned partitions: [{string.Join(", ", partitions)}]");
                var offsets = consumer.QueryWatermarkOffsets(partitions[0]);
                GlobalLogger.WriteLogDebug($"offsets -- High: {offsets.High}, Low: {offsets.Low}");
                //List<TopicPartitionOffset> assignments = new List<TopicPartitionOffset>();
                //foreach (var part in partitions)
                //{
                //    var offsets = consumer.QueryWatermarkOffsets(part, TimeSpan.FromMilliseconds(1000));
                //    GlobalLogger.WriteLogDebug($"offsets -- High: {offsets.High}, Low: {offsets.Low}");
                //    assignments.Add(new TopicPartitionOffset(part, 11005290));
                //}
                //consumer.Assign(assignments.ToArray());
                consumer.Assign(partitions);
                assigned = true;
            };
            consumer.OnError += (_, error) => { GlobalLogger.WriteLogError(error.Reason); };
            consumer.OnConsumeError += (_, error) => { GlobalLogger.WriteLogError(error.Error.Reason); };
            consumer.Subscribe(new List<string> { topic });
            GlobalLogger.WriteLogInfo("Waiting for topic assignment.");
            while (!assigned)
            {
                consumer.Poll(100);
                Thread.Sleep(100);
            }
            Thread.Sleep(1000);
            prodSw.Reset();
            prodSw.Start();
            while (keepGoing)
            {
                consumer.Poll(100);
                Thread.Sleep(100);
            }
            prodSw.Stop();
            if (messagesRead == 0)
            {
                GlobalLogger.WriteLogError($"{messagesRead} messages read. Something's broken.");
                return;
            }
            GlobalLogger.WriteLogInfo($"{messagesRead} messages consumed in {prodSw.ElapsedMilliseconds} ms. Rate: {messagesRead / (double)prodSw.ElapsedMilliseconds:F2} msgs/ms.");
        }
        GlobalLogger.WriteLogInfo("Done.");
        return;
    }

    using (var kafka = new Kafka(consumerConfig, producerConfig, GlobalLogger))
    {
        kafka.Cons.Subscribe(new List<string> { topic });
        while (!kafka.ConsumerAssigned)
        {
            kafka.Cons.Poll(100);
            Thread.Sleep(100);
        }

        #region primeThePump
        // Round-trip one throwaway message so connection setup and consumer
        // assignment don't skew the timed pass below.
        GlobalLogger.WriteLogInfo("Priming Kafka.");
        var user = new User("PRIMING", 1, "PEUCE");
        var serialized = AvroSerializeMessages(new List<User>() { user }, serializer, GlobalLogger, ref taskSw);
        kafka.ProduceMessages(serialized, topic);
        while (kafka.ReceivedBytes.Count() < 1)
        {
            kafka.Cons.Poll(100);
            Thread.Sleep(100);
        }
        kafka.ReceivedBytes.ClearQueue();
        kafka.MessagesRead = 0;
        GlobalLogger.WriteLogInfo("Kafka primed.");
        #endregion primeThePump

        globalSw.Start();
        GlobalLogger.WriteLogInfo($"Starting test pass with {num} messages.");
        if (multithreaded)
        {
            var userQueue = new ThreadSafeQueue<User>();
            var serializedQueue = new ThreadSafeQueue<byte[]>();
            var deserializedQueue = new ThreadSafeQueue<User>();
            var countDown = new CountdownEvent(1);
            var taskList = new List<ThreadedTaskWorker>()
            {
                new AvroSerializerTask(GlobalLogger, countDown, userQueue, kafka, num),
                new KafkaWriteTask(GlobalLogger, countDown, kafka, topic, 100, num),
                new KafkaReadThread(GlobalLogger, countDown, kafka, topic, num),
                new AvroDeserializeTask(GlobalLogger, countDown, kafka, deserializedQueue, serializer, num)
            };
            var threads = new List<Thread>();
            Console.CancelKeyPress += (sender, eventArgs) =>
            {
                GlobalLogger.WriteLogInfo("Stopping tasks.");
                foreach (var task in taskList)
                {
                    task.Stop();
                }
                foreach (var thr in threads)
                {
                    thr.Join(2000);
                    if (thr.ThreadState == System.Threading.ThreadState.Running)
                    {
                        thr.Abort();
                    }
                }
            };
            foreach (var task in taskList)
            {
                countDown.AddCount();
                var nThread = new Thread(task.Run);
                nThread.IsBackground = true;
                threads.Add(nThread);
                nThread.Start();
                task.Start();
            }
            countDown.Signal();
            var rand = new Random();
            for (int i = 0; i < num; i++)
            {
                userQueue.Enqueue(new User(GetRandomString(10), rand.Next(100), GetRandomColor()));
            }
            countDown.Wait();
        }
        else
        {
            var users = CreateUserList(num, GlobalLogger, ref taskSw);
            var msgs = AvroSerializeMessages(users, serializer, GlobalLogger, ref taskSw);
            SendMessagesToKafka(kafka, msgs, topic, GlobalLogger, ref taskSw);
            var received = ReadMessagesFromKafka(kafka, topic, num, GlobalLogger, ref taskSw);
            var deserialized = AvroDeserializeMessages(received, serializer, GlobalLogger, ref taskSw);
        }
    }
    globalSw.Stop();
    GlobalLogger.WriteLogInfo($"Script run time: {globalSw.ElapsedMilliseconds} ms.");
}
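// Example invocations (argument handling per parseCommandLineArgs above;
// the assembly name SpeedTest is illustrative):
//   dotnet SpeedTest.dll 5000        -> single-threaded pass with 5000 messages
//   dotnet SpeedTest.dll mt 5000     -> multithreaded pass using the task workers
//   dotnet SpeedTest.dll load 50000  -> raw produce/consume load test against load_test_dotnet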