/// <summary>
/// Wires up a producer/consumer pair over the supplied librdkafka-style config
/// dictionaries and hooks the consumer callbacks that feed the internal queues.
/// </summary>
/// <param name="consumerConfig">Consumer configuration (bootstrap.servers, group.id, ...).</param>
/// <param name="producerConfig">Producer configuration.</param>
/// <param name="log">Sink for informational and error messages.</param>
public Kafka(Dictionary <string, object> consumerConfig, Dictionary <string, object> producerConfig, Logger log)
 {
     ConsumerConfig = consumerConfig;
     ProducerConfig = producerConfig;
     Log            = log;

     // Construct the queues BEFORE wiring any consumer event handlers that
     // capture them. Previously ReceivedBytes was created last, after the
     // OnMessage handler referencing it was registered — harmless only as
     // long as no event could fire before the constructor finished.
     ReceivedBytes  = new ThreadSafeQueue <byte[]>();
     ToProduceBytes = new ThreadSafeQueue <byte[]>();
     UserQueue      = new ThreadSafeQueue <User>();

     Prod = new Producer <Null, byte[]>(ProducerConfig, new NullSerializer(), new DoNothingSerializer());
     // NOTE(review): DoNothingSerializer is passed where the consumer expects a
     // value *deserializer* — presumably it implements both roles; confirm
     // against its definition.
     Cons = new Consumer <Null, byte[]>(ConsumerConfig, new NullDeserializer(), new DoNothingSerializer());

     // Every delivered message lands in ReceivedBytes; MessagesRead is the
     // running count the test harness polls against.
     Cons.OnMessage += (_, message) =>
     {
         ReceivedBytes.Enqueue(message.Value);
         MessagesRead++;
     };
     // Legacy Confluent.Kafka API: the rebalance is only completed once
     // Assign() is called from inside this handler.
     Cons.OnPartitionsAssigned += (_, partitions) =>
     {
         var assignedParts = string.Join(", ", partitions);
         if (!string.IsNullOrEmpty(assignedParts))
         {
             Cons.Assign(partitions);
             ConsumerAssigned = true;
             Log.WriteLogInfo($"Partitions assigned: {assignedParts}");
         }
         else
         {
             // NOTE(review): skipping Assign() on an empty set may stall the
             // rebalance under the legacy API — verify against the client docs.
             Log.WriteLogInfo($"Partitions was null or empty. Continuing to poll.");
         }
     };
     Cons.OnError += (_, error) =>
     {
         Log.WriteLogError(error.ToString());
     };
     Cons.OnConsumeError += (_, error) =>
     {
         Log.WriteLogError(error.ToString());
     };
 }
// ---- Example #2 ----
        /// <summary>
        /// Kafka speed-test driver. Modes (via command-line args):
        ///   "load" — raw produce/consume throughput test against load_test_dotnet;
        ///   "mt"   — multithreaded Avro serialize/produce/consume/deserialize pipeline;
        ///   an integer argument sets the message count (default 1000);
        ///   otherwise a single-threaded serialize → produce → consume → deserialize pass.
        /// Exits with code 2 on bad arguments or a missing KAFKA_BROKERS env var.
        /// </summary>
        static void Main(string[] args)
        {
            System.Threading.ThreadPool.SetMinThreads(50, 30);
            var globalSw      = new Stopwatch();
            var taskSw        = new Stopwatch();
            var topic         = "speed_test_dotnet";
            var multithreaded = false;
            var load          = false;
            int num           = 1000;

            #region parseCommandLineArgs
            if (args.Length > 0)
            {
                foreach (var arg in args)
                {
                    if (arg.Contains("mt"))
                    {
                        multithreaded = true;
                    }
                    else if (arg.Contains("load"))
                    {
                        load = true;
                    }
                    else if (!int.TryParse(arg, out num))
                    {
                        GlobalLogger.WriteLogError($"Argument {arg} is not valid.");
                        PrintUsage();
                        Environment.Exit(2);
                    }
                }
            }
            #endregion parseCommandLineArgs

            var serializer = new AvroSerializer(User._SCHEMA);
            var brokers    = System.Environment.GetEnvironmentVariable("KAFKA_BROKERS");
#if DEBUG
            // NOTE(review): hard-coded test broker overrides the env var in
            // debug builds only.
            brokers = "104.198.16.33:10086";
#endif

            if (string.IsNullOrEmpty(brokers))
            {
                GlobalLogger.WriteLogError("KAFKA_BROKERS env var not set.");
                Environment.Exit(2);
            }
            var producerConfig = new Dictionary <string, object>()
            {
                { "bootstrap.servers", brokers },
                { "group.id", "dotnet_speed_test" },
                { "client.id", Environment.MachineName },
                { "acks", "all" },
                { "compression.codec", "gzip" },
                { "message.max.bytes", 500 * 1048576 }
            };
            var consumerConfig = new Dictionary <string, object>()
            {
                // Add extra config in an effort to make this thing consume messages. Grrrrr.
                //{ "group.id", Guid.NewGuid() },
                { "bootstrap.servers", brokers },
                { "group.id", "dotnet_speed_test" },
                { "enable.auto.commit", true },
                { "auto.commit.interval.ms", 5000 },
                { "default.topic.config", new Dictionary <string, object>()
                  {
                      { "auto.offset.reset", "smallest" }
                  } }
            };

            var prodSw = new Stopwatch();
            if (load)
            {
                topic = "load_test_dotnet";
                GlobalLogger.WriteLogInfo($"Producing {num} messages to topic {topic}");
                var count = 0;
                prodSw.Start();

                using (var producer = new Producer(producerConfig))
                {
                    Task.Run(() =>
                    {
                        while (count < num)
                        {
                            // Fire-and-forget: the delivery Task was previously
                            // captured into an unused local and never awaited.
                            // Flush() below waits for outstanding deliveries.
                            producer.ProduceAsync(topic, null, Encoding.ASCII.GetBytes(DateTime.UtcNow.ToLongDateString()));
                            count++;
                        }
                        producer.Flush(TimeSpan.FromSeconds(10));
                    }).Wait();
                    // TODO: Add delivery callback. Consider sending in batches, checking returns to make sure no errors occurred.
                }
                prodSw.Stop();
                GlobalLogger.WriteLogInfo($"Done producing.");
                // Clamp elapsed to >= 1 ms: a sub-millisecond run would make the
                // long integer division throw DivideByZeroException.
                var msgPerSec = count / Math.Max(1L, prodSw.ElapsedMilliseconds);
                GlobalLogger.WriteLogInfo($"{count} messages produced in {prodSw.ElapsedMilliseconds} ms. Rate: {msgPerSec} msgs/ms.");

                using (var consumer = new Consumer(consumerConfig))
                {
                    var keepGoing    = true;
                    var assigned     = false;
                    var messagesRead = 0;

                    consumer.OnMessage += (_, message) =>
                    {
                        // GlobalLogger.WriteLogDebug($"Message received. That makes {messagesRead}.");
                        messagesRead++;
                    };
                    // Stop consuming once the first partition reaches EOF.
                    consumer.OnPartitionEOF += (_, partition) =>
                    {
                        keepGoing = false;
                    };
                    consumer.OnPartitionsAssigned += (_, partitions) =>
                    {
                        GlobalLogger.WriteLogInfo($"Assigned partitions: [{string.Join(", ", partitions)}]");
                        var offsets = consumer.QueryWatermarkOffsets(partitions[0]);
                        GlobalLogger.WriteLogDebug($"offsets -- High: {offsets.High}, Low: {offsets.Low}");

                        //List<TopicPartitionOffset> assignments = new List<TopicPartitionOffset>();
                        //foreach (var part in partitions)
                        //{
                        //    var offsets = consumer.QueryWatermarkOffsets(part, TimeSpan.FromMilliseconds(1000));
                        //    GlobalLogger.WriteLogDebug($"offsets -- High: {offsets.High}, Low: {offsets.Low}");
                        //    assignments.Add(new TopicPartitionOffset(part, 11005290));
                        //}
                        //consumer.Assign(assignments.ToArray());

                        consumer.Assign(partitions);
                        assigned = true;
                    };
                    consumer.OnError += (_, error) =>
                    {
                        GlobalLogger.WriteLogError(error.Reason);
                    };
                    consumer.OnConsumeError += (_, error) =>
                    {
                        GlobalLogger.WriteLogError(error.Error.Reason);
                    };

                    consumer.Subscribe(new List <string> {
                        topic
                    });
                    GlobalLogger.WriteLogInfo("Waiting for topic assignment.");
                    while (!assigned)
                    {
                        consumer.Poll(100);
                        Thread.Sleep(100);
                    }
                    Thread.Sleep(1000);

                    prodSw.Reset();
                    prodSw.Start();
                    while (keepGoing)
                    {
                        consumer.Poll(100);
                        Thread.Sleep(100);
                    }
                    prodSw.Stop();
                    if (messagesRead == 0)
                    {
                        GlobalLogger.WriteLogError($"{messagesRead} messages read. Something's broken.");
                        return;
                    }
                    // BUGFIX: the rate was previously computed as elapsed/messages
                    // (ms per msg) but labeled "msgs/ms" — operands were inverted.
                    GlobalLogger.WriteLogInfo($"{messagesRead} messages consumed in {prodSw.ElapsedMilliseconds} ms. Rate {messagesRead / Math.Max(1L, prodSw.ElapsedMilliseconds)} msgs/ms.");
                }
                GlobalLogger.WriteLogInfo("Done.");
                return;
            }

            using (var kafka = new Kafka(consumerConfig, producerConfig, GlobalLogger))
            {
                kafka.Cons.Subscribe(new List <string> {
                    topic
                });
                while (!kafka.ConsumerAssigned)
                {
                    kafka.Cons.Poll(100);
                    Thread.Sleep(100);
                }

                #region primeThePump
                // Round-trip one throwaway message so connection setup cost is
                // excluded from the timed pass.
                GlobalLogger.WriteLogInfo("Priming Kafka.");
                var user       = new User("PRIMING", 1, "PEUCE");
                var serialized = AvroSerializeMessages(new List <User>()
                {
                    user
                }, serializer, GlobalLogger, ref taskSw);
                kafka.ProduceMessages(serialized, topic);
                while (kafka.ReceivedBytes.Count() < 1)
                {
                    kafka.Cons.Poll(100);
                    Thread.Sleep(100);
                }
                kafka.ReceivedBytes.ClearQueue();
                kafka.MessagesRead = 0;
                GlobalLogger.WriteLogInfo("Kafka primed.");
                #endregion primeThePump

                globalSw.Start();
                GlobalLogger.WriteLogInfo($"Starting test pass with {num} messages.");
                if (multithreaded)
                {
                    var userQueue         = new ThreadSafeQueue <User>();
                    var serializedQueue   = new ThreadSafeQueue <byte[]>();
                    var deserializedQueue = new ThreadSafeQueue <User>();

                    // Initial count of 1 holds the workers until Signal() below
                    // releases them all at once.
                    var countDown = new CountdownEvent(1);

                    var taskList = new List <ThreadedTaskWorker>()
                    {
                        new AvroSerializerTask(GlobalLogger, countDown, userQueue, kafka, num),
                        new KafkaWriteTask(GlobalLogger, countDown, kafka, topic, 100, num),
                        new KafkaReadThread(GlobalLogger, countDown, kafka, topic, num),
                        new AvroDeserializeTask(GlobalLogger, countDown, kafka, deserializedQueue, serializer, num)
                    };
                    var threads = new List <Thread>();
                    // Ctrl+C: ask workers to stop, then force-abort stragglers.
                    Console.CancelKeyPress += (sender, eventArgs) =>
                    {
                        GlobalLogger.WriteLogInfo("Stopping tasks.");
                        foreach (var task in taskList)
                        {
                            task.Stop();
                        }
                        foreach (var thr in threads)
                        {
                            thr.Join(2000);
                            if (thr.ThreadState == System.Threading.ThreadState.Running)
                            {
                                // NOTE(review): Thread.Abort is unsupported on
                                // .NET Core — only works on .NET Framework.
                                thr.Abort();
                            }
                        }
                    };
                    foreach (var task in taskList)
                    {
                        countDown.AddCount();
                        var nThread = new Thread(task.Run);
                        nThread.IsBackground = true;
                        threads.Add(nThread);
                        nThread.Start();
                        task.Start();
                    }
                    countDown.Signal();
                    var rand = new Random();

                    for (int i = 0; i < num; i++)
                    {
                        userQueue.Enqueue(new User(GetRandomString(10), rand.Next(100), GetRandomColor()));
                    }
                    countDown.Wait();
                }
                else
                {
                    var users = CreateUserList(num, GlobalLogger, ref taskSw);
                    var msgs  = AvroSerializeMessages(users, serializer, GlobalLogger, ref taskSw);
                    SendMessagesToKafka(kafka, msgs, topic, GlobalLogger, ref taskSw);
                    var received     = ReadMessagesFromKafka(kafka, topic, num, GlobalLogger, ref taskSw);
                    var deserialized = AvroDeserializeMessages(received, serializer, GlobalLogger, ref taskSw);
                }
            }
            globalSw.Stop();
            GlobalLogger.WriteLogInfo($"Script run time: {globalSw.ElapsedMilliseconds} ms.");
        }
// ---- Example #3 ----
 public AvroDeserializeTask(ILogger logger, CountdownEvent countDownEvent, Kafka kafka, ThreadSafeQueue <User> deserializedQueue, AvroSerializer serializer, int numExpected)
     : base(logger, countDownEvent)
 {
     Log               = logger as Logger;
     KafkaConnection   = kafka;
     DeserializedQueue = deserializedQueue;
     Serializer        = serializer;
     NumExpected       = numExpected;
 }