/// <summary>
/// Parses the command verb (first argument) and instantiates the matching
/// sub-option object into <c>ActiveSubOption</c>.
/// </summary>
/// <param name="args">Raw command-line arguments; args[0] is the verb.</param>
/// <exception cref="ArgumentException">
/// Thrown when no verb is supplied or the verb is not recognized.
/// </exception>
internal void Parse(string[] args)
{
    if (args == null || args.Length == 0)
    {
        throw new ArgumentException("Please provide verb.");
    }

    // Normalize once; Verb is already lower-cased, so every comparison below
    // can use it directly (the original re-lowered it on each comparison).
    Verb = args[0].ToLowerInvariant();

    if ("topic" == Verb)
        ActiveSubOption = new TopicHelperArguments();
    else if ("dumpdata" == Verb || KafkaNETExampleType.ConsumeSimple.ToString().ToLowerInvariant() == Verb)
        ActiveSubOption = new ConsumeDataHelperArguments();
    else if ("dumpdataasconsumergroup" == Verb || KafkaNETExampleType.ConsumeGroup.ToString().ToLowerInvariant() == Verb)
        ActiveSubOption = new ConsumeGroupHelperOptions();
    else if ("latestoffsetofconsumergroup" == Verb
             || "consumegroupm" == Verb
             || "consumem" == Verb
             || KafkaNETExampleType.ConsumeGroupMonitor.ToString().ToLowerInvariant() == Verb)
        ActiveSubOption = new ConsumeGroupMonitorHelperOptions();
    else if ("produceroundrobin" == Verb || KafkaNETExampleType.ProduceSimple.ToString().ToLowerInvariant() == Verb)
        ActiveSubOption = new ProduceSimpleHelperOption();
    else if ("test" == Verb)
        ActiveSubOption = new TestHelperOptions();
    else if ("producewrapper" == Verb || KafkaNETExampleType.ProducePerfTest.ToString().ToLowerInvariant() == Verb)
        ActiveSubOption = new ProducePerfTestHelperOption();
    else if ("producem" == Verb || KafkaNETExampleType.ProduceMonitor.ToString().ToLowerInvariant() == Verb)
        ActiveSubOption = new ProduceMonitorHelperOptions();
    else if (KafkaNETExampleType.EventServerPerfTest.ToString().ToLowerInvariant() == Verb)
        ActiveSubOption = new JavaEventServerPerfTestHelperOptions();
    else
    {
        // Typo fix in the user-facing message: "recoganized" -> "recognized".
        throw new ArgumentException(string.Format("The command verb {0} is not recognized.", Verb));
    }
}
// Reads messages from every partition of dumpdataOptions.Topic (or only the one
// selected by PartitionIndex when it is not -1) via KafkaSimpleManager and dumps
// them through ConsumeDataOfOnePartition.
//
// Resets the shared static counters (correlationID, totalCountUTF8,
// totalCountOriginal, totalCount, lastNotifytotalCount) before starting, so a
// run always counts from zero.
//
// Flow: the outer while(true) is a retry loop — if any fetch in the partition
// sweep throws, metadata is refreshed and the whole sweep restarts. The `finish`
// flag becomes true either when ConsumeDataOfOnePartition reports the requested
// Count was reached (break out of the for-loop) or when the sweep completes
// without exception (`finish = true` after the for-loop); either way the outer
// loop then exits.
//
// All failures are logged, never rethrown: the outer catch swallows any
// exception after logging it together with the input options.
//
// NOTE(review): the "Topic:{0} Finish Read." log literal contains a raw line
// break as extracted here — presumably a wrapped literal in the original file;
// left byte-identical.
internal static void ConsumeDataSimple(ConsumeDataHelperArguments dumpdataOptions) { correlationID = 0; totalCountUTF8 = 0; totalCountOriginal = 0; totalCount = 0; lastNotifytotalCount = 0; KafkaSimpleManagerConfiguration config = new KafkaSimpleManagerConfiguration() { FetchSize = dumpdataOptions.FetchSize, BufferSize = dumpdataOptions.BufferSize, MaxWaitTime = dumpdataOptions.MaxWaitTime, MinWaitBytes = dumpdataOptions.MinWaitBytes, Zookeeper = dumpdataOptions.Zookeeper }; config.Verify(); bool finish = false; try { using (KafkaSimpleManager <int, Message> kafkaSimpleManager = new KafkaSimpleManager <int, Message>(config)) { TopicMetadata topicMetadata = kafkaSimpleManager.RefreshMetadata(0, ClientID, correlationID++, dumpdataOptions.Topic, true); while (true) { try { for (int i = 0; i <= topicMetadata.PartitionsMetadata.Max(r => r.PartitionId); i++) { if (dumpdataOptions.PartitionIndex == -1 || i == dumpdataOptions.PartitionIndex) { #region Get real offset and adjust long earliest = 0; long latest = 0; long offsetBase = 0; OffsetHelper.GetAdjustedOffset <int, Message>(dumpdataOptions.Topic , kafkaSimpleManager, i , KafkaNetLibraryExample.ConvertOffsetType(dumpdataOptions.Offset) , KafkaNetLibraryExample.ConvertOffset(dumpdataOptions.Offset) , dumpdataOptions.LastMessagesCount, out earliest, out latest, out offsetBase); #endregion Console.WriteLine("Topic:{0} Partition:{1} will read from {2} earliest:{3} latest:{4}", dumpdataOptions.Topic, i, offsetBase, earliest, latest); finish = ConsumeDataOfOnePartition(kafkaSimpleManager, i, offsetBase, earliest, latest, dumpdataOptions); if (finish) { break; } } } finish = true; } catch (Exception ex) { Logger.ErrorFormat("ConsumeDataSimple Got exception, will refresh metadata. {0}", ex.FormatException()); kafkaSimpleManager.RefreshMetadata(0, ClientID, correlationID++, dumpdataOptions.Topic, true); } if (finish) { break; } } } Logger.InfoFormat("Topic:{0} Finish Read. 
totalCount:{1} ", dumpdataOptions.Topic, totalCount); } catch (Exception ex) { Logger.ErrorFormat("ConsumeDataSimple Got exception:{0}\r\ninput parameter: {1}", ex.FormatException(), dumpdataOptions.ToString()); } }
// Repeatedly fetches batches from one partition through the manager's shared
// consumer and dumps them to dumpdataOptions.File (appended, shared-read), via
// KafkaConsoleUtil.DumpDataToFile, advancing offsetBase past the last message
// of each non-empty batch. Stops when:
//   - the fetch returns null (error already logged) -> falls through to the
//     final return, or
//   - an empty batch arrives (end of queue) -> break, or
//   - the shared totalCount reaches dumpdataOptions.Count (> 0) -> returns true.
// Returns true iff Count > 0 and totalCount (cumulative across partitions, a
// static field) has reached it; false otherwise.
//
// NOTE(review): correlationID is incremented twice per loop iteration — once as
// a standalone statement and again when passed to FetchAndGetMessageAndOffsetList.
// Confirm the double increment is intended.
// NOTE(review): locals `rand`, `sb`, and counter `l` are never read — dead code
// left byte-identical here.
// NOTE(review): the "Finish read partition" log literal contains a raw line
// break as extracted here — presumably wrapped from the original file.
// ("Earliese"/"Partitoin" typos are in runtime strings and left untouched.)
private static bool ConsumeDataOfOnePartition <TKey, TData>(KafkaSimpleManager <TKey, TData> kafkaSimpleManager, int partitionID, long offsetBase, long earliest, long latest, ConsumeDataHelperArguments dumpdataOptions) { Random rand = new Random(); StringBuilder sb = new StringBuilder(); using (FileStream fs = File.Open(dumpdataOptions.File, FileMode.Append, FileAccess.Write, FileShare.Read)) { using (StreamWriter sw = new StreamWriter(fs)) { #region repeatly consume and dump data long offsetLast = -1; long l = 0; sw.WriteLine("Will read partition {0} from {1}. Earliese:{2} latest:{3} ", partitionID, offsetBase, earliest, latest); Logger.InfoFormat("Will read partition {0} from {1}. Earliese:{2} latest:{3} ", partitionID, offsetBase, earliest, latest); using (Consumer consumer = kafkaSimpleManager.GetConsumer(dumpdataOptions.Topic, partitionID)) { while (true) { correlationID++; List <MessageAndOffset> listMessageAndOffsets = ConsumeSimpleHelper.FetchAndGetMessageAndOffsetList(consumer , correlationID++, dumpdataOptions.Topic, partitionID, offsetBase, consumer.Config.FetchSize, kafkaSimpleManager.Config.MaxWaitTime, kafkaSimpleManager.Config.MinWaitBytes); if (listMessageAndOffsets == null) { Logger.Error("PullMessage got null List<MessageAndOffset>, please check log for detail."); break; } else { #region dump response.Payload if (listMessageAndOffsets.Any()) { offsetLast = listMessageAndOffsets.Last().MessageOffset; totalCount += listMessageAndOffsets.Count; KafkaConsoleUtil.DumpDataToFile(dumpdataOptions.DumpDataAsUTF8, dumpdataOptions.DumpBinaryData, sw, fs, listMessageAndOffsets, dumpdataOptions.Count, offsetBase, ref totalCountUTF8, ref totalCountOriginal); sw.WriteLine("Finish read partition {0} to {1}. Earliese:{2} latest:{3} ", partitionID, offsetLast, earliest, latest); Logger.InfoFormat("Finish read partition {0} to {1}. 
Earliese:{2} latest:{3} ", partitionID, offsetLast, earliest, latest); offsetBase = offsetLast + 1; if (totalCount - lastNotifytotalCount > 1000) { Console.WriteLine("Partition: {0} totally read {1} will continue read from {2}", partitionID, totalCount, offsetBase); lastNotifytotalCount = totalCount; } } else { if (offsetBase == latest) { sw.WriteLine("Hit end of queue."); } sw.WriteLine("Finish read partition {0} to {1}. Earliese:{2} latest:{3} ", partitionID, offsetLast, earliest, latest); Logger.InfoFormat("Finish read partition {0} to {1}. Earliese:{2} latest:{3} ", partitionID, offsetLast, earliest, latest); Console.WriteLine("Partition: {0} totally read {1} Hit end of queue {2}", partitionID, totalCount, offsetBase); break; } Thread.Sleep(1000); #endregion } l++; if (totalCount >= dumpdataOptions.Count && dumpdataOptions.Count > 0) { return(true); } } } #endregion Logger.InfoFormat("Topic:{0} Partitoin:{1} Finish Read. Earliest:{2} Latest:{3}, totalCount:{4} " , dumpdataOptions.Topic, partitionID, earliest, latest, totalCount); sw.WriteLine("Topic:{0} Partitoin:{1} Finish Read. Earliest:{2} Latest:{3}, totalCount:{4} \r\n " , dumpdataOptions.Topic, partitionID, earliest, latest, totalCount); } } if (totalCount >= dumpdataOptions.Count && dumpdataOptions.Count > 0) { return(true); } else { return(false); } }
/// <summary>
/// Shared entry point for console and Storm hosting. Generates the per-verb
/// usage assist files, configures ServicePointManager, parses the command verb
/// and its sub-arguments, then dispatches to the helper implementing the verb.
/// Exits the process with code -1 when either parse step fails.
/// </summary>
/// <param name="taskIndexInStorm">When &gt;= 0 the index is appended to the
/// consumer group's ConsumerId and File so parallel Storm tasks do not collide;
/// pass a negative value when not hosted in Storm.</param>
/// <param name="args">Raw command-line arguments; args[0] is the verb.</param>
public static void MainInternal(int taskIndexInStorm, string[] args)
{
    // Emit usage/assist files for every supported verb.
    GenerateAssistFile("producesimple");
    GenerateAssistFile("produceperftest");
    GenerateAssistFile("producemonitor");
    GenerateAssistFile("eventserverperftest");
    GenerateAssistFile("consumesimple");
    GenerateAssistFile("consumegroup");
    GenerateAssistFile("consumegroupmonitor");
    GenerateAssistFile("topic");

    ServicePointManager.DefaultConnectionLimit = 5000;
    ServicePointManager.UseNagleAlgorithm = false;

    var log4netSection = ConfigurationManager.GetSection("log4net");
    if (log4netSection != null)
    {
        //XmlConfigurator.Configure();
    }

    // Step 1: resolve the verb into the matching sub-option object.
    KafkaNETExampleCommandVerb commandOptions = new KafkaNETExampleCommandVerb();
    try
    {
        commandOptions.Parse(args);
    }
    catch (Exception e)
    {
        Logger.ErrorFormat("{0}", e.FormatException());
        Console.WriteLine(KafkaNETExampleCommandVerb.GetUsage());
        Environment.Exit(-1);
    }

    // Step 2: let the verb-specific option object parse the remaining arguments.
    KafkaNETExampleSubArguments realOption = KafkaNETExampleCommandVerb.ActiveSubOption;
    try
    {
        realOption.Parse(args);
    }
    catch (Exception e)
    {
        Logger.ErrorFormat("{0}", e.FormatException());
        Console.WriteLine(realOption.GetUsage(false));
        Environment.Exit(-1);
    }

    Logger.InfoFormat("All arguments of {0}: \r\n{1}", KafkaNETExampleCommandVerb.AssemblyName, realOption.GetArgDict());

    // Dispatch on the already-lower-cased verb.
    switch (KafkaNETExampleCommandVerb.Verb)
    {
        case "producesimple":
        case "produceroundrobin":
            ProduceSimpleHelperOption produceroundrobinOptions = (ProduceSimpleHelperOption)realOption;
            ProduceSimpleHelper.Run(produceroundrobinOptions);
            break;
        case "produceperftest":
        case "producewrapper":
            ProducePerfTestHelperOption producewrapperOption = (ProducePerfTestHelperOption)realOption;
            (new ProducePerfTestHelper()).Run(producewrapperOption);
            break;
        case "producem":
        case "producemonitor":
            ProduceMonitorHelperOptions produceMonitorOptions = (ProduceMonitorHelperOptions)realOption;
            ProduceMonitorHelper.Run(produceMonitorOptions);
            break;
        case "eventserverperftest":
            JavaEventServerPerfTestHelperOptions evetServerPerfTestOptions = (JavaEventServerPerfTestHelperOptions)realOption;
            (new JavaEventServerPerfTestHelper()).Run(evetServerPerfTestOptions);
            break;
        case "consumesimple":
        case "dumpdata":
            ConsumeDataHelperArguments dumpdataOptions = (ConsumeDataHelperArguments)realOption;
            ConsumeSimpleHelper.ConsumeDataSimple(dumpdataOptions);
            break;
        case "consumegroup":
        case "dumpdataasconsumergroup":
            ConsumeGroupHelperOptions cgOptions = (ConsumeGroupHelperOptions)realOption;
            if (taskIndexInStorm >= 0)
            {
                cgOptions.ConsumerId = cgOptions.ConsumerId + taskIndexInStorm.ToString();
                // NOTE(review): ConsumerId already has taskIndexInStorm appended by the
                // line above, so the index appears twice in the resulting file name —
                // confirm this is intended.
                cgOptions.File = cgOptions.ConsumerId + taskIndexInStorm.ToString() + cgOptions.File;
            }
            ConsumerGroupHelper.DumpMessageAsConsumerGroup(cgOptions);
            break;
        case "latestoffsetofconsumergroup":
        case "consumegroupmonitor":
        case "consumegroupm":
        case "consumem":
            ConsumeGroupMonitorHelperOptions dcgOptions = (ConsumeGroupMonitorHelperOptions)realOption;
            ConsumeGroupMonitorHelper.DumpConsumerGroupOffsets(dcgOptions);
            break;
        case "topic":
            TopicHelperArguments dtOptions = (TopicHelperArguments)realOption;
            TopicHelper.DumpTopicMetadataAndOffset(dtOptions);
            break;
        case "test":
            var testOptions = (TestHelperOptions)realOption;
            TestHelper.Run(testOptions);
            break;
        default:
            // Consistency fix: use ErrorFormat like every other log call in this
            // file instead of Logger.Error(string.Format(...)).
            Logger.ErrorFormat("Invalid verb={0}", KafkaNETExampleCommandVerb.Verb);
            return;
    }
}