/// <summary>
/// Repeatedly fetches messages from one partition of a topic, starting at
/// <paramref name="offsetBase"/>, and appends them to the dump file named in
/// <paramref name="dumpdataOptions"/> until the queue is drained, a fetch
/// fails, or the requested message count is reached.
/// </summary>
/// <param name="kafkaSimpleManager">Manager used to obtain a consumer for the partition.</param>
/// <param name="partitionID">Partition to read.</param>
/// <param name="offsetBase">Offset to start reading from; advanced past each batch.</param>
/// <param name="earliest">Earliest available offset (logged for diagnostics only).</param>
/// <param name="latest">Latest available offset; used to detect end of queue.</param>
/// <param name="dumpdataOptions">Topic, output file, count limit, and dump-format options.</param>
/// <returns>True when a positive requested count was fully read; otherwise false.</returns>
private static bool ConsumeDataOfOnePartition<TKey, TData>(KafkaSimpleManager<TKey, TData> kafkaSimpleManager, int partitionID, long offsetBase, long earliest, long latest, ConsumeDataHelperArguments dumpdataOptions)
{
    using (FileStream fs = File.Open(dumpdataOptions.File, FileMode.Append, FileAccess.Write, FileShare.Read))
    {
        using (StreamWriter sw = new StreamWriter(fs))
        {
            long offsetLast = -1;
            sw.WriteLine("Will read partition {0} from {1}. Earliese:{2} latest:{3} ", partitionID, offsetBase, earliest, latest);
            Logger.InfoFormat("Will read partition {0} from {1}. Earliese:{2} latest:{3} ", partitionID, offsetBase, earliest, latest);
            using (Consumer consumer = kafkaSimpleManager.GetConsumer(dumpdataOptions.Topic, partitionID))
            {
                while (true)
                {
                    // BUGFIX: the original incremented correlationID twice per fetch
                    // (a bare "correlationID++;" statement followed by another
                    // "correlationID++" inside the call). One id per request.
                    correlationID++;
                    List<MessageAndOffset> listMessageAndOffsets = ConsumeSimpleHelper.FetchAndGetMessageAndOffsetList(
                        consumer,
                        correlationID,
                        dumpdataOptions.Topic,
                        partitionID,
                        offsetBase,
                        consumer.Config.FetchSize,
                        kafkaSimpleManager.Config.MaxWaitTime,
                        kafkaSimpleManager.Config.MinWaitBytes);

                    if (listMessageAndOffsets == null)
                    {
                        // Fetch failed; details are in the helper's own logging.
                        Logger.Error("PullMessage got null List<MessageAndOffset>, please check log for detail.");
                        break;
                    }

                    if (listMessageAndOffsets.Any())
                    {
                        // Dump this batch, then advance the read position past its last message.
                        offsetLast = listMessageAndOffsets.Last().MessageOffset;
                        totalCount += listMessageAndOffsets.Count;
                        KafkaConsoleUtil.DumpDataToFile(dumpdataOptions.DumpDataAsUTF8, dumpdataOptions.DumpBinaryData, sw, fs,
                            listMessageAndOffsets, dumpdataOptions.Count, offsetBase, ref totalCountUTF8, ref totalCountOriginal);
                        sw.WriteLine("Finish read partition {0} to {1}. Earliese:{2} latest:{3} ", partitionID, offsetLast, earliest, latest);
                        Logger.InfoFormat("Finish read partition {0} to {1}. Earliese:{2} latest:{3} ", partitionID, offsetLast, earliest, latest);
                        offsetBase = offsetLast + 1;
                        // Console progress notification roughly every 1000 messages.
                        if (totalCount - lastNotifytotalCount > 1000)
                        {
                            Console.WriteLine("Partition: {0} totally read {1} will continue read from {2}", partitionID, totalCount, offsetBase);
                            lastNotifytotalCount = totalCount;
                        }
                    }
                    else
                    {
                        // Empty fetch: nothing more to read right now.
                        if (offsetBase == latest)
                        {
                            sw.WriteLine("Hit end of queue.");
                        }
                        sw.WriteLine("Finish read partition {0} to {1}. Earliese:{2} latest:{3} ", partitionID, offsetLast, earliest, latest);
                        Logger.InfoFormat("Finish read partition {0} to {1}. Earliese:{2} latest:{3} ", partitionID, offsetLast, earliest, latest);
                        Console.WriteLine("Partition: {0} totally read {1} Hit end of queue {2}", partitionID, totalCount, offsetBase);
                        break;
                    }

                    // NOTE(review): this paces consumption to at most one fetch per
                    // second even while data is flowing — presumably deliberate
                    // throttling; confirm before removing.
                    Thread.Sleep(1000);

                    // Stop early once a positive requested count has been satisfied.
                    if (totalCount >= dumpdataOptions.Count && dumpdataOptions.Count > 0)
                    {
                        return true;
                    }
                }
            }
            Logger.InfoFormat("Topic:{0} Partitoin:{1} Finish Read. Earliest:{2} Latest:{3}, totalCount:{4} ",
                dumpdataOptions.Topic, partitionID, earliest, latest, totalCount);
            sw.WriteLine("Topic:{0} Partitoin:{1} Finish Read. Earliest:{2} Latest:{3}, totalCount:{4} \r\n ",
                dumpdataOptions.Topic, partitionID, earliest, latest, totalCount);
        }
    }
    // Success only when a positive requested count was met.
    return totalCount >= dumpdataOptions.Count && dumpdataOptions.Count > 0;
}
/// <summary>
/// Entry point shared by the console tool and Storm tasks: generates assist
/// files, parses the command verb and its sub-arguments, then dispatches to the
/// matching helper. Exits the process with -1 on argument-parsing failure.
/// </summary>
/// <param name="taskIndexInStorm">Storm task index; when >= 0 it is appended to
/// the consumer-group id and output file name so parallel tasks do not collide.
/// Pass a negative value when not running under Storm.</param>
/// <param name="args">Raw command-line arguments; the verb selects the helper.</param>
public static void MainInternal(int taskIndexInStorm, string[] args)
{
    // Emit one sample/assist file per supported verb.
    GenerateAssistFile("producesimple");
    GenerateAssistFile("produceperftest");
    GenerateAssistFile("producemonitor");
    GenerateAssistFile("eventserverperftest");
    GenerateAssistFile("consumesimple");
    GenerateAssistFile("consumegroup");
    GenerateAssistFile("consumegroupmonitor");
    GenerateAssistFile("topic");

    ServicePointManager.DefaultConnectionLimit = 5000;
    ServicePointManager.UseNagleAlgorithm = false;
    // Dead code removed: the original fetched the "log4net" config section only
    // to guard a commented-out XmlConfigurator.Configure() call (empty branch).

    // First pass: parse the verb itself.
    KafkaNETExampleCommandVerb commandOptions = new KafkaNETExampleCommandVerb();
    try
    {
        commandOptions.Parse(args);
    }
    catch (Exception e)
    {
        Logger.ErrorFormat("{0}", e.FormatException());
        Console.WriteLine(KafkaNETExampleCommandVerb.GetUsage());
        Environment.Exit(-1);
    }

    // Second pass: parse the verb-specific sub-arguments.
    KafkaNETExampleSubArguments realOption = KafkaNETExampleCommandVerb.ActiveSubOption;
    try
    {
        realOption.Parse(args);
    }
    catch (Exception e)
    {
        Logger.ErrorFormat("{0}", e.FormatException());
        Console.WriteLine(realOption.GetUsage(false));
        Environment.Exit(-1);
    }

    Logger.InfoFormat("All arguments of {0}: \r\n{1}", KafkaNETExampleCommandVerb.AssemblyName, realOption.GetArgDict());

    // Dispatch on the verb; several verbs are aliases for the same helper.
    switch (KafkaNETExampleCommandVerb.Verb)
    {
        case "producesimple":
        case "produceroundrobin":
            ProduceSimpleHelperOption produceroundrobinOptions = (ProduceSimpleHelperOption)realOption;
            ProduceSimpleHelper.Run(produceroundrobinOptions);
            break;
        case "produceperftest":
        case "producewrapper":
            ProducePerfTestHelperOption producewrapperOption = (ProducePerfTestHelperOption)realOption;
            (new ProducePerfTestHelper()).Run(producewrapperOption);
            break;
        case "producem":
        case "producemonitor":
            ProduceMonitorHelperOptions produceMonitorOptions = (ProduceMonitorHelperOptions)realOption;
            ProduceMonitorHelper.Run(produceMonitorOptions);
            break;
        case "eventserverperftest":
            JavaEventServerPerfTestHelperOptions evetServerPerfTestOptions = (JavaEventServerPerfTestHelperOptions)realOption;
            (new JavaEventServerPerfTestHelper()).Run(evetServerPerfTestOptions);
            break;
        case "consumesimple":
        case "dumpdata":
            ConsumeDataHelperArguments dumpdataOptions = (ConsumeDataHelperArguments)realOption;
            ConsumeSimpleHelper.ConsumeDataSimple(dumpdataOptions);
            break;
        case "consumegroup":
        case "dumpdataasconsumergroup":
            ConsumeGroupHelperOptions cgOptions = (ConsumeGroupHelperOptions)realOption;
            if (taskIndexInStorm >= 0)
            {
                // Make id and file unique per Storm task.
                // NOTE(review): taskIndexInStorm ends up in File twice (via the
                // already-suffixed ConsumerId plus the explicit append) — looks
                // unintentional but preserved; confirm before changing.
                cgOptions.ConsumerId = cgOptions.ConsumerId + taskIndexInStorm.ToString();
                cgOptions.File = cgOptions.ConsumerId + taskIndexInStorm.ToString() + cgOptions.File;
            }
            ConsumerGroupHelper.DumpMessageAsConsumerGroup(cgOptions);
            break;
        case "latestoffsetofconsumergroup":
        case "consumegroupmonitor":
        case "consumegroupm":
        case "consumem":
            ConsumeGroupMonitorHelperOptions dcgOptions = (ConsumeGroupMonitorHelperOptions)realOption;
            ConsumeGroupMonitorHelper.DumpConsumerGroupOffsets(dcgOptions);
            break;
        case "topic":
            TopicHelperArguments dtOptions = (TopicHelperArguments)realOption;
            TopicHelper.DumpTopicMetadataAndOffset(dtOptions);
            break;
        case "test":
            var testOptions = (TestHelperOptions)realOption;
            TestHelper.Run(testOptions);
            break;
        default:
            Logger.Error(string.Format("Invalid verb={0}", KafkaNETExampleCommandVerb.Verb));
            return;
    }
}