/// <summary>
/// Connects to the given Kafka broker and writes every message from
/// <paramref name="topic"/> to the console. Consume() yields a blocking,
/// never-ending stream, so this method does not return.
/// </summary>
private static void Consume(string broker, string topic)
{
    var routerOptions = new KafkaOptions(new Uri(broker));
    var consumer = new Consumer(new ConsumerOptions(topic, new BrokerRouter(routerOptions)));

    // Consume() blocks and yields messages forever.
    foreach (var message in consumer.Consume())
    {
        Console.WriteLine($"Response: Partition {message.Meta.PartitionId}, Offset {message.Meta.Offset}:{message.Value.ToUtf8String()}");
    }
}
/// <summary>
/// Sends a single timestamped test message to the "testCockpit" topic on the
/// configured broker, then waits for a key press before exiting.
/// </summary>
static void Main(string[] args)
{
    var options = new KafkaOptions(new Uri("http://sjkap556:9092"));
    var router = new BrokerRouter(options);

    // FIX: dispose the producer so buffered messages are flushed and the
    // underlying connections are released (the original leaked it).
    using (var client = new KafkaNet.Producer(router))
    {
        for (int i = 0; i < 1; i++)
        {
            client.SendMessageAsync("testCockpit", new[] { new Message(DateTime.Now + " -- Teste: " + i) }).Wait();
        }

        Console.ReadLine();
    }
}
/// <summary>
/// Sends a single greeting message to the "TestHarness" topic on the local
/// broker and disposes the producer.
/// </summary>
static void Main(string[] args)
{
    var options = new KafkaOptions(new Uri("http://localhost:9092"));
    var router = new BrokerRouter(options);

    // FIX: proper using block instead of the original's empty
    // `using (client) { }` trailer; the producer is now disposed even if the
    // send throws.
    using (var client = new Producer(router))
    {
        var message = "Hello Kafka from C# - 2";
        client.SendMessageAsync("TestHarness", new[] { new Message(message) }).Wait();
    }
}
/// <summary>
/// Creates a string/string Confluent consumer from the given options and
/// forwards its diagnostic events to this class's handlers.
/// </summary>
protected ConsumerWrapper(ILogger logger, KafkaOptions kafkaOptions, string groupId = null)
{
    _logger = logger;

    var keyDeserializer = new StringDeserializer(Encoding.UTF8);
    var valueDeserializer = new StringDeserializer(Encoding.UTF8);
    Consumer = new Consumer<string, string>(
        CreateConsumerConfig(kafkaOptions, groupId),
        keyDeserializer,
        valueDeserializer);

    // Surface librdkafka diagnostics through the wrapper's handlers.
    Consumer.OnLog += OnLog;
    Consumer.OnError += OnError;
    Consumer.OnStatistics += OnStatistics;
    Consumer.OnConsumeError += OnConsumeError;
}
public void KafkaWithFallback_ShouldThrowsException_WhenFallbackIsNull()
{
    // Arrange
    var kafkaOptions = new KafkaOptions(new List<string> { "broker" }, "topicName");
    var formatter = new JsonFormatter();
    var sinkConfiguration = new LoggerConfiguration().WriteTo;

    // Act + Assert: a null fallback sink must be rejected with the
    // "fallback" parameter name.
    Assert.Throws<ArgumentNullException>(
        "fallback",
        () => sinkConfiguration.Kafka(formatter, kafkaOptions, null, TimeSpan.Zero));
}
/// <summary>
/// Publishes the configured queue action to Kafka and returns a factory that
/// resolves the workflow's next action from the registry.
/// </summary>
public Func<WorkflowAction> Execute()
{
    var options = new KafkaOptions(WorkflowQueueActionConfig.Servers.Select(i => new Uri(i.Url)).ToArray());
    var router = new BrokerRouter(options);

    // FIX: proper using block instead of the original's empty
    // `using (client) { }` trailer; the producer is now disposed even if the
    // send fails.
    using (var client = new Producer(router))
    {
        client.SendMessageAsync(
            WorkflowQueueActionConfig.QueueName,
            new[] { new Message(WorkflowQueueActionConfig.QueueAction) }).Wait();
    }

    return () => WorkflowActionRegistry()[WorkflowActionConfiguration().NextAction];
}
/// <summary>
/// Interactive demo: a background task prints everything consumed from
/// "TestHarness" while the main loop reads console input and produces it.
/// Typing "quit" exits; an empty line sends 20 messages in quick succession.
/// </summary>
static void Main(string[] args)
{
    //create an options file that sets up driver preferences
    var options = new KafkaOptions(new Uri("http://CSDKAFKA01:9092"), new Uri("http://CSDKAFKA02:9092"))
    {
        Log = new ConsoleLog()
    };

    //start an out of process thread that runs a consumer that will write all received messages to the console
    Task.Factory.StartNew(() =>
    {
        var consumer = new Consumer(new ConsumerOptions("TestHarness", new BrokerRouter(options)));
        foreach (var data in consumer.Consume())
        {
            Console.WriteLine("Response: P{0},O{1} : {2}", data.Meta.PartitionId, data.Meta.Offset, data.Value.ToUTF8String());
        }
    });

    //create a producer to send messages with
    var producer = new Producer(new BrokerRouter(options));

    Console.WriteLine("Type a message and press enter...");
    while (true)
    {
        var message = Console.ReadLine();
        if (message == "quit")
        {
            break;
        }

        if (string.IsNullOrEmpty(message))
        {
            //special case, send multi messages quickly
            // NOTE(review): these sends are fire-and-forget; failures only
            // surface through the ContinueWith callback's t.Result access.
            for (int i = 0; i < 20; i++)
            {
                producer.SendMessageAsync("TestHarness", new[] { new Message(i.ToString()) })
                .ContinueWith(t =>
                {
                    t.Result.ForEach(x => Console.WriteLine("Complete: {0}, Offset: {1}", x.PartitionId, x.Offset));
                });
            }
        }
        else
        {
            // NOTE(review): this send is never awaited — errors are unobserved.
            producer.SendMessageAsync("TestHarness", new[] { new Message(message) });
        }
    }

    // Empty using block is used purely to dispose the producer on exit.
    using (producer) { }
}
/// <summary>
/// Form entry point: initializes the UI, wires up the Kafka producer, loads
/// the input file and starts the worker threads.
/// </summary>
public Form1()
{
    InitializeComponent();

    // Initialize the Kafka client.
    options = new KafkaOptions(new Uri("http://stampsnet.hashtagsource.com:9092"));
    router = new BrokerRouter(options);
    client = new KafkaNet.Producer(router);

    // Read the input file, then spin up the worker threads.
    leArquivoDeEntradas();
    iniciaThreads();
}
/// <summary>
/// Best-effort Kafka connection: on any failure the sender is marked as
/// disconnected instead of propagating the exception.
/// </summary>
public KafkaSender()
{
    try
    {
        options = new KafkaOptions(new Uri(KAFKA_SERVER_ADDRESS));
        router = new BrokerRouter(options);
        client = new Producer(router);
        isConnected = true;
    }
    catch (Exception) // FIX: drop the unused `e` variable (compiler warning)
    {
        // Deliberate swallow: callers are expected to check isConnected
        // before sending. NOTE(review): consider logging the exception.
        isConnected = false;
    }
}
public void KafkaWithFallback_ShouldThrowsException_WhenFallbackTimeIsNonPositive(TimeSpan value)
{
    // Arrange
    var kafkaOptions = new KafkaOptions(new List<string> { "broker" }, "topicName");
    var formatter = new JsonFormatter();
    var fallbackSink = Mock.Of<ILogEventSink>();
    var sinkConfiguration = new LoggerConfiguration().WriteTo;

    // Act + Assert: a non-positive fallback time must be rejected with the
    // "fallbackTime" parameter name.
    Assert.Throws<ArgumentOutOfRangeException>(
        "fallbackTime",
        () => sinkConfiguration.Kafka(formatter, kafkaOptions, fallbackSink, value));
}
/// <summary>
/// Consumes "IDGTestTopic" from the local broker and prints each message
/// value as UTF-8 text. Consume() blocks and never ends.
/// </summary>
static void Main(string[] args)
{
    const string topic = "IDGTestTopic";
    var brokerUri = new Uri("http://localhost:9092");

    var consumer = new Consumer(
        new ConsumerOptions(topic, new BrokerRouter(new KafkaOptions(brokerUri))));

    foreach (var message in consumer.Consume())
    {
        Console.WriteLine(Encoding.UTF8.GetString(message.Value));
    }

    Console.ReadLine();
}
/// <summary>
/// Sends a single payload to the given Kafka topic on the local broker.
/// </summary>
/// <param name="payload">Message body to publish.</param>
/// <param name="topic">Destination topic name.</param>
public async Task SendToKafka(string payload, string topic)
{
    Message msg = new Message(payload);
    Uri uri = new Uri("http://localhost:9092");
    var options = new KafkaOptions(uri);
    var router = new BrokerRouter(options);

    // FIX: using guarantees disposal even when the send faults; the
    // original only called Dispose() on the success path.
    using (var client = new Producer(router))
    {
        var response = await client.SendMessageAsync(topic, new List<Message> { msg });
        //Console.WriteLine(response[0].Error);
    }
}
/// <summary>
/// Creates a consumer for <paramref name="topic"/> on the broker at
/// <paramref name="url"/>.
/// </summary>
/// <param name="url">Broker address; must be non-empty.</param>
/// <param name="topic">Topic to consume; must be non-empty.</param>
/// <param name="delay">Polling delay in milliseconds (kept for interface compatibility).</param>
/// <exception cref="ArgumentNullException">url or topic is null or empty.</exception>
public KafkaConsumer(string url, string topic, int delay = 2000)
{
    // FIX: the original passed the human-readable message as the paramName
    // argument of ArgumentNullException; pass the parameter name and the
    // message separately so the exception reports correctly.
    if (string.IsNullOrEmpty(url))
    {
        throw new ArgumentNullException(nameof(url), "Url cannot be null");
    }

    if (string.IsNullOrEmpty(topic))
    {
        throw new ArgumentNullException(nameof(topic), "topic cannot be null");
    }

    _options = new KafkaOptions(new Uri(url));
    _router = new BrokerRouter(_options);
    _consumer = new Consumer(new ConsumerOptions(topic, _router));
}
/// <summary>
/// Picks the listener implementation matching TValue: Avro for
/// ISpecificRecord/GenericRecord payloads (schema required), Protobuf for
/// Google.Protobuf.IMessage payloads, otherwise the plain listener.
/// </summary>
/// <exception cref="ArgumentNullException">
/// avroSchema is missing for an Avro-typed TValue.
/// </exception>
private static KafkaListener<TKey, TValue> CreateFor<TKey, TValue>(
    ITriggeredFunctionExecutor executor,
    bool singleDispatch,
    KafkaOptions options,
    string brokerList,
    string topic,
    string consumerGroup,
    string eventHubConnectionString,
    ILogger logger,
    string avroSchema = null)
{
    var valueType = typeof(TValue);

    if (typeof(ISpecificRecord).IsAssignableFrom(valueType) || typeof(GenericRecord).IsAssignableFrom(valueType))
    {
        // Avro payloads cannot be deserialized without a schema.
        if (string.IsNullOrWhiteSpace(avroSchema))
        {
            throw new ArgumentNullException(nameof(avroSchema), $@"parameter is required when creating an Avro-based Listener");
        }

        return new KafkaListenerAvro<TKey, TValue>(executor, singleDispatch, options, brokerList, topic, consumerGroup, eventHubConnectionString, avroSchema, logger);
    }

    if (typeof(Google.Protobuf.IMessage).IsAssignableFrom(valueType))
    {
        return new KafkaListenerProtoBuf<TKey, TValue>(executor, singleDispatch, options, brokerList, topic, consumerGroup, eventHubConnectionString, logger);
    }

    return new KafkaListener<TKey, TValue>(executor, singleDispatch, options, brokerList, topic, consumerGroup, eventHubConnectionString, logger);
}
/// <summary>
/// Blocks forever consuming "PingPongTopic", counts each received message and
/// replies with a "Pong_Message" carrying the same key.
/// </summary>
public static void ConsumirMsg()
{
    var brokerOptions = new KafkaOptions(new Uri("http://localhost:9092"));
    var consumer = new KafkaNet.Consumer(new ConsumerOptions("PingPongTopic", new BrokerRouter(brokerOptions)));

    // Consume() is a blocking, never-ending stream.
    foreach (var message in consumer.Consume())
    {
        ContadorMensajesRecibidos++;
        Console.WriteLine("Response: P{0},O{1} : {2}, key: " + Encoding.UTF8.GetString(message.Key) + ", ConsumerTaskAccount: " + consumer.ConsumerTaskCount, message.Meta.PartitionId, message.Meta.Offset, Encoding.UTF8.GetString(message.Value));
        Productor.ProducirMsg("Pong_Message", Encoding.UTF8.GetString(message.Key));
    }
}
/// <summary>
/// Test double that simply forwards all construction arguments to the base
/// KafkaListener; exists so tests can instantiate the listener directly.
/// </summary>
public KafkaListenerForTest(ITriggeredFunctionExecutor executor, bool singleDispatch, KafkaOptions options, KafkaListenerConfiguration kafkaListenerConfiguration, object valueDeserializer, ILogger logger) : base(executor, singleDispatch, options, kafkaListenerConfiguration, valueDeserializer, logger) { }
/// <summary>
/// Builds "topic + subTopic + request" as the payload and publishes it to the
/// shared request topic, blocking until the send completes.
/// </summary>
public static void ProduceRequest(string subTopicAsPayload, string requestString)
{
    string topic = ServiceTopics.RequestTopic;
    string payload = topic + subTopicAsPayload + requestString;
    Message msg = new Message(payload);

    Uri uri = new Uri("http://localhost:9092");
    var options = new KafkaOptions(uri);
    var router = new BrokerRouter(options);

    // FIX: using fixes the original's producer leak.
    using (var client = new KafkaNet.Producer(router))
    {
        client.SendMessageAsync(topic, new List<Message> { msg }).Wait();
    }
}
/// <summary>
/// Starts a background task that consumes the configured topic, transforms
/// each message into an Elasticsearch model and sends it; per-message errors
/// are logged and consumption continues. Blocks on Console.ReadLine().
/// </summary>
public void Run()
{
    var options = new KafkaOptions(new Uri(ConfigurationManager.AppSettings["KAFKA_HOST"].ToString()))
    {
        Log = new ConsoleLog()
    };

    // Receive data from Kafka on a background task.
    Task.Factory.StartNew(() =>
    {
        var consumer = new Consumer(new ConsumerOptions(_topicName, new BrokerRouter(options))
        {
            Log = new ConsoleLog()
        });

        int count = 0;
        _loger.Info(DateTime.Now.ToString(), "");

        foreach (var data in consumer.Consume())
        {
            try
            {
                // Transform the message pulled from Kafka into a model.
                // Non-bulk path:
                var model = dataUtil.DataModeling(data.Value, "one");
                ElsticSerachSend((ElasticDataModel)model);

                // Bulk path (disabled):
                //dataList.Add((ElasticDataModel)dataUtil.DataModeling(data.Value, "bulk"));
                // NOTE(review): this bulk-flush branch is empty — dead code
                // or an unfinished feature; confirm intent.
                if (dataList.Count >= 100)
                {
                }
                count++;
                //Console.WriteLine("Response: P{0},O{1} : {2}", data.Meta.PartitionId, data.Meta.Offset, data.Value.ToUtf8String());
            }
            catch (Exception ex)
            {
                _loger.Error(ex);
            }
        }

        _loger.Info(DateTime.Now.ToString() + " / " + count.ToString(), "");
    });

    Console.ReadLine();
}
//-------------------------------------------------------------------------------------------------------------
/// <summary>
/// Publishes the given text to the "mytopic" Kafka topic, then marks the most
/// recently stored message row as delivered and returns its id plus a status
/// string as JSON. Returns Json("hata") on any failure.
/// </summary>
public JsonResult SendKafka(string mesajIcerik)
{
    try
    {
        int idLog = 0; // all log flags start at 0; updated to 1 once the send is considered successful

        Message msg = new Message(mesajIcerik);
        Uri uri = new Uri("http://localhost:9092");
        var options = new KafkaOptions(uri);
        var router = new BrokerRouter(options);
        var producer = new Producer(router);

        // NOTE(review): this task is never awaited or waited on — the send is
        // fire-and-forget and failures go unobserved; confirm this is intended.
        var result = producer.SendMessageAsync("mytopic", new List <Message> { msg });

        using (SqlConnection sqlCon = new SqlConnection(connectionString))
        {
            sqlCon.Open();

            // Fetch the id of the most recently stored message.
            string query2 = "SELECT MESSAGEID FROM TBLMESSAGE WHERE MESSAGEID=(SELECT MAX(MESSAGEID) FROM TBLMESSAGE)";
            SqlCommand sqlCmd2 = new SqlCommand(query2, sqlCon);
            Int32 newId = (Int32)sqlCmd2.ExecuteScalar();

            Int32 kafkaID = 1; // the Kafka id cannot be retrieved, so it is assumed to have arrived as 1

            string sonuc = " ";
            if (newId > 0 && kafkaID > 0) // check whether the latest id was fetched and the Kafka message "went through"
            {
                sonuc = "mesajıniz başarıyla gönderildi";

                // On success, set the latest message's log column to 1.
                string query3 = "UPDATE TBLMESSAGE SET MESSAGELOG=1 WHERE MESSAGEID=(SELECT MAX(MESSAGEID) FROM TBLMESSAGE) ";
                SqlCommand sqlCmd3 = new SqlCommand(query3, sqlCon);
                idLog = (Int32)sqlCmd3.ExecuteNonQuery();
            }
            else
            {
                sonuc = "mesaj gönderilirken hata oluştu";
            }

            var model = new { id = newId, sonuc = sonuc };
            return(Json(model, JsonRequestBehavior.AllowGet));
        }
    }
    catch (Exception)
    {
        return(Json("hata"));
    }
}
/// <summary>
/// Consumes the "desk-msg" topic and prints each message's partition, offset
/// and UTF-8 value. Consume() blocks and never ends.
/// </summary>
static void Main(string[] args)
{
    // NOTE(review): both URIs point at the same broker; presumably one was
    // meant to be a second host — confirm.
    var options = new KafkaOptions(new Uri("http://localhost:9092"), new Uri("http://localhost:9092"));

    // FIX: removed the original's unused `router` local (a second
    // BrokerRouter was constructed inline below anyway).
    var consumer = new KafkaNet.Consumer(new ConsumerOptions("desk-msg", new BrokerRouter(options)));

    foreach (var message in consumer.Consume())
    {
        Console.WriteLine("Response: P{0},O{1} : {2}", message.Meta.PartitionId, message.Meta.Offset, Encoding.UTF8.GetString(message.Value));
    }
}
/// <summary>
/// Serves GetTypes And GetStatus Methods.
/// Serializes an empty list of T and publishes it to the response topic for
/// the given sub-topic using a Kafka producer, blocking until the send
/// completes.
/// </summary>
/// <typeparam name="T">Item type serialized into the response payload.</typeparam>
/// <param name="subTopic">Suffix appended to the shared response topic.</param>
/// <param name="uri">Kafka broker address.</param>
private static void ServeGetTpesAndStatus<T>(string subTopic, Uri uri)
{
    List<T> itemList = new List<T>();
    string response = JsonConvert.SerializeObject(itemList);
    string responseTopic = ServiceTopics.ResponseTopic + subTopic;
    Message msg = new Message(response);

    var responseOptions = new KafkaOptions(uri);
    var responseRouter = new BrokerRouter(responseOptions);

    // FIX: using fixes the original's producer leak.
    using (var producer = new Producer(responseRouter))
    {
        producer.SendMessageAsync(responseTopic, new List<Message> { msg }).Wait();
    }
}
/// <summary>
/// Publishes a single message to the configured Topic on the configured Host,
/// blocking until the send completes.
/// </summary>
private void SendTheMessageToRemoteHost(string message)
{
    var uri = new Uri(Host);
    var options = new KafkaOptions(uri);
    var msg = new Message(message);
    var router = new BrokerRouter(options);

    // FIX: using fixes the original's producer leak.
    using (var client = new Producer(router))
    {
        client.SendMessageAsync(Topic, new List<Message> { msg }).Wait();
    }
}
/// <summary>
/// Builds a BrokerRouter from the comma-separated "BrokerList" app setting.
/// </summary>
private static BrokerRouter InitDefaultConfig()
{
    // Parse the configured broker list into URIs.
    var brokerUris = new List<Uri>();
    foreach (string broker in ConfigurationManager.AppSettings["BrokerList"].Split(','))
    {
        brokerUris.Add(new Uri(broker));
    }

    return new BrokerRouter(new KafkaOptions(brokerUris.ToArray()));
}
//consumer entry point
/// <summary>
/// Consumes "mytopic" from the local broker and prints each message value as
/// UTF-8 text. Consume() blocks and never ends.
/// </summary>
static void Main(string[] args)
{
    const string topic = "mytopic";
    var options = new KafkaOptions(new Uri("http://localhost:9092"));
    var consumer = new Consumer(new ConsumerOptions(topic, new BrokerRouter(options)));

    foreach (var message in consumer.Consume())
    {
        Console.WriteLine(Encoding.UTF8.GetString(message.Value));
    }

    Console.ReadLine();
}
/// <summary>
/// Parses the comma-separated KafkaNodes program argument, builds the shared
/// producer from the resulting broker URIs and returns it. Each invalid node
/// URI is reported through Program.pex and skipped.
/// </summary>
public static Producer staticInit()
{
    string[] nodeStrings = Program.args.KafkaNodes.Split(',');
    kafkaNodes = new List<Uri>(nodeStrings.Length);

    foreach (string node in nodeStrings)
    {
        try
        {
            kafkaNodes.Add(new Uri(node));
        }
        catch (Exception e)
        {
            Program.pex("Argument Error: Invalid KafkaNodes value: " + node + "; found in: " + Program.args.KafkaNodes, e);
        }
    }

    producer = new Producer(new BrokerRouter(new KafkaOptions(kafkaNodes.ToArray())));
    return producer;
}
/// <summary>
/// Sends one "Hello World &lt;timestamp&gt;" message, keyed by the current second,
/// to the given topic and echoes the produced message to the console.
/// </summary>
private static void Produce(string broker, string topic)
{
    var options = new KafkaOptions(new Uri(broker));
    var router = new BrokerRouter(options);

    // FIX: proper using block instead of the original's empty
    // `using (client) { }` trailer; the producer is disposed even on fault.
    using (var client = new Producer(router))
    {
        var current_datetime = DateTime.Now;
        var key = current_datetime.Second.ToString();
        var events = new[] { new Message("Hello World " + current_datetime.ToString(), key) };

        // NOTE(review): Wait(1500)'s bool result is ignored, so a timed-out
        // send is indistinguishable from success — confirm this is intended.
        client.SendMessageAsync(topic, events).Wait(1500);
        Console.WriteLine("Produced: Key: {0}. Message: {1}", key, events[0].Value.ToUtf8String());
    }
}
public void Dispose_CallsFlushWithTimeout()
{
    // Arrange
    var producer = Substitute.For<IKafkaProducer>();
    var options = new KafkaOptions { Publisher = { FlushTimeout = TimeSpan.MaxValue } };
    var sut = new KafkaRawMessagePublisher(() => producer, options);

    // Act
    sut.Dispose();

    // Assert: disposing the publisher flushes pending messages with the
    // configured timeout and disposes the underlying producer exactly once.
    producer.Received(1).Dispose();
    producer.Received(1).Flush(TimeSpan.MaxValue);
}
/// <summary>
/// Builds the Confluent consumer configuration from the first configured
/// consumer entry and the shared bootstrap settings, starting from the
/// latest offset.
/// </summary>
public KafkaConsumerService(IOptions<KafkaOptions> kafkaOptions, ILogger<KafkaConsumerService> logger, IServiceProvider serviceProvider)
{
    var optionsValue = kafkaOptions.Value;

    KafkaConsumer = optionsValue.Consumers[0];
    conf = new ConsumerConfig
    {
        GroupId = KafkaConsumer.GroupId,
        ClientId = KafkaConsumer.ClientId,
        BootstrapServers = optionsValue.Settings.BootstrapServer,
        AutoOffsetReset = AutoOffsetReset.Latest
    };

    this.kafkaOptions = optionsValue;
    this.serviceProvider = serviceProvider;
}
/// <summary>
/// Creates a buffered Kafka producer for the given broker addresses and
/// subscribes to the statistics heartbeat.
/// </summary>
public DestinationKafka(params Uri[] servers)
{
    var routerOptions = new KafkaOptions(servers) { Log = new ConsoleLogger() };
    _router = new BrokerRouter(routerOptions);

    _producer = new Producer(_router, maximumMessageBuffer: 5000, maximumAsyncRequests: 10)
    {
        BatchSize = 1000,
        BatchDelayTime = TimeSpan.FromSeconds(1)
    };

    StatisticsTracker.OnStatisticsHeartbeat += StatisticsTracker_OnStatisticsHeartbeat;
}
/// <summary>
/// Smoke test: produces one "Test" message to "TestHarness" on the local
/// broker, then echoes consumed messages to the trace output.
/// NOTE(review): Consume() is a blocking, never-ending stream, so this test
/// never terminates on its own — confirm it is excluded from automated runs.
/// </summary>
public void KafkaBaseTest()
{
    var options = new KafkaOptions(new Uri("http://localhost:9092"));
    //var options = new KafkaOptions(new Uri("http://52.36.99.52:9092"));
    var router = new BrokerRouter(options);
    var client = new Producer(router);
    client.SendMessageAsync("TestHarness", new[] { new Message("Test") }).Wait();

    var consumer = new Consumer(new ConsumerOptions("TestHarness", router));
    foreach (var data in consumer.Consume())
    {
        Trace.WriteLine(data.Value.ToUtf8String());
    }
}