// Stage logic for the producer GraphStage: forwards each upstream element to
// the Kafka producer and pushes the resulting (in-flight) result downstream.
// Upstream completion/failure is latched into completionState so the stage can
// finish only after outstanding confirmations drain (see CheckForCompletion).
public Logic(ProducerStage <TKey, TValue, TPass> self) : base(self.Shape)
{
    this.self = self;
    // Materialize the underlying producer via the stage's provider.
    this.producer = self.producerProvider();
    // Downstream demand simply pulls the next element from upstream.
    SetHandler(self.Out, onPull: () => TryPull(self.In));
    SetHandler(self.In,
        onPush: () =>
        {
            var msg = Grab(self.In);
            var result = SendToProducer(msg);
            // Count the in-flight send so completion can wait for its confirmation.
            awaitingConfirmation.IncrementAndGet();
            Push(self.Out, result);
        },
        onUpstreamFinish: () =>
        {
            // Record that no more input will arrive; completion happens once
            // all pending confirmations are accounted for.
            inputIsClosed = true;
            completionState.SetResult(NotUsed.Instance);
            CheckForCompletion();
        },
        onUpstreamFailure: cause =>
        {
            inputIsClosed = true;
            // Propagate the upstream failure through the completion task.
            completionState.SetException(cause);
            CheckForCompletion();
        });
}
// Demo entry point: publish one message to the local broker, then echo every
// message consumed from the same topic until the user presses Enter.
public static void Main(string[] args)
{
    using (var producer = new Producer("localhost:9092"))
    using (var topic = producer.Topic("test"))
    {
        var messageBytes = Encoding.UTF8.GetBytes("Hello Kafka");
        // Block until the broker acknowledges the message and reports placement.
        var report = topic.Produce(messageBytes).Result;
        Console.WriteLine($"Produced to Partition: {report.Partition}, Offset: {report.Offset}");
    }

    var consumerConfig = new Config {GroupId = "test-consumer"};
    using (var consumer = new EventConsumer(consumerConfig, "localhost:9092"))
    {
        // Print topic/partition/offset plus the decoded payload for each message.
        consumer.OnMessage += (sender, message) =>
        {
            var body = Encoding.UTF8.GetString(message.Payload, 0, message.Payload.Length);
            Console.WriteLine($"Topic: {message.Topic} Partition: {message.Partition} Offset: {message.Offset} {body}");
        };

        consumer.Subscribe(new List<string> {"test"});
        consumer.Start();

        // Keep consuming until the user hits Enter.
        Console.ReadLine();
    }
}
// Wraps a native librdkafka topic handle, optionally installing a managed
// custom partitioner callback.
internal Topic(SafeKafkaHandle kafkaHandle, Producer producer, string topic, TopicConfig config)
{
    this.producer = producer;
    // Fall back to a default topic configuration when the caller passed none.
    config = config ?? new TopicConfig();
    // Request per-message offset reporting so delivery reports carry offsets.
    config["produce.offset.report"] = "true";
    // Duplicate the native config handle so the managed TopicConfig keeps its
    // own copy — presumably the native topic-creation call consumes the
    // pointer it is given (TODO confirm against librdkafka ownership rules).
    IntPtr configPtr = config.handle.Dup();
    if (config.CustomPartitioner != null)
    {
        // The delegate is stored in the PartitionerDelegate field (not a local)
        // so the GC cannot collect it while native code still holds the
        // function pointer.
        PartitionerDelegate = (IntPtr rkt, IntPtr keydata, UIntPtr keylen, int partition_cnt,
            IntPtr rkt_opaque, IntPtr msg_opaque) =>
        {
            byte[] key = null;
            if (keydata != IntPtr.Zero)
            {
                // Copy the native key bytes into a managed array for the callback.
                key = new byte[(int) keylen];
                Marshal.Copy(keydata, key, 0, (int) keylen);
            }
            // Delegate the partition decision to the user-supplied partitioner.
            return config.CustomPartitioner(this, key, partition_cnt);
        };
        LibRdKafka.topic_conf_set_partitioner_cb(configPtr, PartitionerDelegate);
    }
    handle = kafkaHandle.Topic(topic, configPtr);
}
// Caches the big-endian wire encoding of the key/value schema ids used by the
// Confluent Avro wire format (magic byte + 4-byte big-endian schema id).
//
// Fix: BitConverter.GetBytes emits bytes in host order; the original reversed
// unconditionally, which is only correct on little-endian hosts. Reverse only
// when the host is little-endian so the result is big-endian everywhere.
internal AvroKafkaProducer(RdKafka.Producer producer, RdKafka.Topic topic, int keySchemaId, int valueSchemaId)
{
    _producer = producer;
    _topic = topic;
    _keySchemaId = keySchemaId;
    _valueSchemaId = valueSchemaId;

    _keySchemaBytes = BitConverter.GetBytes(keySchemaId);
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(_keySchemaBytes);
    }

    _valueSchemaBytes = BitConverter.GetBytes(valueSchemaId);
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(_valueSchemaBytes);
    }
}
// Starts a background thread that publishes ~100 timestamp messages per second
// (alternating between two partitions) until cancellation is requested, and
// records publish latency in the "Published" timer.
// Returns the started thread so the caller can join it.
private static Thread StartProducer(CancellationTokenSource tokenSource)
{
    var timer = Metric.Timer("Published", Unit.Events);
    var thread = new Thread(() =>
    {
        var topicConfig = new TopicConfig();
        topicConfig["request.required.acks"] = "0"; // Don't require an ack from the broker.
        var config = new Config
        {
            GroupId = consumerGroupName,
            EnableAutoCommit = true,
            StatisticsInterval = TimeSpan.FromSeconds(10),
            DefaultTopicConfig = topicConfig
        };
        config["socket.blocking.max.ms"] = "1"; // Maximum time a broker socket operation may block.
        config["queue.buffering.max.ms"] = "1"; // Maximum time to buffer data when using async mode.

        using (var publisher = new Producer(config, brokers))
        using (var topic = publisher.Topic(topicName))
        {
            while (!tokenSource.IsCancellationRequested)
            {
                Thread.Sleep(1000);
                for (var i = 0; i < 100; i++)
                {
                    // The payload is the send timestamp, so the continuation can
                    // compute round-trip latency from it.
                    var ticks = DateTime.UtcNow.Ticks;
                    topic.Produce(Encoding.UTF8.GetBytes(ticks.ToString()), partition: (int)(ticks % 2))
                        .ContinueWith(task =>
                        {
                            if (task.Exception != null)
                            {
                                Console.WriteLine("{0}: Error publishing message - {1}", DateTime.Now.ToLongTimeString(), task.Exception);
                                return;
                            }
                            // Ticks are 100ns units; /10000 converts to milliseconds.
                            timer.Record((DateTime.UtcNow.Ticks - ticks) / 10000, TimeUnit.Milliseconds);
                            reports.Add(task.Result);
                        });
                }
            }
            Console.WriteLine("Producer cancelled.");
            // Fix: the original called Thread.CurrentThread.Abort() here. Aborting
            // the current thread is unnecessary (the delegate is about to return)
            // and raises a ThreadAbortException that disrupts clean shutdown.
        }
    });
    thread.Start();
    return thread;
}
// Publishes numMessages sequential-integer payloads to the given topic and
// reports how many deliveries succeeded/failed.
//
// Fix: implements the original "TODO; add continuation, count success/failures"
// — delivery tasks were fired and forgotten, so failures were silently lost.
// BCL names are fully qualified so no additional using directives are needed.
public static void Produce(string broker, string topicName, long numMessages)
{
    using (var producer = new Producer(broker))
    using (Topic topic = producer.Topic(topicName))
    {
        Console.WriteLine($"{producer.Name} producing on {topic.Name}");

        long succeeded = 0;
        long failed = 0;
        var pending = new System.Collections.Generic.List<System.Threading.Tasks.Task>();

        for (int i = 0; i < numMessages; i++)
        {
            byte[] data = Encoding.UTF8.GetBytes(i.ToString());
            // Count each delivery outcome on its continuation; continuations may
            // run concurrently, hence Interlocked.
            pending.Add(topic.Produce(data).ContinueWith(task =>
            {
                if (task.IsFaulted)
                {
                    System.Threading.Interlocked.Increment(ref failed);
                }
                else
                {
                    System.Threading.Interlocked.Increment(ref succeeded);
                }
            }));
        }

        // Wait for all delivery reports so the counts below are complete.
        System.Threading.Tasks.Task.WaitAll(pending.ToArray());
        Console.WriteLine($"Delivered: {succeeded} succeeded, {failed} failed");

        Console.WriteLine("Shutting down");
    }
}
// Interactive producer sample with a custom partitioner: reads lines from the
// console (until "q"), keys each message by its first word, and prints the
// delivery report for each produce.
//
// Fix: the delivery continuation read task.Result without checking for fault —
// a failed produce rethrows inside the continuation and the error is silently
// lost. Faulted tasks are now reported explicitly.
public static void Main(string[] args)
{
    string brokerList = args[0];
    string topicName = args[1];

    var topicConfig = new TopicConfig
    {
        // Demo partitioner: partition by key length, logging each decision.
        CustomPartitioner = (top, key, cnt) =>
        {
            var kt = (key != null) ? Encoding.UTF8.GetString(key, 0, key.Length) : "(null)";
            int partition = (key?.Length ?? 0) % cnt;
            bool available = top.PartitionAvailable(partition);
            Console.WriteLine($"Partitioner topic: {top.Name} key: {kt} partition count: {cnt} -> {partition} {available}");
            return partition;
        }
    };

    using (Producer producer = new Producer(brokerList))
    using (Topic topic = producer.Topic(topicName, topicConfig))
    {
        Console.WriteLine($"{producer.Name} producing on {topic.Name}. q to exit.");
        string text;
        while ((text = Console.ReadLine()) != "q")
        {
            byte[] data = Encoding.UTF8.GetBytes(text);
            byte[] key = null;
            // Use the first word as the key
            int index = text.IndexOf(' ');
            if (index != -1)
            {
                key = Encoding.UTF8.GetBytes(text.Substring(0, index));
            }
            Task<DeliveryReport> deliveryReport = topic.Produce(data, key);
            var unused = deliveryReport.ContinueWith(task =>
            {
                if (task.IsFaulted)
                {
                    Console.WriteLine($"Delivery failed: {task.Exception?.GetBaseException().Message}");
                    return;
                }
                Console.WriteLine($"Partition: {task.Result.Partition}, Offset: {task.Result.Offset}");
            });
        }
    }
}
// Fetches cluster metadata from the given broker list and prints brokers,
// topics, and per-partition replica information to the console.
static async Task PrintMetadata(string brokerList)
{
    using (var producer = new Producer(brokerList))
    {
        var meta = await producer.Metadata();
        Console.WriteLine($"{meta.OriginatingBrokerId} {meta.OriginatingBrokerName}");

        foreach (var broker in meta.Brokers)
        {
            Console.WriteLine($"Broker: {broker.BrokerId} {broker.Host}:{broker.Port}");
        }

        foreach (var topic in meta.Topics)
        {
            Console.WriteLine($"Topic: {topic.Topic} {topic.Error}");
            foreach (var partition in topic.Partitions)
            {
                Console.WriteLine($"  Partition: {partition.PartitionId}");
                Console.WriteLine($"    Replicas: {ToString(partition.Replicas)}");
                Console.WriteLine($"    InSyncReplicas: {ToString(partition.InSyncReplicas)}");
            }
        }
    }
}
// Minimal interactive producer: reads lines from the console (until "q"),
// produces each line to the topic, and prints the delivery report.
//
// Fix: the delivery continuation read task.Result without checking for fault —
// a failed produce rethrows inside the continuation and the error is silently
// lost. Faulted tasks are now reported explicitly.
public static void Main(string[] args)
{
    string brokerList = args[0];
    string topicName = args[1];

    using (Producer producer = new Producer(brokerList))
    using (Topic topic = producer.Topic(topicName))
    {
        Console.WriteLine($"{producer.Name} producing on {topic.Name}. q to exit.");
        string text;
        while ((text = Console.ReadLine()) != "q")
        {
            byte[] data = Encoding.UTF8.GetBytes(text);
            Task<DeliveryReport> deliveryReport = topic.Produce(data);
            var unused = deliveryReport.ContinueWith(task =>
            {
                if (task.IsFaulted)
                {
                    Console.WriteLine($"Delivery failed: {task.Exception?.GetBaseException().Message}");
                    return;
                }
                Console.WriteLine($"Partition: {task.Result.Partition}, Offset: {task.Result.Offset}");
            });
        }
    }
}
// Queries the cluster for all consumer groups (10s timeout) and prints each
// group's coordinator broker, protocol, and per-member metadata sizes.
static async Task ListGroups(string brokerList)
{
    using (var producer = new Producer(brokerList))
    {
        var groups = await producer.ListGroups(TimeSpan.FromSeconds(10));

        Console.WriteLine($"Consumer Groups:");
        foreach (var group in groups)
        {
            Console.WriteLine($"  Group: {group.Group} {group.Error} {group.State}");
            Console.WriteLine($"  Broker: {group.Broker.BrokerId} {group.Broker.Host}:{group.Broker.Port}");
            Console.WriteLine($"  Protocol: {group.ProtocolType} {group.Protocol}");
            Console.WriteLine($"  Members:");

            foreach (var member in group.Members)
            {
                Console.WriteLine($"    {member.MemberId} {member.ClientId} {member.ClientHost}");
                // Raw metadata/assignment blobs are opaque; print their sizes only.
                Console.WriteLine($"    Metadata: {member.MemberMetadata.Length} bytes");
                //Console.WriteLine(System.Text.Encoding.UTF8.GetString(member.MemberMetadata));
                Console.WriteLine($"    Assignment: {member.MemberAssignment.Length} bytes");
                //Console.WriteLine(System.Text.Encoding.UTF8.GetString(member.MemberAssignment));
            }
        }
    }
}
// Starts a background thread that publishes timestamp messages in batches of
// 100 per second (alternating between two partitions) until either the token
// is cancelled or the Messages budget is exhausted, recording latency in the
// "Published" timer.
//
// Fix: 'sent' was incremented with an unsynchronized 'sent++' inside
// concurrently running Task.Run closures and read by the outer thread — a data
// race that could overshoot Messages. Access now goes through
// Interlocked/Volatile, and each batch iteration reserves its slot before
// producing so the budget cannot be exceeded.
private static void StartProducer(CancellationTokenSource tokenSource)
{
    var timer = Metric.Timer("Published", Unit.Events);
    var sent = 0;
    new Thread(() =>
    {
        using (var publisher = new Producer(brokers))
        using (var topic = publisher.Topic(topicName))
        {
            while (!tokenSource.IsCancellationRequested)
            {
                if (Volatile.Read(ref sent) >= Messages) return;
                Thread.Sleep(1000);
                Task.Run(() =>
                {
                    for (var i = 0; i < 100; i++)
                    {
                        // Reserve a send slot atomically; stop this batch once
                        // the overall budget is spent.
                        if (Interlocked.Increment(ref sent) > Messages) return;

                        // The payload is the send timestamp, so the continuation
                        // can compute round-trip latency from it.
                        var ticks = DateTime.UtcNow.Ticks;
                        topic.Produce(Encoding.UTF8.GetBytes(ticks.ToString()), partition: (int)(ticks % 2))
                            .ContinueWith(task =>
                            {
                                if (task.Exception != null)
                                {
                                    Console.WriteLine("{0}: Error publishing message - {1}", DateTime.Now.ToLongTimeString(), task.Exception);
                                    return;
                                }
                                // Ticks are 100ns units; /10000 converts to milliseconds.
                                timer.Record((DateTime.UtcNow.Ticks - ticks) / 10000, TimeUnit.Milliseconds);
                                reports.Add(task.Result);
                            });
                    }
                });
            }
            Console.WriteLine("Producer cancelled.");
        }
    }).Start();
}