/// <summary>
/// Writes time-series metadata to specified Kafka <paramref name="topic"/>.
/// </summary>
/// <param name="metadata">Source time-series metadata object to write to Kafka.</param>
/// <param name="router">Kafka router connection.</param>
/// <param name="topic">Kafka topic.</param>
public static void WriteToKafka(TimeSeriesMetadata metadata, BrokerRouter router, string topic)
{
    if ((object)metadata == null)
        throw new ArgumentNullException(nameof(metadata));

    if ((object)router == null)
        throw new ArgumentNullException(nameof(router));

    if (string.IsNullOrWhiteSpace(topic))
        throw new ArgumentNullException(nameof(topic));

    using (MemoryStream stream = new MemoryStream())
    {
        Serialize(metadata, stream, true);

        using (Producer producer = new Producer(router))
        {
            Message[] messages = new Message[2];
            byte[] timeKey = BitConverter.GetBytes(DateTime.UtcNow.Ticks);

            // First message used to serialize metadata size (since metadata messages can be large)
            messages[SizeMessage] = new Message
            {
                Key = timeKey,
                Value = BitConverter.GetBytes(stream.Length)
            };

            // Second message used to serialize metadata value
            messages[ValueMessage] = new Message
            {
                Key = timeKey,
                Value = stream.ToArray()
            };

            // Send meta-data to Kafka
            producer.SendMessageAsync(topic, messages).Wait();
        }
    }
}
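For reference, a minimal usage sketch of the WriteToKafka call above. The broker address, topic name, and the LoadCurrentMetadata helper are illustrative assumptions only; KafkaOptions, BrokerRouter, and TimeSeriesMetadata are the same types used throughout these snippets.

// Hedged usage sketch: broker URI, topic name, and LoadCurrentMetadata are
// hypothetical placeholders, not values taken from the source.
KafkaOptions options = new KafkaOptions(new Uri("http://kafka1:9092"));

using (BrokerRouter router = new BrokerRouter(options))
{
    TimeSeriesMetadata metadata = LoadCurrentMetadata(); // hypothetical helper that yields a populated metadata set
    WriteToKafka(metadata, router, "device-metadata");   // publishes the size/value message pair keyed by UTC ticks
}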
/// <summary>
/// Attempts to connect to data output stream.
/// </summary>
/// <remarks>
/// Derived classes should attempt connection to data output stream here. Any exceptions thrown
/// by this implementation will result in restart of the connection cycle.
/// </remarks>
protected override void AttemptConnection()
{
    m_router = new BrokerRouter(new KafkaOptions(m_servers)
    {
        Log = new TimeSeriesLogger
        (
            (status, args) => OnStatusMessage(MessageLevel.Info, string.Format(status, args)),
            ex => OnProcessException(MessageLevel.Warning, ex)
        )
    });

    m_producer = new Producer(m_router);
    MetadataRefreshOperation.RunOnceAsync();
}
/// <summary>
/// Attempts to disconnect from data output stream.
/// </summary>
/// <remarks>
/// Derived classes should attempt disconnect from data output stream here. Any exceptions thrown
/// by this implementation will be reported to host via <see cref="AdapterBase.ProcessException"/> event.
/// </remarks>
protected override void AttemptDisconnection()
{
    if ((object)m_producer != null)
    {
        m_producer.Dispose();
        m_producer = null;
    }

    if ((object)m_router != null)
    {
        m_router.Dispose();
        m_router = null;
    }
}
static void Main(string[] args)
{
    var options = new KafkaOptions(new Uri("http://kafka1:9092"), new Uri("http://kafka2:9092"))
    {
        Log = new ConsoleLog()
    };

    var router = new BrokerRouter(options);
    var client = new Producer(router);

    // Track the last 50 round-trip times and per-second message rates
    var timing = new RollingQueue<double>(50);
    var rate = new RollingQueue<double>(50);
    var second = DateTime.Now.Second;
    var count = 0;

    // Consumer task: read timestamps back and report round-trip time and rate once per second
    Task.Run(() =>
    {
        var consumer = new Consumer(new ConsumerOptions("latencies", router));

        // Set the consumer's starting offset for each partition from the fetched topic offsets
        var position = consumer.GetTopicOffsetAsync("latencies");
        position.Wait();

        consumer.SetOffsetPosition(
            position.Result
                .Select(p => new OffsetPosition(p.PartitionId, p.Offsets.First()))
                .ToArray()
        );

        foreach (var data in consumer.Consume())
        {
            count++;

            // Message value is the send-time ticks; compute round-trip time in milliseconds
            var rtt = (DateTime.Now - new DateTime(
                long.Parse(Encoding.UTF8.GetString(data.Value))
            )).TotalMilliseconds;

            if (rtt < 1000)
                timing.Enqueue(rtt);

            if (second != DateTime.Now.Second)
            {
                second = DateTime.Now.Second;
                rate.Enqueue(count);
                count = 0;

                Console.WriteLine("Rate: {0} pps.\t{1} ",
                    rate.Average().ToString("N2"),
                    (rtt < 1000) ? "RTT: " + timing.Average().ToString("N2") + " ms." : string.Empty
                );
            }
        }
    });

    // Producer loop: publish ten timestamp messages roughly every millisecond
    while (true)
    {
        client.SendMessageAsync("latencies", new[]
        {
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString()),
            new Message(DateTime.Now.Ticks.ToString())
        }, 1);

        Thread.Sleep(1);
    }

    // Not reached; the send loop above never exits
    client.Dispose();
    router.Dispose();
}