// Refreshes m_metadata with the latest serialized metadata record found on the Kafka metadata topic.
// Bumps m_metadataUpdateCount when the observed metadata version changes and schedules a local cache
// write when configured. Read failures are rethrown only when no metadata has ever been loaded.
private void UpdateMetadata()
{
    try
    {
        // Route Kafka client log output through the adapter's status/exception handlers
        KafkaOptions options = new KafkaOptions(m_servers)
        {
            Log = new TimeSeriesLogger(
                (status, args) => OnStatusMessage(MessageLevel.Info, string.Format(status, args)),
                ex => OnProcessException(MessageLevel.Warning, new InvalidOperationException($"[{MetadataTopic}]: {ex.Message}", ex)))
        };

        using (BrokerRouter brokerRouter = new BrokerRouter(options))
        {
            Ticks serializedAt;

            OnStatusMessage(MessageLevel.Info, "Reading latest time-series metadata records from Kafka...");

            TimeSeriesMetadata latestMetadata = TimeSeriesMetadata.ReadFromKafka(brokerRouter, MetadataTopic, status => OnStatusMessage(MessageLevel.Info, status), out serializedAt);

            // Nothing to apply when no metadata record exists on the topic yet
            if ((object)latestMetadata == null)
                return;

            m_metadata = latestMetadata;

            OnStatusMessage(MessageLevel.Info, $"Deserialized {m_metadata.Count:N0} Kafka time-series metadata records, version {m_metadata.Version:N0}, from \"{MetadataTopic}\" serialized at {serializedAt.ToString(MetadataRecord.DateTimeFormat)}");

            // Track how many distinct metadata versions have been observed
            if (m_lastMetadataVersion != MetadataVersion)
            {
                m_lastMetadataVersion = MetadataVersion;
                m_metadataUpdateCount++;
            }

            // Cache metadata locally, if configured
            m_cacheMetadataLocally?.RunOnceAsync();
        }
    }
    catch (Exception ex)
    {
        if (!Enabled)
            return;

        // With no prior metadata the adapter cannot operate - propagate; otherwise
        // keep running on the last known metadata and just log a warning
        if ((object)m_metadata == null)
            throw;

        OnStatusMessage(MessageLevel.Warning, $"Failed to read latest Kafka time-series metadata records from topic \"{MetadataTopic}\": {ex.Message}");
    }
}
/// <summary>
/// Writes time-series metadata to specified Kafka <paramref name="topic"/>.
/// </summary>
/// <param name="metadata">Source time-series metadata object to write to Kafka.</param>
/// <param name="router">Kafka router connection.</param>
/// <param name="topic">Kafka topic.</param>
/// <exception cref="ArgumentNullException"><paramref name="metadata"/>, <paramref name="router"/> or <paramref name="topic"/> is null or white space.</exception>
public static void WriteToKafka(TimeSeriesMetadata metadata, BrokerRouter router, string topic)
{
    if ((object)metadata == null)
        throw new ArgumentNullException(nameof(metadata));

    if ((object)router == null)
        throw new ArgumentNullException(nameof(router));

    if (string.IsNullOrWhiteSpace(topic))
        throw new ArgumentNullException(nameof(topic));

    using (MemoryStream stream = new MemoryStream())
    {
        // Serialization increments the metadata version - see Serialize
        Serialize(metadata, stream, true);

        using (Producer producer = new Producer(router))
        {
            Message[] messages = new Message[2];
            byte[] timeKey = BitConverter.GetBytes(DateTime.UtcNow.Ticks);

            // First message used to serialize metadata size (since metadata messages can be large)
            messages[SizeMessage] = new Message
            {
                Key = timeKey,
                Value = BitConverter.GetBytes(stream.Length)
            };

            // Second message used to serialize metadata value
            messages[ValueMessage] = new Message
            {
                Key = timeKey,
                Value = stream.ToArray()
            };

            // Send meta-data to Kafka - GetAwaiter().GetResult() (instead of Wait()) propagates
            // the original send failure rather than an AggregateException wrapper
            producer.SendMessageAsync(topic, messages).GetAwaiter().GetResult();
        }
    }
}
/// <summary>
/// Caches meta-data locally.
/// </summary>
/// <param name="metadata">Source time-series metadata object to cache.</param>
/// <param name="topic">Kafka topic.</param>
/// <param name="statusMessage">Status message function.</param>
/// <remarks>
/// Best-effort operation: any failure is reported through <paramref name="statusMessage"/> and swallowed.
/// </remarks>
public static void CacheLocally(TimeSeriesMetadata metadata, string topic, Action<string> statusMessage)
{
    // Cache meta-data locally so it can be reviewed
    string cacheFileName = "undefined";

    try
    {
        // Define default cache path
        string cachePath = null;

        try
        {
            // Attempt to retrieve configuration cache path as defined in the config file
            ConfigurationFile configFile = ConfigurationFile.Current;
            CategorizedSettingsElementCollection systemSettings = configFile.Settings["systemSettings"];
            CategorizedSettingsElement configurationCachePathSetting = systemSettings["ConfigurationCachePath"];

            // Use the already retrieved setting instead of indexing the collection a second time
            if ((object)configurationCachePathSetting != null)
                cachePath = FilePath.GetAbsolutePath(configurationCachePathSetting.Value);

            if (string.IsNullOrEmpty(cachePath))
                cachePath = $"{FilePath.GetAbsolutePath("")}{Path.DirectorySeparatorChar}ConfigurationCache{Path.DirectorySeparatorChar}";
        }
        catch (ConfigurationErrorsException)
        {
            // Fall back on default configuration cache folder when config file is unreadable
            cachePath = $"{FilePath.GetAbsolutePath("")}{Path.DirectorySeparatorChar}ConfigurationCache{Path.DirectorySeparatorChar}";
        }

        cacheFileName = Path.Combine(cachePath, $"{topic}.xml");

        // Serialize without incrementing version - this is a read-only local copy
        using (FileStream stream = File.Create(cacheFileName))
            Serialize(metadata, stream, false);
    }
    catch (Exception ex)
    {
        statusMessage?.Invoke($"WARNING: Failed to locally cache current metadata to \"{cacheFileName}\": {ex.Message}");
    }
}
// Static Methods

/// <summary>
/// Serializes <see cref="TimeSeriesMetadata"/> instance to a stream of XML.
/// </summary>
/// <param name="metadata">Source time-series metadata object to serialize.</param>
/// <param name="serializationStream">Destination stream to hold serialized data.</param>
/// <param name="incrementVersion">Determines if metadata version should be incremented.</param>
/// <exception cref="ArgumentNullException"><paramref name="metadata"/> or <paramref name="serializationStream"/> is null.</exception>
public static void Serialize(TimeSeriesMetadata metadata, Stream serializationStream, bool incrementVersion)
{
    if ((object)metadata == null)
        throw new ArgumentNullException(nameof(metadata));

    if ((object)serializationStream == null)
        throw new ArgumentNullException(nameof(serializationStream));

    // Bump serialization version when requested, e.g., when publishing to Kafka
    if (incrementVersion)
        metadata.Version++;

    XmlSerializer serializer = new XmlSerializer(typeof(TimeSeriesMetadata));

    // leaveOpen: true - caller retains ownership of the destination stream
    using (StreamWriter writer = new StreamWriter(serializationStream, Encoding.UTF8, 8192, true))
        serializer.Serialize(writer, metadata);
}
// Static Methods

/// <summary>
/// Serializes <see cref="TimeSeriesMetadata"/> instance to a stream of XML.
/// </summary>
/// <param name="metadata">Source time-series metadata object to serialize.</param>
/// <param name="serializationStream">Destination stream to hold serialized data.</param>
/// <param name="incrementVersion">Determines if metadata version should be incremented.</param>
/// <exception cref="ArgumentNullException"><paramref name="metadata"/> or <paramref name="serializationStream"/> is null.</exception>
public static void Serialize(TimeSeriesMetadata metadata, Stream serializationStream, bool incrementVersion)
{
    if ((object)metadata == null)
        throw new ArgumentNullException(nameof(metadata));

    if ((object)serializationStream == null)
        throw new ArgumentNullException(nameof(serializationStream));

    // Increment serialization version
    if (incrementVersion)
        metadata.Version++;

    XmlSerializer serializer = new XmlSerializer(typeof(TimeSeriesMetadata));

    // Final "true" (leaveOpen) keeps the caller's stream open after the writer is disposed
    using (TextWriter writer = new StreamWriter(serializationStream, Encoding.UTF8, 8192, true))
        serializer.Serialize(writer, metadata);
}
/// <summary>
/// Caches meta-data locally.
/// </summary>
/// <param name="metadata">Source time-series metadata object to cache.</param>
/// <param name="topic">Kafka topic.</param>
/// <param name="statusMessage">Status message function.</param>
/// <remarks>
/// Best-effort operation: any failure is reported through <paramref name="statusMessage"/> and swallowed.
/// </remarks>
public static void CacheLocally(TimeSeriesMetadata metadata, string topic, Action<string> statusMessage)
{
    // Cache meta-data locally so it can be reviewed
    string cacheFileName = "undefined";

    try
    {
        // Define default cache path
        string cachePath = null;

        try
        {
            // Attempt to retrieve configuration cache path as defined in the config file
            ConfigurationFile configFile = ConfigurationFile.Current;
            CategorizedSettingsElementCollection systemSettings = configFile.Settings["systemSettings"];
            CategorizedSettingsElement configurationCachePathSetting = systemSettings["ConfigurationCachePath"];

            // Use the already retrieved setting instead of indexing the collection a second time
            if ((object)configurationCachePathSetting != null)
                cachePath = FilePath.GetAbsolutePath(configurationCachePathSetting.Value);

            if (string.IsNullOrEmpty(cachePath))
                cachePath = $"{FilePath.GetAbsolutePath("")}{Path.DirectorySeparatorChar}ConfigurationCache{Path.DirectorySeparatorChar}";
        }
        catch (ConfigurationErrorsException)
        {
            // Fall back on default configuration cache folder when config file is unreadable
            cachePath = $"{FilePath.GetAbsolutePath("")}{Path.DirectorySeparatorChar}ConfigurationCache{Path.DirectorySeparatorChar}";
        }

        cacheFileName = Path.Combine(cachePath, $"{topic}.xml");

        // Serialize without incrementing version - this is a read-only local copy
        using (FileStream stream = File.Create(cacheFileName))
            Serialize(metadata, stream, false);
    }
    catch (Exception ex)
    {
        statusMessage?.Invoke($"WARNING: Failed to locally cache current metadata to \"{cacheFileName}\": {ex.Message}");
    }
}
/// <summary>
/// Writes time-series metadata to specified Kafka <paramref name="topic"/>.
/// </summary>
/// <param name="metadata">Source time-series metadata object to write to Kafka.</param>
/// <param name="router">Kafka router connection.</param>
/// <param name="topic">Kafka topic.</param>
/// <exception cref="ArgumentNullException"><paramref name="metadata"/>, <paramref name="router"/> or <paramref name="topic"/> is null or white space.</exception>
public static void WriteToKafka(TimeSeriesMetadata metadata, BrokerRouter router, string topic)
{
    if ((object)metadata == null)
        throw new ArgumentNullException(nameof(metadata));

    if ((object)router == null)
        throw new ArgumentNullException(nameof(router));

    if (string.IsNullOrWhiteSpace(topic))
        throw new ArgumentNullException(nameof(topic));

    using (MemoryStream stream = new MemoryStream())
    {
        // Serialization increments the metadata version - see Serialize
        Serialize(metadata, stream, true);

        using (Producer producer = new Producer(router))
        {
            Message[] messages = new Message[2];
            byte[] timeKey = BitConverter.GetBytes(DateTime.UtcNow.Ticks);

            // First message used to serialize metadata size (since metadata messages can be large)
            messages[SizeMessage] = new Message
            {
                Key = timeKey,
                Value = BitConverter.GetBytes(stream.Length)
            };

            // Second message used to serialize metadata value
            messages[ValueMessage] = new Message
            {
                Key = timeKey,
                Value = stream.ToArray()
            };

            // Send meta-data to Kafka - GetAwaiter().GetResult() (instead of Wait()) propagates
            // the original send failure rather than an AggregateException wrapper
            producer.SendMessageAsync(topic, messages).GetAwaiter().GetResult();
        }
    }
}
/// <summary>
/// Executes the metadata refresh in a synchronous fashion.
/// </summary>
/// <remarks>
/// Reads any existing metadata record from Kafka (first run only), rebuilds metadata from the
/// current <c>ActiveMeasurements</c> configuration, and publishes to Kafka when the checksum differs.
/// </remarks>
protected override void ExecuteMetadataRefresh()
{
    if (!Initialized || !Enabled || !SerializeMetadata)
        return;

    try
    {
        using (BrokerRouter router = new BrokerRouter(new KafkaOptions(m_servers)
        {
            // Route Kafka client log output through the adapter's status/exception handlers
            Log = new TimeSeriesLogger
            (
                (status, args) => OnStatusMessage(MessageLevel.Info, string.Format(status, args)),
                ex => OnProcessException(MessageLevel.Warning, new InvalidOperationException($"[{MetadataTopic}]: {ex.Message}", ex))
            )
        }))
        {
            // Attempt to retrieve last known metadata record from Kafka
            if ((object)m_metadata == null)
            {
                try
                {
                    Ticks serializationTime;

                    OnStatusMessage(MessageLevel.Info, "Reading latest time-series metadata records from Kafka...");

                    m_metadata = TimeSeriesMetadata.ReadFromKafka(router, MetadataTopic, status => OnStatusMessage(MessageLevel.Info, status), out serializationTime);

                    // ReadFromKafka can yield null when no metadata has ever been published to the
                    // topic - only report deserialization details when a record was retrieved
                    if ((object)m_metadata != null)
                        OnStatusMessage(MessageLevel.Info, $"Deserialized {m_metadata.Count:N0} Kafka time-series metadata records, version {m_metadata.Version:N0}, from \"{MetadataTopic}\" serialized at {serializationTime.ToString(MetadataRecord.DateTimeFormat)}");
                }
                catch (Exception ex)
                {
                    OnStatusMessage(MessageLevel.Warning, $"Failed to read any existing Kafka time-series metadata records from topic \"{MetadataTopic}\": {ex.Message}");
                }
            }

            // Create new meta-data object based on newly loaded configuration
            TimeSeriesMetadata metadata = new TimeSeriesMetadata();

            try
            {
                foreach (DataRow row in DataSource.Tables["ActiveMeasurements"].AsEnumerable())
                {
                    MeasurementKey key;

                    // Rows with an unparseable ID are skipped
                    if (MeasurementKey.TryParse(row.Field<string>("ID") ?? MeasurementKey.Undefined.ToString(), out key))
                    {
                        metadata.Records.Add(new MetadataRecord
                        {
                            ID = key.ID,
                            Source = key.Source,
                            UniqueID = row.Field<object>("SignalID").ToString(),
                            PointTag = row.Field<string>("PointTag"),
                            Device = row.Field<string>("Device"),
                            Longitude = row.ConvertField("Longitude", 0.0F),
                            Latitude = row.ConvertField("Latitude", 0.0F),
                            Protocol = row.Field<string>("Protocol"),
                            SignalType = row.Field<string>("SignalType"),
                            EngineeringUnits = row.Field<string>("EngineeringUnits"),
                            PhasorType = row.Field<string>("PhasorType"),
                            Phase = row.Field<string>("Phase"),
                            Description = row.Field<string>("Description"),
                            LastUpdate = row.Field<DateTime>("UpdatedOn").ToString(MetadataRecord.DateTimeFormat)
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                OnProcessException(MessageLevel.Warning, new InvalidOperationException($"Failed to serialize current time-series metadata records: {ex.Message}", ex));
            }

            if (metadata.Count > 0)
            {
                // See if metadata has not been created yet or is different from last known Kafka record
                if ((object)m_metadata == null || m_metadata.CalculateChecksum() != metadata.CalculateChecksum())
                {
                    // Update local metadata reference
                    m_metadata = metadata;

                    // Send updated metadata to Kafka
                    TimeSeriesMetadata.WriteToKafka(m_metadata, router, MetadataTopic);

                    // Cache metadata locally, if configured
                    m_cacheMetadataLocally?.RunOnceAsync();

                    m_metadataUpdateCount++;

                    OnStatusMessage(MessageLevel.Info, $"Updated \"{MetadataTopic}\" with {m_metadata.Count:N0} Kafka time-series metadata records...");
                }
                else
                {
                    OnStatusMessage(MessageLevel.Info, $"Latest \"{MetadataTopic}\" is up to date with current time-series metadata records...");
                }
            }
            else
            {
                OnStatusMessage(MessageLevel.Warning, "No local time-series metadata available to serialize...");
            }
        }
    }
    catch (Exception ex)
    {
        OnProcessException(MessageLevel.Warning, new InvalidOperationException($"Failed to update \"{MetadataTopic}\" with current time-series metadata records: {ex.Message}", ex));
    }
}
/// <summary>
/// Initializes <see cref="TimeSeriesProducer"/>.
/// </summary>
/// <exception cref="ArgumentException">Required "Servers" setting is missing.</exception>
public override void Initialize()
{
    base.Initialize();

    Dictionary<string, string> settings = Settings;
    string setting;
    int intValue;
    double doubleValue;

    // Parse required settings
    if (!settings.TryGetValue(nameof(Servers), out setting) || string.IsNullOrWhiteSpace(setting))
        throw new ArgumentException($"Required \"{nameof(Servers)}\" setting is missing.");

    Servers = setting.Trim();
    m_servers = Servers.Split(',').Select(uri => new Uri(uri)).ToArray();

    // Parse optional settings, falling back on documented defaults
    if (settings.TryGetValue(nameof(Topic), out setting) && !string.IsNullOrWhiteSpace(setting))
        Topic = setting.Trim();
    else
        Topic = TimeSeriesProducer.DefaultTopic;

    if (settings.TryGetValue(nameof(Partitions), out setting) && int.TryParse(setting, out intValue))
        Partitions = intValue;
    else
        Partitions = TimeSeriesProducer.DefaultPartitions;

    if (settings.TryGetValue(nameof(TrackConsumerOffset), out setting))
        TrackConsumerOffset = setting.ParseBoolean();
    else
        TrackConsumerOffset = DefaultTrackConsumerIndex;

    // Default offset file name derives from the adapter name
    if (!settings.TryGetValue(nameof(ConsumerOffsetFileName), out setting) || string.IsNullOrWhiteSpace(setting))
        setting = Name + ".offset";

    ConsumerOffsetFileName = FilePath.GetAbsolutePath(setting);

    if (settings.TryGetValue(nameof(ConsumerOffsetCacheInterval), out setting) && double.TryParse(setting, out doubleValue))
        ConsumerOffsetCacheInterval = doubleValue;
    else
        ConsumerOffsetCacheInterval = DefaultConsumerOffsetCacheInterval;

    if (settings.TryGetValue(nameof(ReadDelay), out setting) && int.TryParse(setting, out intValue))
        ReadDelay = intValue;
    else
        ReadDelay = DefaultReadDelay;

    if (settings.TryGetValue(nameof(CacheMetadataLocally), out setting))
        CacheMetadataLocally = setting.ParseBoolean();
    else
        CacheMetadataLocally = TimeSeriesProducer.DefaultCacheMetadataLocally;

    // Fixed misplaced statement terminator: original read "{ IsBackground = true } } ;" leaving
    // the assignment unterminated inside the if-block plus a stray empty statement
    if (CacheMetadataLocally)
        m_cacheMetadataLocally = new LongSynchronizedOperation(() => TimeSeriesMetadata.CacheLocally(m_metadata, MetadataTopic, OnStatusMessage)) { IsBackground = true };

    // Restrict consumed measurements to configured outputs, when defined
    if ((object)OutputMeasurements != null && OutputMeasurements.Length > 0)
        m_outputMeasurementKeys = new HashSet<MeasurementKey>(OutputMeasurements.Select(m => m.Key));
}
// Refreshes m_metadata with the latest serialized metadata record found on the Kafka metadata topic.
// Bumps m_metadataUpdateCount when the observed metadata version changes and schedules a local cache
// write when configured. Read failures are rethrown only when no metadata has ever been loaded.
private void UpdateMetadata()
{
    try
    {
        // Route Kafka client log output through the adapter's status/exception handlers
        KafkaOptions options = new KafkaOptions(m_servers)
        {
            Log = new TimeSeriesLogger(
                (status, args) => OnStatusMessage(MessageLevel.Info, string.Format(status, args)),
                ex => OnProcessException(MessageLevel.Warning, new InvalidOperationException($"[{MetadataTopic}]: {ex.Message}", ex)))
        };

        using (BrokerRouter brokerRouter = new BrokerRouter(options))
        {
            Ticks serializedAt;

            OnStatusMessage(MessageLevel.Info, "Reading latest time-series metadata records from Kafka...");

            TimeSeriesMetadata latestMetadata = TimeSeriesMetadata.ReadFromKafka(brokerRouter, MetadataTopic, status => OnStatusMessage(MessageLevel.Info, status), out serializedAt);

            // Nothing to apply when no metadata record exists on the topic yet
            if ((object)latestMetadata == null)
                return;

            m_metadata = latestMetadata;

            OnStatusMessage(MessageLevel.Info, $"Deserialized {m_metadata.Count:N0} Kafka time-series metadata records, version {m_metadata.Version:N0}, from \"{MetadataTopic}\" serialized at {serializedAt.ToString(MetadataRecord.DateTimeFormat)}");

            // Track how many distinct metadata versions have been observed
            if (m_lastMetadataVersion != MetadataVersion)
            {
                m_lastMetadataVersion = MetadataVersion;
                m_metadataUpdateCount++;
            }

            // Cache metadata locally, if configured
            m_cacheMetadataLocally?.RunOnceAsync();
        }
    }
    catch (Exception ex)
    {
        if (!Enabled)
            return;

        // With no prior metadata the adapter cannot operate - propagate; otherwise
        // keep running on the last known metadata and just log a warning
        if ((object)m_metadata == null)
            throw;

        OnStatusMessage(MessageLevel.Warning, $"Failed to read latest Kafka time-series metadata records from topic \"{MetadataTopic}\": {ex.Message}");
    }
}
/// <summary>
/// Executes the metadata refresh in a synchronous fashion.
/// </summary>
/// <remarks>
/// Reads any existing metadata record from Kafka (first run only), rebuilds metadata from the
/// current <c>ActiveMeasurements</c> configuration, and publishes to Kafka when the checksum differs.
/// </remarks>
protected override void ExecuteMetadataRefresh()
{
    if (!Initialized || !Enabled || !SerializeMetadata)
        return;

    try
    {
        using (BrokerRouter router = new BrokerRouter(new KafkaOptions(m_servers)
        {
            // Route Kafka client log output through the adapter's status/exception handlers
            Log = new TimeSeriesLogger
            (
                (status, args) => OnStatusMessage(MessageLevel.Info, string.Format(status, args)),
                ex => OnProcessException(MessageLevel.Warning, new InvalidOperationException($"[{MetadataTopic}]: {ex.Message}", ex))
            )
        }))
        {
            // Attempt to retrieve last known metadata record from Kafka
            if ((object)m_metadata == null)
            {
                try
                {
                    Ticks serializationTime;

                    OnStatusMessage(MessageLevel.Info, "Reading latest time-series metadata records from Kafka...");

                    m_metadata = TimeSeriesMetadata.ReadFromKafka(router, MetadataTopic, status => OnStatusMessage(MessageLevel.Info, status), out serializationTime);

                    // ReadFromKafka can yield null when no metadata has ever been published to the
                    // topic - only report deserialization details when a record was retrieved
                    if ((object)m_metadata != null)
                        OnStatusMessage(MessageLevel.Info, $"Deserialized {m_metadata.Count:N0} Kafka time-series metadata records, version {m_metadata.Version:N0}, from \"{MetadataTopic}\" serialized at {serializationTime.ToString(MetadataRecord.DateTimeFormat)}");
                }
                catch (Exception ex)
                {
                    OnStatusMessage(MessageLevel.Warning, $"Failed to read any existing Kafka time-series metadata records from topic \"{MetadataTopic}\": {ex.Message}");
                }
            }

            // Create new meta-data object based on newly loaded configuration
            TimeSeriesMetadata metadata = new TimeSeriesMetadata();

            try
            {
                foreach (DataRow row in DataSource.Tables["ActiveMeasurements"].AsEnumerable())
                {
                    MeasurementKey key;

                    // Rows with an unparseable ID are skipped
                    if (MeasurementKey.TryParse(row.Field<string>("ID") ?? MeasurementKey.Undefined.ToString(), out key))
                    {
                        metadata.Records.Add(new MetadataRecord
                        {
                            ID = key.ID,
                            Source = key.Source,
                            UniqueID = row.Field<object>("SignalID").ToString(),
                            PointTag = row.Field<string>("PointTag"),
                            Device = row.Field<string>("Device"),
                            Longitude = row.ConvertField("Longitude", 0.0F),
                            Latitude = row.ConvertField("Latitude", 0.0F),
                            Protocol = row.Field<string>("Protocol"),
                            SignalType = row.Field<string>("SignalType"),
                            EngineeringUnits = row.Field<string>("EngineeringUnits"),
                            PhasorType = row.Field<string>("PhasorType"),
                            Phase = row.Field<string>("Phase"),
                            Description = row.Field<string>("Description"),
                            LastUpdate = row.Field<DateTime>("UpdatedOn").ToString(MetadataRecord.DateTimeFormat)
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                OnProcessException(MessageLevel.Warning, new InvalidOperationException($"Failed to serialize current time-series metadata records: {ex.Message}", ex));
            }

            if (metadata.Count > 0)
            {
                // See if metadata has not been created yet or is different from last known Kafka record
                if ((object)m_metadata == null || m_metadata.CalculateChecksum() != metadata.CalculateChecksum())
                {
                    // Update local metadata reference
                    m_metadata = metadata;

                    // Send updated metadata to Kafka
                    TimeSeriesMetadata.WriteToKafka(m_metadata, router, MetadataTopic);

                    // Cache metadata locally, if configured
                    m_cacheMetadataLocally?.RunOnceAsync();

                    m_metadataUpdateCount++;

                    OnStatusMessage(MessageLevel.Info, $"Updated \"{MetadataTopic}\" with {m_metadata.Count:N0} Kafka time-series metadata records...");
                }
                else
                {
                    OnStatusMessage(MessageLevel.Info, $"Latest \"{MetadataTopic}\" is up to date with current time-series metadata records...");
                }
            }
            else
            {
                OnStatusMessage(MessageLevel.Warning, "No local time-series metadata available to serialize...");
            }
        }
    }
    catch (Exception ex)
    {
        OnProcessException(MessageLevel.Warning, new InvalidOperationException($"Failed to update \"{MetadataTopic}\" with current time-series metadata records: {ex.Message}", ex));
    }
}
/// <summary>
/// Initializes <see cref="TimeSeriesProducer"/>.
/// </summary>
/// <exception cref="ArgumentException">Required "Servers" setting is missing.</exception>
public override void Initialize()
{
    base.Initialize();

    Dictionary<string, string> settings = Settings;
    string setting;
    int value;

    // Parse required settings
    if (!settings.TryGetValue(nameof(Servers), out setting) || string.IsNullOrWhiteSpace(setting))
        throw new ArgumentException($"Required \"{nameof(Servers)}\" setting is missing.");

    Servers = setting.Trim();
    m_servers = Servers.Split(',').Select(uri => new Uri(uri)).ToArray();

    // Parse optional settings, falling back on documented defaults
    if (settings.TryGetValue(nameof(Topic), out setting) && !string.IsNullOrWhiteSpace(setting))
        Topic = setting.Trim();
    else
        Topic = DefaultTopic;

    if (settings.TryGetValue(nameof(Partitions), out setting) && int.TryParse(setting, out value))
        Partitions = value;
    else
        Partitions = DefaultPartitions;

    if (settings.TryGetValue(nameof(Encoding), out setting))
        Encoding = setting;
    else
        Encoding = null;

    if (settings.TryGetValue(nameof(TimestampFormat), out setting))
        TimestampFormat = setting;
    else
        TimestampFormat = DefaultTimestampFormat;

    if (settings.TryGetValue(nameof(ValueFormat), out setting))
        ValueFormat = setting;
    else
        ValueFormat = DefaultValueFormat;

    if (settings.TryGetValue(nameof(SerializeMetadata), out setting))
        SerializeMetadata = setting.ParseBoolean();
    else
        SerializeMetadata = DefaultSerializeMetadata;

    if (settings.TryGetValue(nameof(CacheMetadataLocally), out setting))
        CacheMetadataLocally = setting.ParseBoolean();
    else
        CacheMetadataLocally = DefaultCacheMetadataLocally;

    // Fixed misplaced statement terminator: original read "{ IsBackground = true } } ;" leaving
    // the assignment unterminated inside the if-block plus a stray empty statement
    if (CacheMetadataLocally)
        m_cacheMetadataLocally = new LongSynchronizedOperation(() => TimeSeriesMetadata.CacheLocally(m_metadata, MetadataTopic, status => OnStatusMessage(MessageLevel.Info, status))) { IsBackground = true };
}