private void BtnGetMetadata_Click(object sender, EventArgs e)
{
    // Retrieve metadata from the openHistorian GEP publisher when the button is clicked.
    // Note that the openHistorian internal publisher controls how many tables / fields are sent as metadata
    // to subscribers (user controllable), so not all fields in the associated database views will be available.
    // Below are the default SELECT filters the publisher applies to the "MeasurementDetail", "DeviceDetail"
    // and "PhasorDetail" database views:
    //   SELECT NodeID, UniqueID, OriginalSource, IsConcentrator, Acronym, Name, ParentAcronym, ProtocolName, FramesPerSecond, Enabled FROM DeviceDetail WHERE IsConcentrator = 0
    //   SELECT Internal, DeviceAcronym, DeviceName, SignalAcronym, ID, SignalID, PointTag, SignalReference, Description, Enabled FROM MeasurementDetail
    //   SELECT DeviceAcronym, Label, Type, Phase, SourceIndex FROM PhasorDetail
    DataTable measurementTable = null;
    DataTable deviceTable = null;
    DataTable phasorTable = null;

    string server = "Server=" + TxtServerIP.Text.Trim() + "; Port=" + TxtGEPPort.Text.Trim() + "; Interface=0.0.0.0";

    try
    {
        DataSet metadata = MetadataRetriever.GetMetadata(server);

        // Reference metadata tables
        measurementTable = metadata.Tables["MeasurementDetail"];
        deviceTable = metadata.Tables["DeviceDetail"];
        phasorTable = metadata.Tables["PhasorDetail"];
    }
    catch (Exception ex)
    {
        MessageBox.Show("Exception retrieving metadata: " + ex.Message);
    }

    SortedDictionary<ulong, Tuple<Guid, string, string, string>> pointData = new SortedDictionary<ulong, Tuple<Guid, string, string, string>>();

    if ((object)measurementTable != null)
    {
        // Measurements can be filtered here if desired, e.g., excluding statistics and digitals:
        DataRow[] measurements = measurementTable.Select("SignalAcronym <> 'STAT' AND SignalAcronym <> 'DIGI'");

        m_settings.MyData.Tables["Measurements"].Rows.Clear();

        // Build a map of point IDs to measurement details from the measurement records
        foreach (DataRow measurement in measurements)
        {
            Guid signalID;
            MeasurementKey measurementKey;

            Guid.TryParse(measurement["SignalID"].ToString(), out signalID);
            MeasurementKey.TryParse(measurement["ID"].ToString(), out measurementKey);

            pointData[measurementKey.ID] = new Tuple<Guid, string, string, string>(signalID, measurement["DeviceAcronym"].ToString(), measurement["SignalAcronym"].ToString(), measurement["Description"].ToString());
        }

        foreach (KeyValuePair<ulong, Tuple<Guid, string, string, string>> kvp in pointData)
            m_settings.MyData.Tables["Measurements"].Rows.Add((int)kvp.Key, kvp.Value.Item1, kvp.Value.Item2, kvp.Value.Item3, kvp.Value.Item4);
    }
}
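// A minimal sketch of the "Measurements" table schema the handler above assumes.
// Column names here are illustrative (the actual schema is defined wherever
// m_settings.MyData is created); the column types follow the Rows.Add call above:
// an int point ID, a Guid signal ID and three strings.
private static DataTable CreateMeasurementsTable()
{
    DataTable measurements = new DataTable("Measurements");

    measurements.Columns.Add("PointID", typeof(int));
    measurements.Columns.Add("SignalID", typeof(Guid));
    measurements.Columns.Add("DeviceAcronym", typeof(string));
    measurements.Columns.Add("SignalAcronym", typeof(string));
    measurements.Columns.Add("Description", typeof(string));

    return measurements;
}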
public MeasurementRow(DataRow row)
{
    MeasurementKey.TryParse(row["ID"].ToString(), out MeasurementKey measurementKey);

    PointID = unchecked((long)measurementKey.ID);
    DeviceName = row["DeviceAcronym"].ToString();
    SignalAcronym = row["SignalAcronym"].ToString();
    Description = row["Description"].ToString();
}
private Metadata(DataRow row)
{
    Guid.TryParse(row["SignalID"].ToString(), out SignalID);
    MeasurementKey.TryParse(row["ID"].ToString(), out MeasurementKey measurementKey);

    PointID = measurementKey.ID;
    PointTag = row["PointTag"].ToString();
    SignalReference = row["SignalReference"].ToString();
    DeviceName = row["DeviceAcronym"].ToString();
    SignalAcronym = row["SignalAcronym"].ToString();
    Description = row["Description"].ToString();
}
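// Usage sketch: since the constructor above is private, records would typically be
// materialized through a factory. The Load method below is hypothetical and assumes
// the DataSet shape returned by MetadataRetriever.GetMetadata (System.Linq in scope):
private static List<Metadata> Load(DataSet metadata) =>
    metadata.Tables["MeasurementDetail"].Rows.Cast<DataRow>().Select(row => new Metadata(row)).ToList();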
private IEnumerable<DataSourceValueGroup> QueryTarget(Target sourceTarget, string queryExpression, DateTime startTime, DateTime stopTime, string interval, bool decimate, bool dropEmptySeries, CancellationToken cancellationToken)
{
    if (queryExpression.ToLowerInvariant().Contains(DropEmptySeriesCommand))
    {
        dropEmptySeries = true;
        queryExpression = queryExpression.ReplaceCaseInsensitive(DropEmptySeriesCommand, "");
    }

    // A single target might look like the following:
    // PPA:15; STAT:20; SETSUM(COUNT(PPA:8; PPA:9; PPA:10)); FILTER ActiveMeasurements WHERE SignalType IN ('IPHA', 'VPHA'); RANGE(PPA:99; SUM(FILTER ActiveMeasurements WHERE SignalType = 'FREQ'; STAT:12))

    // Targets include user provided input, so casing should be ignored
    HashSet<string> targetSet = new HashSet<string>(new[] { queryExpression }, StringComparer.OrdinalIgnoreCase);
    HashSet<string> reducedTargetSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    List<Match> seriesFunctions = new List<Match>();

    foreach (string target in targetSet)
    {
        // Find any series functions in target
        Match[] matchedFunctions = TargetCache<Match[]>.GetOrAdd(target, () => s_seriesFunctions.Matches(target).Cast<Match>().ToArray());

        if (matchedFunctions.Length > 0)
        {
            seriesFunctions.AddRange(matchedFunctions);

            // Reduce target to non-function expressions - important so the later split on ';' succeeds properly
            string reducedTarget = target;

            foreach (string expression in matchedFunctions.Select(match => match.Value))
                reducedTarget = reducedTarget.Replace(expression, "");

            if (!string.IsNullOrWhiteSpace(reducedTarget))
                reducedTargetSet.Add(reducedTarget);
        }
        else
        {
            reducedTargetSet.Add(target);
        }
    }

    if (seriesFunctions.Count > 0)
    {
        // Execute series functions
        foreach (Tuple<SeriesFunction, string, GroupOperation> parsedFunction in seriesFunctions.Select(ParseSeriesFunction))
        {
            foreach (DataSourceValueGroup valueGroup in ExecuteSeriesFunction(sourceTarget, parsedFunction, startTime, stopTime, interval, decimate, dropEmptySeries, cancellationToken))
                yield return valueGroup;
        }

        // Use reduced target set that excludes any series functions
        targetSet = reducedTargetSet;
    }

    // Query any remaining targets
    if (targetSet.Count > 0)
    {
        // Split remaining targets on semicolon - this way even multiple filter expressions can be used as inputs to functions
        string[] allTargets = targetSet.Select(target => target.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries)).SelectMany(currentTargets => currentTargets).ToArray();

        // Expand target set to include point tags for all parsed inputs
        foreach (string target in allTargets)
            targetSet.UnionWith(TargetCache<string[]>.GetOrAdd(target, () => AdapterBase.ParseInputMeasurementKeys(Metadata, false, target).Select(key => key.TagFromKey(Metadata)).ToArray()));

        Dictionary<ulong, string> targetMap = new Dictionary<ulong, string>();

        // Target set now contains both original expressions and newly parsed individual point tags - to create the final
        // point list we are only interested in the point tags, provided either by direct user entry or derived by
        // parsing filter expressions
        foreach (string target in targetSet)
        {
            // Reduce all targets down to a dictionary of point IDs mapped to point tags
            MeasurementKey key = TargetCache<MeasurementKey>.GetOrAdd(target, () => target.KeyFromTag(Metadata));

            if (key == MeasurementKey.Undefined)
            {
                Tuple<MeasurementKey, string> result = TargetCache<Tuple<MeasurementKey, string>>.GetOrAdd($"signalID@{target}", () => target.KeyAndTagFromSignalID(Metadata));

                key = result.Item1;
                string pointTag = result.Item2;

                if (key == MeasurementKey.Undefined)
                {
                    result = TargetCache<Tuple<MeasurementKey, string>>.GetOrAdd($"key@{target}", () =>
                    {
                        MeasurementKey.TryParse(target, out MeasurementKey parsedKey);
                        return new Tuple<MeasurementKey, string>(parsedKey, parsedKey.TagFromKey(Metadata));
                    });

                    key = result.Item1;
                    pointTag = result.Item2;

                    if (key != MeasurementKey.Undefined)
                        targetMap[key.ID] = pointTag;
                }
                else
                {
                    targetMap[key.ID] = pointTag;
                }
            }
            else
            {
                targetMap[key.ID] = target;
            }
        }

        // Query underlying data source for each target - to prevent a parallel read from the data source we enumerate immediately
        List<DataSourceValue> dataValues = QueryDataSourceValues(startTime, stopTime, interval, decimate, targetMap).TakeWhile(_ => !cancellationToken.IsCancellationRequested).ToList();

        foreach (KeyValuePair<ulong, string> target in targetMap)
        {
            yield return new DataSourceValueGroup
            {
                Target = target.Value,
                RootTarget = target.Value,
                SourceTarget = sourceTarget,
                Source = dataValues.Where(dataValue => dataValue.Target.Equals(target.Value)),
                DropEmptySeries = dropEmptySeries
            };
        }
    }
}
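// Usage sketch (hypothetical helper; the literal values are illustrative): streams all
// frequency measurements from the last hour through QueryTarget above. The "*" interval
// assumes the underlying data source treats it as "native resolution"; adjust to
// whatever interval syntax the data source actually accepts.
private IEnumerable<DataSourceValueGroup> QueryLastHourOfFrequency(Target sourceTarget, CancellationToken cancellationToken) =>
    QueryTarget(sourceTarget, "FILTER ActiveMeasurements WHERE SignalType = 'FREQ'", DateTime.UtcNow.AddHours(-1.0D), DateTime.UtcNow, "*", false, false, cancellationToken);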
/// <summary>
/// Initializes this <see cref="FileImporter"/>.
/// </summary>
public override void Initialize()
{
    base.Initialize();

    Dictionary<string, string> settings = Settings;
    string setting;

    // Load required and optional parameters
    if (settings.TryGetValue("importPath", out setting))
        m_importPath = setting;
    else
        throw new InvalidOperationException("No import path was specified for EPRI file importer - this is a required setting.");

    if (settings.TryGetValue("inputInterval", out setting))
        m_inputInterval = double.Parse(setting);

    if (settings.TryGetValue("measurementsPerInterval", out setting))
        m_measurementsPerInterval = int.Parse(setting);

    if (settings.TryGetValue("simulateTimestamp", out setting))
        m_simulateTimestamp = setting.ParseBoolean();

    if (settings.TryGetValue("timestampFormat", out setting))
        m_timestampFormat = setting;

    if (settings.TryGetValue("skipRows", out setting))
        int.TryParse(setting, out m_skipRows);

    if (m_skipRows < 0)
        m_skipRows = 0;

    settings.TryGetValue("useHighResolutionInputTimer", out setting);

    if (string.IsNullOrEmpty(setting))
        setting = "false";

    UseHighResolutionInputTimer = setting.ParseBoolean();

    if (!UseHighResolutionInputTimer)
        m_looseTimer = new Timer();

    // Load column mappings:
    if (settings.TryGetValue("columnMappings", out setting))
    {
        Dictionary<int, string> columnMappings = new Dictionary<int, string>();
        int index;

        foreach (KeyValuePair<string, string> mapping in setting.ParseKeyValuePairs())
        {
            if (int.TryParse(mapping.Key, out index))
                columnMappings[index] = mapping.Value;
        }

        if (!m_simulateTimestamp && !columnMappings.Values.Contains("Timestamp", StringComparer.OrdinalIgnoreCase))
            throw new InvalidOperationException("One of the column mappings must be defined as a \"Timestamp\": e.g., columnMappings={0=Timestamp; 1=PPA:12; 2=PPA:13}.");

        // In transverse mode, maximum measurements per interval is set to maximum columns in input file
        m_measurementsPerInterval = columnMappings.Keys.Max();

        // Auto-assign output measurements based on column mappings
        OutputMeasurements = columnMappings.Where(kvp => string.Compare(kvp.Value, "Timestamp", StringComparison.OrdinalIgnoreCase) != 0).Select(kvp =>
        {
            string measurementID = kvp.Value;
            IMeasurement measurement = new Measurement();
            MeasurementKey key;
            Guid id;

            if (Guid.TryParse(measurementID, out id))
                key = MeasurementKey.LookUpBySignalID(id);
            else
                MeasurementKey.TryParse(measurementID, out key);

            if (key.SignalID != Guid.Empty)
            {
                measurement.Metadata = key.Metadata;

                // Associate measurement with column index
                m_columnMappings[kvp.Key] = measurement;
            }

            return measurement;
        }).ToArray();

        int timestampColumn = columnMappings.First(kvp => string.Compare(kvp.Value, "Timestamp", StringComparison.OrdinalIgnoreCase) == 0).Key;

        // Reserve a column mapping for the timestamp value
        IMeasurement timestampMeasurement = new Measurement
        {
            Metadata = new MeasurementMetadata(null, "Timestamp", 0, 1, null)
        };

        m_columnMappings[timestampColumn] = timestampMeasurement;
    }

    // Override input interval based on temporal processing interval if it's not set to default
    if (ProcessingInterval > -1)
        m_inputInterval = ProcessingInterval == 0 ? 1 : ProcessingInterval;

    if ((object)m_looseTimer != null)
    {
        m_looseTimer.Interval = m_inputInterval;
        m_looseTimer.AutoReset = true;
        m_looseTimer.Elapsed += m_looseTimer_Elapsed;
    }

    m_fileSystemWatcher = new SafeFileWatcher(m_importPath, "EPRI-VS-Output-*.csv");
    m_fileSystemWatcher.Created += m_fileSystemWatcher_Created;
    m_fileSystemWatcher.Renamed += m_fileSystemWatcher_Renamed;
}
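// A minimal hosting sketch for the importer above, assuming the AdapterBase convention
// where Settings is parsed from ConnectionString during base.Initialize() - the path and
// parameter values are illustrative, and only importPath is actually required:
private static FileImporter CreateExampleImporter()
{
    FileImporter importer = new FileImporter
    {
        ConnectionString = "importPath=C:\\EPRI\\Output; inputInterval=33.333333; " +
                           "simulateTimestamp=true; columnMappings={0=Timestamp; 1=PPA:12; 2=PPA:13}"
    };

    importer.Initialize();

    return importer;
}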
/// <summary>
/// Initializes this <see cref="MetricImporter"/>.
/// </summary>
public override void Initialize()
{
    base.Initialize();

    Dictionary<string, string> settings = Settings;
    string setting;

    // Load required and optional parameters
    if (settings.TryGetValue("importPath", out setting))
        m_importPath = setting;
    else
        throw new InvalidOperationException("No import path was specified for EPRI metric importer - this is a required setting.");

    if (settings.TryGetValue("inputInterval", out setting))
        m_inputInterval = double.Parse(setting);

    if (settings.TryGetValue("measurementsPerInterval", out setting))
        m_measurementsPerInterval = int.Parse(setting);

    if (settings.TryGetValue("simulateTimestamp", out setting))
        m_simulateTimestamp = setting.ParseBoolean();

    if (settings.TryGetValue("timestampFormat", out setting))
        m_timestampFormat = setting;

    if (settings.TryGetValue("skipRows", out setting))
        int.TryParse(setting, out m_skipRows);

    if (m_skipRows < 0)
        m_skipRows = 0;

    settings.TryGetValue("useHighResolutionInputTimer", out setting);

    if (string.IsNullOrEmpty(setting))
        setting = "false";

    UseHighResolutionInputTimer = setting.ParseBoolean();

    if (!UseHighResolutionInputTimer)
        m_looseTimer = new System.Timers.Timer();

    // Load column mappings:
    if (settings.TryGetValue("columnMappings", out setting))
    {
        Dictionary<int, string> columnMappings = new Dictionary<int, string>();
        int index;

        foreach (KeyValuePair<string, string> mapping in setting.ParseKeyValuePairs())
        {
            if (int.TryParse(mapping.Key, out index))
                columnMappings[index] = mapping.Value;
        }

        if (!m_simulateTimestamp && !columnMappings.Values.Contains("Timestamp", StringComparer.OrdinalIgnoreCase))
            throw new InvalidOperationException("One of the column mappings must be defined as a \"Timestamp\": e.g., columnMappings={1=Timestamp; 2=PPA:12; 3=PPA:13}.");

        // In transverse mode, maximum measurements per interval is set to maximum columns in input file
        m_measurementsPerInterval = columnMappings.Keys.Max();

        // Auto-assign output measurements based on column mappings
        OutputMeasurements = columnMappings.Where(kvp => string.Compare(kvp.Value, "Timestamp", StringComparison.OrdinalIgnoreCase) != 0).Select(kvp =>
        {
            string measurementID = kvp.Value;
            IMeasurement measurement = new Measurement();
            MeasurementKey key;
            Guid id;

            if (Guid.TryParse(measurementID, out id))
                measurement.Key = MeasurementKey.LookUpBySignalID(id);
            else if (MeasurementKey.TryParse(measurementID, out key))
                measurement.Key = key;

            if (measurement.ID != Guid.Empty)
            {
                try
                {
                    DataRow[] filteredRows = DataSource.Tables["ActiveMeasurements"].Select(string.Format("SignalID = '{0}'", measurement.ID));

                    if (filteredRows.Length > 0)
                    {
                        DataRow row = filteredRows[0];

                        // Assign other attributes
                        measurement.TagName = row["PointTag"].ToNonNullString();
                        measurement.Multiplier = double.Parse(row["Multiplier"].ToString());
                        measurement.Adder = double.Parse(row["Adder"].ToString());
                    }
                }
                catch
                {
                    // Failure to lookup extra metadata is not catastrophic
                }

                // Associate measurement with column index
                m_columnMappings[kvp.Key] = measurement;
            }

            return measurement;
        }).ToArray();

        int timestampColumn = columnMappings.First(kvp => string.Compare(kvp.Value, "Timestamp", StringComparison.OrdinalIgnoreCase) == 0).Key;

        // Reserve a column mapping for the timestamp value
        IMeasurement timestampMeasurement = new Measurement
        {
            TagName = "Timestamp"
        };

        m_columnMappings[timestampColumn] = timestampMeasurement;
    }

    // Override input interval based on temporal processing interval if it's not set to default
    if (ProcessingInterval > -1)
        m_inputInterval = ProcessingInterval == 0 ? 1 : ProcessingInterval;

    if ((object)m_looseTimer != null)
    {
        m_looseTimer.Interval = m_inputInterval;
        m_looseTimer.AutoReset = true;
        m_looseTimer.Elapsed += m_looseTimer_Elapsed;
    }

    m_fileSystemWatcher = new FileSystemWatcher(m_importPath, "EPRI-VS-Metrics-*.csv");
    m_fileSystemWatcher.Created += m_fileSystemWatcher_Created;
    m_fileSystemWatcher.Renamed += m_fileSystemWatcher_Renamed;
}
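// Illustrative input shape for columnMappings={1=Timestamp; 2=PPA:12; 3=PPA:13} with
// skipRows=1 (header row) and timestampFormat="dd-MMM-yyyy HH:mm:ss.fff" - the column
// names and data values below are made up, not taken from an actual EPRI metrics file:
//
//   Metric,Time,Frequency,Voltage
//   row1,01-Jan-2020 00:00:00.000,59.998,525.1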
// Retrieves the measurements from the database.
private void GetDbMeasurements(object state)
{
    IDbConnection connection = null;

    // Get measurements from the database.
    try
    {
        SignalIndexCache signalIndexCache = new SignalIndexCache();
        CompactMeasurement measurement;
        long startTime = DateTime.UtcNow.Ticks;

        if (m_cacheFileName != null && File.Exists(m_cacheFileName))
        {
            OnStatusMessage(MessageLevel.Info, "Loading cached input data...");

            try
            {
                using (FileStream data = File.OpenRead(m_cacheFileName))
                {
                    byte[] buffer = new byte[4];
                    int signalIndexCacheImageSize;
                    int compactMeasurementSize;
                    int totalMeasurements;

                    // Read the signal index cache image size from the file
                    if (data.Read(buffer, 0, 4) != 4)
                        throw new EndOfStreamException();

                    signalIndexCacheImageSize = LittleEndian.ToInt32(buffer, 0);

                    // Resize buffer to accommodate exact signal index cache
                    buffer = new byte[signalIndexCacheImageSize];

                    // Read the signal index cache image from the file
                    if (data.Read(buffer, 0, signalIndexCacheImageSize) != signalIndexCacheImageSize)
                        throw new EndOfStreamException();

                    // Deserialize the signal index cache
                    signalIndexCache = Serialization.Deserialize<SignalIndexCache>(buffer, SerializationFormat.Binary);

                    // Read the size of each compact measurement from the file
                    if (data.Read(buffer, 0, 4) != 4)
                        throw new EndOfStreamException();

                    compactMeasurementSize = LittleEndian.ToInt32(buffer, 0);

                    // Read the total number of compact measurements from the file
                    if (data.Read(buffer, 0, 4) != 4)
                        throw new EndOfStreamException();

                    totalMeasurements = LittleEndian.ToInt32(buffer, 0);

                    // Resize buffer to accommodate compact measurement if needed (not likely)
                    if (buffer.Length < compactMeasurementSize)
                        buffer = new byte[compactMeasurementSize];

                    // Read each compact measurement image from the file
                    for (int i = 0; i < totalMeasurements; i++)
                    {
                        if (data.Read(buffer, 0, compactMeasurementSize) != compactMeasurementSize)
                            throw new EndOfStreamException();

                        // Parse compact measurement
                        measurement = new CompactMeasurement(signalIndexCache);
                        measurement.ParseBinaryImage(buffer, 0, compactMeasurementSize);

                        m_dbMeasurements.Add(measurement);

                        if (m_dbMeasurements.Count % 50000 == 0)
                            OnStatusMessage(MessageLevel.Info, $"Loaded {m_dbMeasurements.Count:N0} records so far...");
                    }

                    OnStatusMessage(MessageLevel.Info, $"Completed data load in {((Ticks)(DateTime.UtcNow.Ticks - startTime)).ToElapsedTimeString(2)}");
                }
            }
            catch (Exception ex)
            {
                // Normalize any cache read failure to an EndOfStreamException so the outer
                // handler can delete the corrupt cache and recreate it from the database
                if (ex is EndOfStreamException)
                    throw;

                throw new EndOfStreamException(ex.Message, ex);
            }
        }
        else
        {
            OnStatusMessage(MessageLevel.Info, "Loading database input data...");

            const string MeasurementTable = "ActiveMeasurements";

            Dictionary<string, string> dataProviderSettings = m_dataProviderString.ParseKeyValuePairs();
            Assembly assm = Assembly.Load(dataProviderSettings["AssemblyName"]);
            Type connectionType = assm.GetType(dataProviderSettings["ConnectionType"]);

            Dictionary<Guid, MeasurementKey> lookupCache = new Dictionary<Guid, MeasurementKey>();
            IDbCommand command;
            IDataReader dbReader;
            MeasurementKey key;
            Guid id;
            ushort index = 0;

            connection = (IDbConnection)Activator.CreateInstance(connectionType);
            connection.ConnectionString = m_dbConnectionString;
            connection.Open();

            command = connection.CreateCommand();
            command.CommandText = $"SELECT * FROM {m_dbTableName}";

            using (dbReader = command.ExecuteReader())
            {
                while (dbReader.Read())
                {
                    measurement = new CompactMeasurement(signalIndexCache);

                    foreach (string fieldName in m_fieldNames.Keys)
                    {
                        object value = dbReader[fieldName];
                        string propertyName = m_fieldNames[fieldName];

                        switch (propertyName)
                        {
                            case "Timestamp":
                                // If the value is a timestamp, use the timestamp format
                                // specified by the user when reading the timestamp.
                                if (m_timestampFormat == null)
                                    measurement.Timestamp = long.Parse(value.ToNonNullString());
                                else
                                    measurement.Timestamp = DateTime.ParseExact(value.ToNonNullString(), m_timestampFormat, CultureInfo.CurrentCulture);
                                break;
                            case "ID":
                                if (Guid.TryParse(value.ToString(), out id))
                                {
                                    if (!lookupCache.TryGetValue(id, out key))
                                    {
                                        if (DataSource.Tables.Contains(MeasurementTable))
                                        {
                                            DataRow[] filteredRows = DataSource.Tables[MeasurementTable].Select($"SignalID = '{id}'");

                                            if (filteredRows.Length > 0)
                                                key = MeasurementKey.LookUpOrCreate(id, filteredRows[0]["ID"].ToString());
                                        }

                                        if (key != MeasurementKey.Undefined)
                                        {
                                            // Cache measurement key associated with ID
                                            lookupCache[id] = key;

                                            // Assign a runtime index optimization for distinct measurements
                                            signalIndexCache.Reference.TryAdd(index++, key);
                                        }
                                    }

                                    measurement.Metadata = key.Metadata;
                                }
                                break;
                            case "Key":
                                if (MeasurementKey.TryParse(value.ToString(), out key))
                                {
                                    if (!lookupCache.ContainsKey(key.SignalID))
                                    {
                                        // Cache measurement key associated with ID
                                        lookupCache[key.SignalID] = key;

                                        // Assign a runtime index optimization for distinct measurements
                                        signalIndexCache.Reference.TryAdd(index++, key);
                                    }

                                    measurement.Metadata = key.Metadata;
                                }
                                break;
                            case "Value":
                                measurement.Value = Convert.ToDouble(value);
                                break;
                            default:
                                PropertyInfo property = GetAllProperties(typeof(IMeasurement)).FirstOrDefault(propertyInfo => propertyInfo.Name == propertyName);

                                if (property != null)
                                {
                                    Type propertyType = property.PropertyType;
                                    Type valueType = value.GetType();

                                    // ReSharper disable once UseMethodIsInstanceOfType
                                    if (property.PropertyType.IsAssignableFrom(value.GetType()))
                                    {
                                        property.SetValue(measurement, value, null);
                                    }
                                    else if (property.PropertyType == typeof(string))
                                    {
                                        property.SetValue(measurement, value.ToNonNullString(), null);
                                    }
                                    else if (valueType == typeof(string))
                                    {
                                        MethodInfo parseMethod = propertyType.GetMethod("Parse", new[] { typeof(string) });

                                        if (parseMethod != null && parseMethod.IsStatic)
                                            property.SetValue(measurement, parseMethod.Invoke(null, new[] { value }), null);
                                    }
                                    else
                                    {
                                        string exceptionMessage = $"The type of field {fieldName} could not be converted to the type of property {propertyName}.";
                                        OnProcessException(MessageLevel.Warning, new InvalidCastException(exceptionMessage));
                                    }
                                }
                                else
                                {
                                    string exceptionMessage = $"The type of field {fieldName} could not be converted to the type of property {propertyName} - no property match was found.";
                                    OnProcessException(MessageLevel.Warning, new InvalidCastException(exceptionMessage));
                                }
                                break;
                        }
                    }

                    // Add the fully populated measurement once per database row - note that
                    // this must happen outside the field mapping loop above
                    m_dbMeasurements.Add(measurement);

                    if (m_dbMeasurements.Count % 50000 == 0)
                        OnStatusMessage(MessageLevel.Info, $"Loaded {m_dbMeasurements.Count:N0} records so far...");
                }
            }

            OnStatusMessage(MessageLevel.Info, "Sorting data by time...");

            m_dbMeasurements = m_dbMeasurements.OrderBy(m => (long)m.Timestamp).ToList();

            OnStatusMessage(MessageLevel.Info, $"Completed data load in {((Ticks)(DateTime.UtcNow.Ticks - startTime)).ToElapsedTimeString(2)}");

            if (m_cacheFileName != null)
            {
                OnStatusMessage(MessageLevel.Info, "Caching data for next initialization...");

                using (FileStream data = File.OpenWrite(m_cacheFileName))
                {
                    byte[] signalIndexCacheImage = Serialization.Serialize(signalIndexCache, SerializationFormat.Binary);
                    int compactMeasurementSize = (new CompactMeasurement(signalIndexCache)).BinaryLength;

                    // Write the signal index cache image size to the file
                    data.Write(LittleEndian.GetBytes(signalIndexCacheImage.Length), 0, 4);

                    // Write the signal index cache image to the file
                    data.Write(signalIndexCacheImage, 0, signalIndexCacheImage.Length);

                    // Write the size of each compact measurement to the file
                    data.Write(LittleEndian.GetBytes(compactMeasurementSize), 0, 4);

                    // Write the total number of compact measurements to the file
                    data.Write(LittleEndian.GetBytes(m_dbMeasurements.Count), 0, 4);

                    // Write each compact measurement image to the file
                    for (int i = 0; i < m_dbMeasurements.Count; i++)
                        ((ISupportBinaryImage)m_dbMeasurements[i]).CopyBinaryImageToStream(data);
                }
            }
        }

        OnStatusMessage(MessageLevel.Info, "Entering data read cycle...");
        ThreadPool.QueueUserWorkItem(PublishData);
    }
    catch (EndOfStreamException ex)
    {
        OnProcessException(MessageLevel.Warning, new EndOfStreamException($"Failed to load cached data from {m_cacheFileName} due to file corruption{(string.IsNullOrWhiteSpace(ex.Message) ? "," : ": " + ex.Message + " - ")} cache will be recreated from database"));

        // If the cached file is corrupt, delete it and load from the database
        if (File.Exists(m_cacheFileName))
            File.Delete(m_cacheFileName);

        m_dbMeasurements.Clear();
        GetDbMeasurements(null);
    }
    catch (Exception ex)
    {
        OnProcessException(MessageLevel.Warning, new InvalidOperationException("Failed during data load: " + ex.Message, ex));
    }
    finally
    {
        if (connection != null)
            connection.Close();
    }
}
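// For reference, the cache file produced and consumed above has the following layout,
// derived from the read/write sequences (all integers are little-endian):
//
//   [int32: signal index cache image size]
//   [signal index cache image (binary-serialized SignalIndexCache)]
//   [int32: size of each compact measurement image]
//   [int32: total number of compact measurements]
//   [compact measurement images, fixed size, repeated "total" times]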
/// <summary>
/// Executes the metadata refresh in a synchronous fashion.
/// </summary>
protected override void ExecuteMetadataRefresh()
{
    if (!Initialized || !Enabled || !SerializeMetadata)
        return;

    try
    {
        using (BrokerRouter router = new BrokerRouter(new KafkaOptions(m_servers)
        {
            Log = new TimeSeriesLogger
            (
                (status, args) => OnStatusMessage(MessageLevel.Info, string.Format(status, args)),
                ex => OnProcessException(MessageLevel.Warning, new InvalidOperationException($"[{MetadataTopic}]: {ex.Message}", ex))
            )
        }))
        {
            // Attempt to retrieve the last known metadata record from Kafka
            if ((object)m_metadata == null)
            {
                try
                {
                    Ticks serializationTime;

                    OnStatusMessage(MessageLevel.Info, "Reading latest time-series metadata records from Kafka...");

                    m_metadata = TimeSeriesMetadata.ReadFromKafka(router, MetadataTopic, status => OnStatusMessage(MessageLevel.Info, status), out serializationTime);

                    OnStatusMessage(MessageLevel.Info, $"Deserialized {m_metadata.Count:N0} Kafka time-series metadata records, version {m_metadata.Version:N0}, from \"{MetadataTopic}\" serialized at {serializationTime.ToString(MetadataRecord.DateTimeFormat)}");
                }
                catch (Exception ex)
                {
                    OnStatusMessage(MessageLevel.Warning, $"Failed to read any existing Kafka time-series metadata records from topic \"{MetadataTopic}\": {ex.Message}");
                }
            }

            // Create a new metadata object based on the newly loaded configuration
            TimeSeriesMetadata metadata = new TimeSeriesMetadata();

            try
            {
                foreach (DataRow row in DataSource.Tables["ActiveMeasurements"].AsEnumerable())
                {
                    MeasurementKey key;

                    if (MeasurementKey.TryParse(row.Field<string>("ID") ?? MeasurementKey.Undefined.ToString(), out key))
                    {
                        metadata.Records.Add(new MetadataRecord
                        {
                            ID = key.ID,
                            Source = key.Source,
                            UniqueID = row.Field<object>("SignalID").ToString(),
                            PointTag = row.Field<string>("PointTag"),
                            Device = row.Field<string>("Device"),
                            Longitude = row.ConvertField("Longitude", 0.0F),
                            Latitude = row.ConvertField("Latitude", 0.0F),
                            Protocol = row.Field<string>("Protocol"),
                            SignalType = row.Field<string>("SignalType"),
                            EngineeringUnits = row.Field<string>("EngineeringUnits"),
                            PhasorType = row.Field<string>("PhasorType"),
                            Phase = row.Field<string>("Phase"),
                            Description = row.Field<string>("Description"),
                            LastUpdate = row.Field<DateTime>("UpdatedOn").ToString(MetadataRecord.DateTimeFormat)
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                OnProcessException(MessageLevel.Warning, new InvalidOperationException($"Failed to serialize current time-series metadata records: {ex.Message}", ex));
            }

            if (metadata.Count > 0)
            {
                // See if metadata has not been created yet or is different from the last known Kafka record
                if ((object)m_metadata == null || m_metadata.CalculateChecksum() != metadata.CalculateChecksum())
                {
                    // Update local metadata reference
                    m_metadata = metadata;

                    // Send updated metadata to Kafka
                    TimeSeriesMetadata.WriteToKafka(m_metadata, router, MetadataTopic);

                    // Cache metadata locally, if configured
                    m_cacheMetadataLocally?.RunOnceAsync();

                    m_metadataUpdateCount++;

                    OnStatusMessage(MessageLevel.Info, $"Updated \"{MetadataTopic}\" with {m_metadata.Count:N0} Kafka time-series metadata records...");
                }
                else
                {
                    OnStatusMessage(MessageLevel.Info, $"Latest \"{MetadataTopic}\" is up to date with current time-series metadata records...");
                }
            }
            else
            {
                OnStatusMessage(MessageLevel.Warning, "No local time-series metadata available to serialize...");
            }
        }
    }
    catch (Exception ex)
    {
        OnProcessException(MessageLevel.Warning, new InvalidOperationException($"Failed to update \"{MetadataTopic}\" with current time-series metadata records: {ex.Message}", ex));
    }
}
/// <summary>
/// Executes the metadata refresh in a synchronous fashion.
/// </summary>
protected override void ExecuteMetadataRefresh()
{
    if (!Initialized || !Enabled || !SerializeMetadata)
        return;

    try
    {
        // Build JSON post records with metadata values to use as post data
        List<EventData> samples = new List<EventData>();
        int size = 0;

        async Task pushToEventHub()
        {
            if (samples.Count > 0)
            {
                // Write data to event hub
                await m_eventHubMetadataClient.SendAsync(samples, MetadataPartitionKey);

                Interlocked.Increment(ref m_totalMetadataPosts);
            }

            samples.Clear();
        }

        foreach (DataRow row in DataSource.Tables["ActiveMeasurements"].AsEnumerable())
        {
            if (MeasurementKey.TryParse(row.Field<string>("ID") ?? MeasurementKey.Undefined.ToString(), out MeasurementKey key))
            {
                // Encode JSON data as UTF8
                string jsonData = string.Format(MetadataPostFormat,
                    /* {0} */ (uint)key.ID,
                    /* {1} */ key.Source,
                    /* {2} */ row.Field<object>("SignalID"),
                    /* {3} */ row.Field<string>("PointTag"),
                    /* {4} */ row.Field<string>("Device"),
                    /* {5} */ row.Field<string>("SignalType"),
                    /* {6} */ row.ConvertField("Longitude", 0.0F),
                    /* {7} */ row.ConvertField("Latitude", 0.0F),
                    /* {8} */ row.Field<string>("Description"),
                    /* {9} */ GetEpochMilliseconds(row.Field<DateTime>("UpdatedOn").Ticks)
                );

                byte[] bytes = Encoding.UTF8.GetBytes(jsonData);
                EventData record = new EventData(bytes);

                // Keep total post size under 1MB
                if (size + bytes.Length < PostSizeLimit)
                {
                    samples.Add(record);
                }
                else
                {
                    pushToEventHub().Wait();
                    samples.Add(record);
                    size = 0;
                }

                size += bytes.Length;
            }
        }

        // Push any remaining events
        pushToEventHub().Wait();
    }
    catch (Exception ex)
    {
        OnProcessException(MessageLevel.Warning, new InvalidOperationException($"Failed to serialize current time-series metadata records: {ex.Message}", ex));
    }
}
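// For reference, MetadataPostFormat (a constant defined elsewhere in this class) is
// expected to yield a JSON object whose fields line up with the indexed arguments
// above - the property names below are illustrative, not the actual constant:
//
//   {{"ID":{0},"Source":"{1}","SignalID":"{2}","PointTag":"{3}","Device":"{4}",
//     "SignalType":"{5}","Longitude":{6},"Latitude":{7},"Description":"{8}","UpdatedOn":{9}}}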