// Continuously generates a batch of randomized test measurements and queues them
// for publication, pacing at roughly 30 batches per second (33 ms sleep).
static void ProcessMeasurements(object state)
{
    while (true)
    {
        List<Measurement> batch = new List<Measurement>();

        for (int index = 1; index <= MeasurementCount; index++)
        {
            batch.Add(new Measurement
            {
                Key = MeasurementKey.LookUpOrCreate("DEVARCHIVE", (uint)index),
                Value = Random.Between(-65535.0D, 65536.0D),
                Timestamp = DateTime.UtcNow.Ticks
            });
        }

        publisher.QueueMeasurementsForProcessing(batch);

        // ~30 iterations per second
        Thread.Sleep(33);
    }
}
/// <summary>
/// Creates a new local system cache from one that was received remotely.
/// </summary>
/// <param name="dataSource"><see cref="DataSet"/> based data source used to interpret local measurement keys.</param>
/// <param name="remoteCache">Deserialized remote signal index cache.</param>
public SignalIndexCache(DataSet dataSource, SignalIndexCache remoteCache)
{
    m_subscriberID = remoteCache.SubscriberID;

    if (dataSource == null || !dataSource.Tables.Contains("ActiveMeasurements"))
    {
        // Just use remote signal index cache as-is if no local configuration exists
        m_reference = remoteCache.Reference;
        m_unauthorizedSignalIDs = remoteCache.UnauthorizedSignalIDs;
        return;
    }

    // Active measurements are defined, so interpret the remote signal cache in the
    // context of the current local measurement key definitions
    DataTable activeMeasurements = dataSource.Tables["ActiveMeasurements"];
    m_reference = new ConcurrentDictionary<ushort, MeasurementKey>();

    foreach (KeyValuePair<ushort, MeasurementKey> entry in remoteCache.Reference)
    {
        Guid signalID = entry.Value.SignalID;

        // Guid-based filter string is safe from injection since signalID is a Guid
        DataRow[] matches = activeMeasurements.Select("SignalID = '" + signalID.ToString() + "'");

        if (matches.Length == 0)
            continue;

        MeasurementKey localKey = MeasurementKey.LookUpOrCreate(signalID, matches[0]["ID"].ToNonNullString(MeasurementKey.Undefined.ToString()));
        m_reference.TryAdd(entry.Key, localKey);
    }

    m_unauthorizedSignalIDs = remoteCache.UnauthorizedSignalIDs;
}
/// <summary>
/// Maps a DNP3 double-bit binary value onto a measurement keyed by the given source/id pair.
/// Double-bit states are encoded as 0 (indeterminate), 1 (off), 2 (on), 3 (other).
/// Timestamp is the local receive time (UTC), not the device timestamp.
/// </summary>
private Measurement ConvertDoubleBinary(DoubleBitBinary meas, uint id, String source)
{
    double mappedValue;

    switch (meas.Value)
    {
        case DoubleBit.INDETERMINATE:
            mappedValue = 0.0;
            break;
        case DoubleBit.DETERMINED_OFF:
            mappedValue = 1.0;
            break;
        case DoubleBit.DETERMINED_ON:
            mappedValue = 2.0;
            break;
        default:
            mappedValue = 3.0;
            break;
    }

    return new Measurement
    {
        Key = MeasurementKey.LookUpOrCreate(source, id),
        Value = mappedValue,
        Timestamp = DateTime.UtcNow
    };
}
/// <summary>
/// Maps a DNP3 double-bit binary value onto a measurement keyed by the given source/id pair,
/// carrying the device-reported timestamp. States encode as 0 (indeterminate), 1 (off),
/// 2 (on), 3 (other).
/// </summary>
private Measurement ConvertDoubleBinary(DoubleBitBinary measurement, uint id, string source)
{
    double stateValue;

    if (measurement.Value == DoubleBit.INDETERMINATE)
        stateValue = 0.0D;
    else if (measurement.Value == DoubleBit.DETERMINED_OFF)
        stateValue = 1.0D;
    else if (measurement.Value == DoubleBit.DETERMINED_ON)
        stateValue = 2.0D;
    else
        stateValue = 3.0D;

    return new Measurement
    {
        Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata,
        Timestamp = measurement.Timestamp.Value,
        Value = stateValue
    };
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="connection">openHistorian connection.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Array of measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <returns>Enumeration of <see cref="IMeasurement"/> values read for time range.</returns>
/// <remarks>
/// <example>
/// <code>
/// using (var connection = new Connection("127.0.0.1", "PPA"))
///     foreach(var measurement in GetHistorianData(connection, DateTime.UtcNow.AddMinutes(-1.0D), DateTime.UtcNow))
///         Console.WriteLine("{0}:{1} @ {2} = {3}, quality: {4}", measurement.Key.Source, measurement.Key.ID, measurement.Timestamp, measurement.Value, measurement.StateFlags);
/// </code>
/// </example>
/// </remarks>
public static IEnumerable<IMeasurement> GetHistorianData(Connection connection, DateTime startTime, DateTime stopTime, IEnumerable<ulong> measurementIDs = null, Resolution resolution = Resolution.Full)
{
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        TimeSpan resolutionInterval = resolution.GetInterval();
        BaselineTimeInterval interval;

        // Choose the baselining granularity that matches the requested resolution.
        // BUG FIX: the original chain tested "== Ticks.PerHour" for the hour case, so any
        // resolution interval GREATER than one hour fell through and left the baseline at
        // Second; ">=" now covers those intervals as well.
        if (resolutionInterval.Ticks < Ticks.PerMinute)
            interval = BaselineTimeInterval.Second;
        else if (resolutionInterval.Ticks < Ticks.PerHour)
            interval = BaselineTimeInterval.Minute;
        else
            interval = BaselineTimeInterval.Hour;

        // Align query boundaries to the chosen baseline interval
        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);

        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    // Setup point ID selections (null means "all available points")
    if (measurementIDs != null)
        pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);

    // Start stream reader for the provided time window and selected points
    using (Database database = connection.OpenDatabase())
    {
        // NOTE(review): the returned TreeStream is not explicitly disposed here (matches
        // original behavior); confirm the database Dispose releases it
        TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);

        while (stream.Read(key, value))
        {
            yield return new Measurement
            {
                Metadata = MeasurementKey.LookUpOrCreate(connection.InstanceName, (uint)key.PointID).Metadata,
                Timestamp = key.TimestampAsDate,
                Value = value.AsSingle,
                StateFlags = (MeasurementStateFlags)value.Value3
            };
        }
    }
}
/// <summary>
/// Maps a DNP3 binary value onto a measurement (1.0 for true, 0.0 for false),
/// carrying the point's own timestamp.
/// </summary>
private Measurement ConvertBinary(Binary meas, uint id, String source)
{
    return new Measurement
    {
        Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata,
        Value = meas.Value ? 1.0 : 0.0,
        Timestamp = meas.Timestamp
    };
}
/// <summary>
/// Maps a DNP3 analog output status onto a measurement, carrying the point's own timestamp.
/// </summary>
private Measurement ConvertAnalogOutputStatus(AnalogOutputStatus meas, uint id, String source)
{
    return new Measurement
    {
        Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata,
        Value = meas.Value,
        Timestamp = meas.Timestamp
    };
}
/// <summary>
/// Maps a DNP3 frozen counter onto a measurement, carrying the device-reported timestamp.
/// </summary>
private Measurement ConvertFrozenCounter(FrozenCounter measurement, uint id, string source)
{
    Measurement converted = new Measurement();

    converted.Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata;
    converted.Value = measurement.Value;
    converted.Timestamp = measurement.Timestamp.Value;

    return converted;
}
/// <summary>
/// Maps a DNP3 analog output status onto a measurement keyed by source/id.
/// Timestamp is the local receive time (UTC), not the device timestamp.
/// </summary>
private Measurement ConvertAnalogOutputStatus(AnalogOutputStatus meas, uint id, String source)
{
    return new Measurement
    {
        Key = MeasurementKey.LookUpOrCreate(source, id),
        Value = meas.Value,
        Timestamp = DateTime.UtcNow
    };
}
/// <summary>
/// Maps a DNP3 analog value onto a measurement keyed by source/id.
/// Timestamp is the local receive time (UTC), not the device timestamp.
/// </summary>
private Measurement ConvertAnalog(Analog meas, uint id, String source)
{
    return new Measurement
    {
        Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata,
        Value = meas.Value,
        Timestamp = DateTime.UtcNow
    };
}
/// <summary>
/// Maps a DNP3 frozen counter onto a measurement, carrying the point's own timestamp.
/// </summary>
private Measurement ConvertFrozenCounter(FrozenCounter meas, uint id, String source)
{
    return new Measurement
    {
        Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata,
        Value = meas.Value,
        Timestamp = meas.Timestamp
    };
}
/// <summary>
/// Maps a DNP3 binary output status onto a measurement (1.0 for true, 0.0 for false).
/// Timestamp is the local receive time (UTC), not the device timestamp.
/// </summary>
private Measurement ConvertBinaryOutputStatus(BinaryOutputStatus meas, uint id, String source)
{
    return new Measurement
    {
        Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata,
        Value = meas.Value ? 1.0 : 0.0,
        Timestamp = DateTime.UtcNow
    };
}
/// <summary>
/// Maps a DNP3 binary value onto a measurement keyed by source/id (1.0 for true, 0.0 for false).
/// Timestamp is the local receive time (UTC), not the device timestamp.
/// </summary>
private Measurement ConvertBinary(Binary meas, uint id, String source)
{
    return new Measurement
    {
        Key = MeasurementKey.LookUpOrCreate(source, id),
        Value = meas.Value ? 1.0 : 0.0,
        Timestamp = DateTime.UtcNow
    };
}
/// <summary>
/// Maps a DNP3 binary value onto a measurement (1.0 for true, 0.0 for false),
/// carrying the device-reported timestamp.
/// </summary>
private Measurement ConvertBinary(Binary measurement, uint id, string source)
{
    Measurement converted = new Measurement();

    converted.Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata;
    converted.Value = measurement.Value ? 1.0 : 0.0;
    converted.Timestamp = measurement.Timestamp.Value;

    return converted;
}
/// <summary>
/// Maps a DNP3 analog output status onto a measurement, carrying the device-reported timestamp.
/// </summary>
private Measurement ConvertAnalogOutputStatus(AnalogOutputStatus measurement, uint id, string source)
{
    Measurement converted = new Measurement();

    converted.Metadata = MeasurementKey.LookUpOrCreate(source, id).Metadata;
    converted.Value = measurement.Value;
    converted.Timestamp = measurement.Timestamp.Value;

    return converted;
}
/// <summary>
/// Maps a DNP3 frozen counter onto a measurement keyed by source/id.
/// Timestamp is the local receive time (UTC), not the device timestamp.
/// </summary>
private Measurement ConvertFrozenCounter(FrozenCounter meas, uint id, String source)
{
    return new Measurement
    {
        Key = MeasurementKey.LookUpOrCreate(source, id),
        Value = meas.Value,
        Timestamp = DateTime.UtcNow
    };
}
/// <summary>
/// Returns a new measurement equivalent to the one being wrapped.
/// </summary>
/// <returns>The wrapped measurement.</returns>
public IMeasurement GetMeasurement()
{
    // Build the measurement key from the wrapped identity fields, then attach
    // metadata carrying tag name, adder and multiplier
    MeasurementKey key = MeasurementKey.LookUpOrCreate(Guid.Parse(SignalID), Source, unchecked((uint)ID));

    return new Measurement
    {
        Metadata = new MeasurementMetadata(key, TagName, Adder, Multiplier, null),
        Timestamp = Timestamp,
        Value = Value
    };
}
/// <summary>
/// Returns a new measurement equivalent to the one being wrapped.
/// </summary>
/// <returns>The wrapped measurement.</returns>
public IMeasurement GetMeasurement()
{
    // Resolve the measurement key from the wrapped identity fields
    MeasurementKey key = MeasurementKey.LookUpOrCreate(Guid.Parse(SignalID), Source, unchecked((uint)ID));

    return new Measurement
    {
        Key = key,
        TagName = TagName,
        Adder = Adder,
        Multiplier = Multiplier,
        Timestamp = Timestamp,
        Value = Value
    };
}
/// <summary>
/// Looks up measurement key from point tag.
/// </summary>
/// <param name="pointTag">Point tag to lookup.</param>
/// <param name="source">Source metadata.</param>
/// <returns>Measurement key from source metadata.</returns>
/// <remarks>
/// This function uses the <see cref="DataTable.Select(string)"/> function which uses a linear
/// search algorithm that can be slow for large data sets, it is recommended that any results
/// for calls to this function be cached to improve performance.
/// </remarks>
internal static MeasurementKey KeyFromTag(this string pointTag, DataSet source)
{
    DataRow record = pointTag.MetadataRecordFromTag(source);

    // No metadata record for this tag: nothing to resolve
    if ((object)record == null)
        return MeasurementKey.Undefined;

    try
    {
        Guid signalID = record["SignalID"].ToNonNullString(Guid.Empty.ToString()).ConvertToType<Guid>();
        return MeasurementKey.LookUpOrCreate(signalID, record["ID"].ToString());
    }
    catch
    {
        // Any conversion/lookup failure deliberately resolves to the undefined key
        return MeasurementKey.Undefined;
    }
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="connection">openHistorian connection.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Comma separated list of measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <returns>Enumeration of <see cref="IMeasurement"/> values read for time range.</returns>
/// <example>
/// <code>
/// using (var connection = new Connection("127.0.0.1", "PPA"))
///     foreach(var measurement in GetHistorianData(connection, DateTime.UtcNow.AddMinutes(-1.0D), DateTime.UtcNow))
///         Console.WriteLine("{0}:{1} @ {2} = {3}, quality: {4}", measurement.Key.Source, measurement.Key.ID, measurement.Timestamp, measurement.Value, measurement.StateFlags);
/// </code>
/// </example>
public static IEnumerable<IMeasurement> GetHistorianData(Connection connection, DateTime startTime, DateTime stopTime, string measurementIDs = null, Resolution resolution = Resolution.Full)
{
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Full resolution scans the raw range; otherwise sample at the resolution's interval
    // with a one-millisecond tolerance window
    SeekFilterBase<HistorianKey> timeFilter = resolution == Resolution.Full
        ? TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime)
        : TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolution.GetInterval(), new TimeSpan(TimeSpan.TicksPerMillisecond));

    // A null/empty ID list means "all available points"
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = string.IsNullOrEmpty(measurementIDs)
        ? null
        : PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs.Split(',').Select(ulong.Parse));

    // Start stream reader for the provided time window and selected points
    using (Database database = connection.OpenDatabase())
    {
        TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);

        while (stream.Read(key, value))
        {
            yield return new Measurement
            {
                Metadata = MeasurementKey.LookUpOrCreate(connection.InstanceName, (uint)key.PointID).Metadata,
                Timestamp = key.TimestampAsDate,
                Value = value.AsSingle,
                StateFlags = (MeasurementStateFlags)value.Value3
            };
        }
    }
}
/// <summary>
/// Converts each extracted historian data point to a measurement and publishes the batch.
/// Processing exceptions are reported as warnings rather than propagated.
/// </summary>
private void HistorianDataListener_DataExtracted(object sender, EventArgs<IList<IDataPoint>> e)
{
    try
    {
        List<IMeasurement> measurements = new List<IMeasurement>(e.Argument.Count);

        foreach (IDataPoint dataPoint in e.Argument)
        {
            measurements.Add(new Measurement
            {
                Metadata = MeasurementKey.LookUpOrCreate(m_historianDataListener.ID, (uint)dataPoint.HistorianID).Metadata,
                Value = dataPoint.Value,
                Timestamp = dataPoint.Time
            });
        }

        OnNewMeasurements(measurements);
    }
    catch (Exception ex)
    {
        OnProcessException(MessageLevel.Warning, ex);
    }
}
private static void MapMeasurementAttributes(ICollection<IMeasurement> mappedMeasurements, string signalReference, IMeasurement parsedMeasurement)
{
    // Coming into this function the parsed measurement value will only have a "value" and a "timestamp";
    // the measurement will not yet be associated with an actual historian measurement ID as the measurement
    // will have come directly out of the parsed phasor protocol data frame. We take the generated signal
    // reference and use that to lookup the actual historian measurement ID, source, adder and multipler.
    // Note: the factory lambda increments measurementID only when a new entry is created.
    IMeasurement definedMeasurement = m_definedMeasurements.GetOrAdd(signalReference, signal =>
        new Measurement
        {
            Key = MeasurementKey.LookUpOrCreate(Guid.NewGuid(), signal, ++measurementID),
        });

    // Assign ID and other relevant attributes to the parsed measurement value
    parsedMeasurement.Key = definedMeasurement.Key;
    parsedMeasurement.Adder = definedMeasurement.Adder;           // Allows for run-time additive measurement value adjustments
    parsedMeasurement.Multiplier = definedMeasurement.Multiplier; // Allows for run-time mulplicative measurement value adjustments

    // Add the updated measurement value to the destination measurement collection
    mappedMeasurements.Add(parsedMeasurement);
}
// Retrieves the measurements from the database (or from the local binary cache file when one
// exists), sorts them by time, optionally rewrites the cache, then starts the publish cycle.
private void GetDbMeasurements(object state)
{
    IDbConnection connection = null;

    // Get measurements from the database.
    try
    {
        SignalIndexCache signalIndexCache = new SignalIndexCache();
        CompactMeasurement measurement;
        long startTime = DateTime.UtcNow.Ticks;

        if (m_cacheFileName != null && File.Exists(m_cacheFileName))
        {
            OnStatusMessage(MessageLevel.Info, "Loading cached input data...");

            try
            {
                using (FileStream data = File.OpenRead(m_cacheFileName))
                {
                    byte[] buffer = new byte[4];
                    int signalIndexCacheImageSize;
                    int compactMeasurementSize;
                    int totalMeasurements;

                    // Read the signal index cache image size from the file
                    if (data.Read(buffer, 0, 4) != 4)
                        throw new EndOfStreamException();

                    signalIndexCacheImageSize = LittleEndian.ToInt32(buffer, 0);

                    // Resize buffer to accommodate exact signal index cache
                    buffer = new byte[signalIndexCacheImageSize];

                    // Read the signal index cache image from the file
                    if (data.Read(buffer, 0, signalIndexCacheImageSize) != signalIndexCacheImageSize)
                        throw new EndOfStreamException();

                    // Deserialize the signal index cache
                    signalIndexCache = Serialization.Deserialize<SignalIndexCache>(buffer, SerializationFormat.Binary);

                    // Read the size of each compact measurement from the file
                    if (data.Read(buffer, 0, 4) != 4)
                        throw new EndOfStreamException();

                    compactMeasurementSize = LittleEndian.ToInt32(buffer, 0);

                    // Read the total number of compact measurements from the file
                    if (data.Read(buffer, 0, 4) != 4)
                        throw new EndOfStreamException();

                    totalMeasurements = LittleEndian.ToInt32(buffer, 0);

                    // Resize buffer to accommodate compact measurement if needed (not likely)
                    if (buffer.Length < compactMeasurementSize)
                        buffer = new byte[compactMeasurementSize];

                    // Read each compact measurement image from the file
                    for (int i = 0; i < totalMeasurements; i++)
                    {
                        if (data.Read(buffer, 0, compactMeasurementSize) != compactMeasurementSize)
                            throw new EndOfStreamException();

                        // Parse compact measurement
                        measurement = new CompactMeasurement(signalIndexCache);
                        measurement.ParseBinaryImage(buffer, 0, compactMeasurementSize);

                        m_dbMeasurements.Add(measurement);

                        if (m_dbMeasurements.Count % 50000 == 0)
                            OnStatusMessage(MessageLevel.Info, $"Loaded {m_dbMeasurements.Count:N0} records so far...");
                    }

                    OnStatusMessage(MessageLevel.Info, $"Completed data load in {((Ticks)(DateTime.UtcNow.Ticks - startTime)).ToElapsedTimeString(2)}");
                }
            }
            catch (Exception ex)
            {
                // Normalize any cache-read failure to EndOfStreamException so the outer
                // handler deletes the corrupt cache and falls back to the database
                // ReSharper disable once CanBeReplacedWithTryCastAndCheckForNull
                if (ex is EndOfStreamException)
                    throw (EndOfStreamException)ex;

                throw new EndOfStreamException(ex.Message, ex);
            }
        }
        else
        {
            OnStatusMessage(MessageLevel.Info, "Loading database input data...");

            const string MeasurementTable = "ActiveMeasurements";

            Dictionary<string, string> dataProviderSettings = m_dataProviderString.ParseKeyValuePairs();
            Assembly assm = Assembly.Load(dataProviderSettings["AssemblyName"]);
            Type connectionType = assm.GetType(dataProviderSettings["ConnectionType"]);

            Dictionary<Guid, MeasurementKey> lookupCache = new Dictionary<Guid, MeasurementKey>();
            IDbCommand command;
            IDataReader dbReader;
            MeasurementKey key;
            Guid id;
            ushort index = 0;

            connection = (IDbConnection)Activator.CreateInstance(connectionType);
            connection.ConnectionString = m_dbConnectionString;
            connection.Open();

            command = connection.CreateCommand();
            command.CommandText = $"SELECT * FROM {m_dbTableName}";

            using (dbReader = command.ExecuteReader())
            {
                while (dbReader.Read())
                {
                    measurement = new CompactMeasurement(signalIndexCache);

                    foreach (string fieldName in m_fieldNames.Keys)
                    {
                        object value = dbReader[fieldName];
                        string propertyName = m_fieldNames[fieldName];

                        switch (propertyName)
                        {
                            case "Timestamp":
                                // If the value is a timestamp, use the timestamp format
                                // specified by the user when reading the timestamp.
                                if (m_timestampFormat == null)
                                    measurement.Timestamp = long.Parse(value.ToNonNullString());
                                else
                                    measurement.Timestamp = DateTime.ParseExact(value.ToNonNullString(), m_timestampFormat, CultureInfo.CurrentCulture);
                                break;
                            case "ID":
                                if (Guid.TryParse(value.ToString(), out id))
                                {
                                    if (!lookupCache.TryGetValue(id, out key))
                                    {
                                        if (DataSource.Tables.Contains(MeasurementTable))
                                        {
                                            DataRow[] filteredRows = DataSource.Tables[MeasurementTable].Select($"SignalID = '{id}'");

                                            if (filteredRows.Length > 0)
                                                key = MeasurementKey.LookUpOrCreate(id, filteredRows[0]["ID"].ToString());
                                        }

                                        if (key != MeasurementKey.Undefined)
                                        {
                                            // Cache measurement key associated with ID
                                            lookupCache[id] = key;

                                            // Assign a runtime index optimization for distinct measurements
                                            signalIndexCache.Reference.TryAdd(index++, key);
                                        }
                                    }

                                    measurement.Metadata = key.Metadata;
                                }
                                break;
                            case "Key":
                                if (MeasurementKey.TryParse(value.ToString(), out key))
                                {
                                    if (!lookupCache.ContainsKey(key.SignalID))
                                    {
                                        // Cache measurement key associated with ID
                                        lookupCache[key.SignalID] = key;

                                        // Assign a runtime index optimization for distinct measurements
                                        signalIndexCache.Reference.TryAdd(index++, key);
                                    }

                                    measurement.Metadata = key.Metadata;
                                }
                                break;
                            case "Value":
                                measurement.Value = Convert.ToDouble(value);
                                break;
                            default:
                                PropertyInfo property = GetAllProperties(typeof(IMeasurement)).FirstOrDefault(propertyInfo => propertyInfo.Name == propertyName);

                                if (property != null)
                                {
                                    Type propertyType = property.PropertyType;
                                    Type valueType = value.GetType();

                                    // ReSharper disable once UseMethodIsInstanceOfType
                                    if (property.PropertyType.IsAssignableFrom(value.GetType()))
                                    {
                                        property.SetValue(measurement, value, null);
                                    }
                                    else if (property.PropertyType == typeof(string))
                                    {
                                        property.SetValue(measurement, value.ToNonNullString(), null);
                                    }
                                    else if (valueType == typeof(string))
                                    {
                                        MethodInfo parseMethod = propertyType.GetMethod("Parse", new[] { typeof(string) });

                                        if (parseMethod != null && parseMethod.IsStatic)
                                            property.SetValue(measurement, parseMethod.Invoke(null, new[] { value }), null);
                                    }
                                    else
                                    {
                                        string exceptionMessage = $"The type of field {fieldName} could not be converted to the type of property {propertyName}.";
                                        OnProcessException(MessageLevel.Warning, new InvalidCastException(exceptionMessage));
                                    }
                                }
                                else
                                {
                                    string exceptionMessage = $"The type of field {fieldName} could not be converted to the type of property {propertyName} - no property match was found.";
                                    OnProcessException(MessageLevel.Warning, new InvalidCastException(exceptionMessage));
                                }
                                break;
                        }
                    }

                    // BUG FIX: the measurement was previously added (and progress reported)
                    // inside the field-mapping loop above, adding the same row once per mapped
                    // field; each database row is now added exactly once.
                    m_dbMeasurements.Add(measurement);

                    if (m_dbMeasurements.Count % 50000 == 0)
                        OnStatusMessage(MessageLevel.Info, $"Loaded {m_dbMeasurements.Count:N0} records so far...");
                }
            }

            OnStatusMessage(MessageLevel.Info, "Sorting data by time...");

            m_dbMeasurements = m_dbMeasurements.OrderBy(m => (long)m.Timestamp).ToList();

            OnStatusMessage(MessageLevel.Info, $"Completed data load in {((Ticks)(DateTime.UtcNow.Ticks - startTime)).ToElapsedTimeString(2)}");

            if (m_cacheFileName != null)
            {
                OnStatusMessage(MessageLevel.Info, "Caching data for next initialization...");

                using (FileStream data = File.OpenWrite(m_cacheFileName))
                {
                    byte[] signalIndexCacheImage = Serialization.Serialize(signalIndexCache, SerializationFormat.Binary);
                    int compactMeasurementSize = (new CompactMeasurement(signalIndexCache)).BinaryLength;

                    // Write the signal index cache image size to the file
                    data.Write(LittleEndian.GetBytes(signalIndexCacheImage.Length), 0, 4);

                    // Write the signal index cache image to the file
                    data.Write(signalIndexCacheImage, 0, signalIndexCacheImage.Length);

                    // Write the size of each compact measurement to the file
                    data.Write(LittleEndian.GetBytes(compactMeasurementSize), 0, 4);

                    // Write the total number of compact measurements to the file
                    data.Write(LittleEndian.GetBytes(m_dbMeasurements.Count), 0, 4);

                    // Write each compact measurement image to the file
                    for (int i = 0; i < m_dbMeasurements.Count; i++)
                        ((ISupportBinaryImage)m_dbMeasurements[i]).CopyBinaryImageToStream(data);
                }
            }
        }

        OnStatusMessage(MessageLevel.Info, "Entering data read cycle...");
        ThreadPool.QueueUserWorkItem(PublishData);
    }
    catch (EndOfStreamException ex)
    {
        OnProcessException(MessageLevel.Warning, new EndOfStreamException($"Failed load cached data from {m_cacheFileName} due to file corruption{(string.IsNullOrWhiteSpace(ex.Message) ? "," : ": " + ex.Message + " - ")} cache will be recreated from database"));

        // If the cached file is corrupt, delete it and load from the database
        if (File.Exists(m_cacheFileName))
            File.Delete(m_cacheFileName);

        m_dbMeasurements.Clear();
        GetDbMeasurements(null);
    }
    catch (Exception ex)
    {
        OnProcessException(MessageLevel.Warning, new InvalidOperationException("Failed during data load: " + ex.Message, ex));
    }
    finally
    {
        if (connection != null)
            connection.Close();
    }
}
/// <summary>
/// Initializes the <see cref="SignalIndexCache"/> by parsing the specified <paramref name="buffer"/> containing a binary image.
/// </summary>
/// <param name="buffer">Buffer containing binary image to parse.</param>
/// <param name="startIndex">0-based starting index in the <paramref name="buffer"/> to start parsing.</param>
/// <param name="length">Valid number of bytes within <paramref name="buffer"/> to read from <paramref name="startIndex"/>.</param>
/// <returns>The number of bytes used for initialization in the <paramref name="buffer"/> (i.e., the number of bytes parsed).</returns>
public int ParseBinaryImage(byte[] buffer, int startIndex, int length)
{
    // Wire format (all multi-byte fields big-endian):
    //   int32 total length, Guid subscriber ID, int32 reference count,
    //   then per reference: uint16 signal index, Guid signal ID, int32 source length,
    //   encoded source string, uint32 point ID;
    //   then int32 unauthorized ID count followed by that many Guids.
    int binaryLength;
    int offset = startIndex;
    int referenceCount;
    ushort signalIndex;
    Guid signalID;
    int sourceSize;
    string source;
    uint id;
    int unauthorizedIDCount;

    // Source strings cannot be decoded without a configured encoding
    if ((object)m_encoding == null)
    {
        throw new InvalidOperationException("Attempt to parse binary image of signal index cache without setting a character encoding.");
    }

    buffer.ValidateParameters(startIndex, length);

    // Returning 0 indicates nothing was parsed — presumably the caller waits for
    // more data and retries (TODO confirm against caller)
    if (length < 4)
    {
        return(0);
    }

    // Byte size of cache
    binaryLength = BigEndian.ToInt32(buffer, offset);
    offset += 4;

    // Full image has not arrived yet — parse nothing rather than parse partially
    if (length < binaryLength)
    {
        return(0);
    }

    // We know we have enough data so we can empty the reference cache
    m_reference.Clear();

    // Subscriber ID
    m_subscriberID = EndianOrder.BigEndian.ToGuid(buffer, offset);
    offset += 16;

    // Number of references
    referenceCount = BigEndian.ToInt32(buffer, offset);
    offset += 4;

    for (int i = 0; i < referenceCount; i++)
    {
        // Signal index
        signalIndex = BigEndian.ToUInt16(buffer, offset);
        offset += 2;

        // Signal ID
        signalID = EndianOrder.BigEndian.ToGuid(buffer, offset);
        offset += 16;

        // Source (length-prefixed, decoded with the configured encoding)
        sourceSize = BigEndian.ToInt32(buffer, offset);
        offset += 4;
        source = m_encoding.GetString(buffer, offset, sourceSize);
        offset += sourceSize;

        // ID
        id = BigEndian.ToUInt32(buffer, offset);
        offset += 4;

        m_reference[signalIndex] = MeasurementKey.LookUpOrCreate(signalID, source, id);
    }

    // Number of unauthorized IDs
    unauthorizedIDCount = BigEndian.ToInt32(buffer, offset);
    m_unauthorizedSignalIDs = new Guid[unauthorizedIDCount];
    offset += 4;

    for (int i = 0; i < unauthorizedIDCount; i++)
    {
        // Unauthorized ID
        m_unauthorizedSignalIDs[i] = EndianOrder.BigEndian.ToGuid(buffer, offset);
        offset += 16;
    }

    return(binaryLength);
}
private static void MapMeasurementAttributes(ICollection<IMeasurement> mappedMeasurements, string signalReference, IMeasurement parsedMeasurement)
{
    // The parsed measurement arrives with only a value and a timestamp — it has not yet
    // been associated with an actual historian measurement ID because it came directly
    // out of the parsed phasor protocol data frame. The generated signal reference is
    // used to look up (or lazily create) the historian measurement metadata; the factory
    // lambda only runs — and only increments measurementID — for a new signal reference.
    MeasurementMetadata definedMeasurement = m_definedMeasurements.GetOrAdd(
        signalReference,
        signal => MeasurementKey.LookUpOrCreate(Guid.NewGuid(), signal, ++measurementID).Metadata);

    // Attach the resolved metadata and hand the measurement to the destination collection
    parsedMeasurement.Metadata = definedMeasurement;
    mappedMeasurements.Add(parsedMeasurement);
}
/// <summary>
/// Initializes <see cref="SerializableMeasurement"/> from the specified binary image.
/// </summary>
/// <param name="buffer">Buffer containing binary image to parse.</param>
/// <param name="startIndex">0-based starting index in the <paramref name="buffer"/> to start parsing.</param>
/// <param name="length">Valid number of bytes within <paramref name="buffer"/> from <paramref name="startIndex"/>.</param>
/// <returns>The number of bytes used for initialization in the <paramref name="buffer"/> (i.e., the number of bytes parsed).</returns>
/// <exception cref="InvalidOperationException">Not enough buffer available to deserialize measurement.</exception>
/// <exception cref="ArgumentNullException"><paramref name="buffer"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startIndex"/> or <paramref name="length"/> is less than 0 -or-
/// <paramref name="startIndex"/> and <paramref name="length"/> will exceed <paramref name="buffer"/> length.
/// </exception>
public int ParseBinaryImage(byte[] buffer, int startIndex, int length)
{
    // Wire format (all multi-byte fields big-endian):
    //   uint32 key ID, int32 source length + encoded source string, Guid signal ID,
    //   int32 tag name length + encoded tag name, double value, double adder,
    //   double multiplier, int64 timestamp ticks, uint32 state flags.
    buffer.ValidateParameters(startIndex, length);

    if (length < FixedLength)
    {
        throw new InvalidOperationException("Not enough buffer available to deserialize measurement");
    }

    int size, index = startIndex;
    uint keyID;
    string keySource = "";

    // Decode key ID
    keyID = BigEndian.ToUInt32(buffer, index);
    index += 4;

    // Decode key source string length
    size = BigEndian.ToInt32(buffer, index);
    index += 4;

    // Decode key source string (empty string when length prefix is zero)
    if (size > 0)
    {
        keySource = m_encoding.GetString(buffer, index, size);
        index += size;
    }

    // Decode signal ID
    Guid signalID = EndianOrder.BigEndian.ToGuid(buffer, index);
    index += 16;

    // Apply parsed key changes
    Key = MeasurementKey.LookUpOrCreate(signalID, keySource, keyID);

    // Decode tag name string length
    size = BigEndian.ToInt32(buffer, index);
    index += 4;

    // Decode tag name string (null when length prefix is zero)
    if (size > 0)
    {
        TagName = m_encoding.GetString(buffer, index, size);
        index += size;
    }
    else
    {
        TagName = null;
    }

    // Decode value
    Value = BigEndian.ToDouble(buffer, index);
    index += 8;

    // Decode adder
    Adder = BigEndian.ToDouble(buffer, index);
    index += 8;

    // Decode multiplier
    Multiplier = BigEndian.ToDouble(buffer, index);
    index += 8;

    // Decode timestamp
    Timestamp = BigEndian.ToInt64(buffer, index);
    index += 8;

    // Decode state flags
    StateFlags = (MeasurementStateFlags)BigEndian.ToUInt32(buffer, index);
    index += 4;

    // Total bytes consumed (variable due to the two length-prefixed strings)
    return(index - startIndex);
}
/// <summary>
/// Reads values from the connection string and prepares this <see cref="PIRTInputAdapter"/> for connecting to PI
/// </summary>
public override void Initialize()
{
    base.Initialize();

    m_measurements = new List<IMeasurement>();

    Dictionary<string, string> settings = Settings;
    string setting;

    // Server name is mandatory; everything else has a sensible default
    if (!settings.TryGetValue("ServerName", out m_serverName))
        throw new InvalidOperationException("Server name is a required setting for PI connections. Please add a server in the format servername=myservername to the connection string.");

    m_userName = settings.TryGetValue("UserName", out setting) ? setting : null;
    m_password = settings.TryGetValue("Password", out setting) ? setting : null;
    m_connectTimeout = settings.TryGetValue("ConnectTimeout", out setting) ? Convert.ToInt32(setting) : PIConnection.DefaultConnectTimeout;
    AutoAddOutput = settings.TryGetValue("AutoAddOutput", out setting) ? bool.Parse(setting) : false;
    UseEventPipes = settings.TryGetValue("UseEventPipes", out setting) ? bool.Parse(setting) : true;
    QueryTimeSpan = settings.TryGetValue("QueryTimeSpan", out setting) ? Convert.ToInt32(setting) : 5;

    if (AutoAddOutput)
    {
        // Derive output measurements from every active measurement using the PI protocol
        List<IMeasurement> outputMeasurements = new List<IMeasurement>();

        foreach (DataRow row in DataSource.Tables["ActiveMeasurements"].AsEnumerable().Where(row => row["PROTOCOL"].ToString() == "PI"))
        {
            Measurement measurement = new Measurement();
            Guid signalID = new Guid(row["SIGNALID"].ToString());
            measurement.Key = MeasurementKey.LookUpOrCreate(signalID, row["ID"].ToString());
            outputMeasurements.Add(measurement);
        }

        OutputMeasurements = outputMeasurements.ToArray();
        OnOutputMeasurementsUpdated();
    }
}
// Process next data read.
// Timer callback that drains archived data points from m_dataReader up to the next publication
// time boundary and publishes them as measurements. Monitor.TryEnter guards against overlapping
// timer callbacks; a callback that loses the race simply skips this interval.
private void m_readTimer_Elapsed(object sender, ElapsedEventArgs e)
{
    List<IMeasurement> measurements = new List<IMeasurement>();

    if (Monitor.TryEnter(m_readTimer))
    {
        try
        {
            IDataPoint currentPoint = m_dataReader.Current;
            long timestamp = currentPoint.Time.ToDateTime().Ticks;
            MeasurementKey key;

            // First pass: seed the publication time from the first available point
            if (m_publicationTime == 0)
            {
                m_publicationTime = timestamp;
            }

            // Set next reasonable publication time
            while (m_publicationTime < timestamp)
            {
                m_publicationTime += m_publicationInterval;
            }

            do
            {
                // Lookup measurement key for this point
                key = MeasurementKey.LookUpOrCreate(m_instanceName, unchecked((uint)currentPoint.HistorianID));

                // Add current measurement to the collection for publication
                // (timestamp is replaced with wall-clock time when simulation is enabled)
                measurements.Add(new Measurement
                {
                    Metadata = key.Metadata,
                    Timestamp = m_simulateTimestamp ? DateTime.UtcNow.Ticks : timestamp,
                    Value = currentPoint.Value,
                    StateFlags = currentPoint.Quality.MeasurementQuality()
                });

                // Attempt to move to next record
                if (m_dataReader.MoveNext())
                {
                    // Read record value
                    currentPoint = m_dataReader.Current;
                    timestamp = currentPoint.Time.ToDateTime().Ticks;
                }
                else
                {
                    // Enumerator exhausted - decide between re-reading and finishing
                    if (timestamp < m_stopTime.ToDateTime().Ticks && m_startTime.ToDateTime().Ticks < timestamp)
                    {
                        // Could be attempting read with a future end time - in these cases attempt to re-read current data
                        // from now to end time in case any new data as been archived in the mean-time
                        m_startTime = new TimeTag(timestamp + Ticks.PerMillisecond);
                        m_dataReader = m_archiveReader.ReadData(m_historianIDs, m_startTime, m_stopTime).GetEnumerator();

                        if (!m_dataReader.MoveNext())
                        {
                            // Finished reading all available data
                            m_readTimer.Enabled = false;

                            if (m_autoRepeat)
                            {
                                // Restart the reader from the beginning on a pool thread
                                ThreadPool.QueueUserWorkItem(StartDataReader);
                            }
                            else
                            {
                                OnProcessingComplete();
                            }
                        }
                    }
                    else
                    {
                        // Finished reading all available data
                        m_readTimer.Enabled = false;

                        if (m_autoRepeat)
                        {
                            ThreadPool.QueueUserWorkItem(StartDataReader);
                        }
                        else
                        {
                            OnProcessingComplete();
                        }
                    }

                    break;
                }
            }
            while (timestamp <= m_publicationTime);
        }
        catch (InvalidOperationException)
        {
            // Pooled timer thread executed after last read, verify timer has stopped
            m_readTimer.Enabled = false;
        }
        finally
        {
            Monitor.Exit(m_readTimer);
        }
    }

    // Publish all measurements for this time interval
    if (measurements.Count > 0)
    {
        OnNewMeasurements(measurements);
    }
}
// Run query in Hadoop.
// Timer callback that polls the Hadoop database over ODBC for each configured tag (GUID),
// requesting only rows newer than the last timestamp recorded in the file-backed watermark
// dictionary, converts the rows to measurements and publishes them. Monitor.TryEnter prevents
// overlapping polls when a query takes longer than the timer interval.
private void m_timer_Elapsed(object sender, ElapsedEventArgs e)
{
    List<IMeasurement> measurements = new List<IMeasurement>();
    OnStatusMessage(MessageLevel.Info, "Connecting to Hadoop DB for update");

    if (Monitor.TryEnter(m_timer))
    {
        try
        {
            // Sub-second resolution is only applied when a sub-second field was configured
            bool addTicks = !string.IsNullOrEmpty(SubSecondField);

            // Connect to database
            using (OdbcConnection connection = new OdbcConnection(HadoopConnectionString))
            {
                m_currNum = 0;
                int nPoints = 0;

                foreach (Guid guid in m_queryParameter.Keys)
                {
                    Ticks newerThan;
                    m_currNum++;
                    nPoints = 0;

                    // Load the per-tag watermark; fall back to the configured oldest timestamp
                    lock (s_TimeStampUpdateLock)
                    {
                        using (FileBackedDictionary<Guid, Ticks> dictionary = new FileBackedDictionary<Guid, Ticks>(TimeStampUpdatefile))
                        {
                            if (!dictionary.TryGetValue(guid, out newerThan))
                            {
                                newerThan = m_oldestTimestamp;
                            }
                        }
                    }

                    // NOTE(review): "hh" is the 12-hour format specifier, so afternoon watermarks
                    // render as morning times in the query literal - "HH" is likely intended; verify
                    // against the query template before changing.
                    object[] param = { newerThan.ToString("yyyy-MM-dd hh:mm:ss") };
                    param = param.Concat(m_queryParameter[guid]).ToArray();

                    DataTable table = connection.RetrieveData(string.Format(m_query, param));

                    foreach (DataRow row in table.Rows)
                    {
                        Measurement measurement = new Measurement { Metadata = MeasurementKey.LookUpOrCreate(guid, "").Metadata };
                        measurement.Value = row.AsDouble(0) ?? double.NaN;

                        // NOTE(review): culture-sensitive parse - assumes DB timestamp strings match
                        // the host culture; confirm or pass CultureInfo.InvariantCulture.
                        measurement.Timestamp = DateTime.Parse(row.AsString(1));

                        // This is only down to seconds accuracy so we make sure we are only keeping the seconds here
                        measurement.Timestamp = measurement.Timestamp - measurement.Timestamp.DistanceBeyondSecond();

                        // BUG FIX: the original "measurement.Timestamp + row.AsInt64(2) ?? 0" parsed as
                        // "(Timestamp + AsInt64(2)) ?? 0" because ?? binds looser than +, so a NULL
                        // sub-second column zeroed the entire timestamp and the row was then dropped
                        // by the <= newerThan guard below. Parenthesizing applies the 0 default to the
                        // sub-second ticks only.
                        if (addTicks)
                        {
                            measurement.Timestamp = measurement.Timestamp + (row.AsInt64(2) ?? 0);
                        }

                        // Skip anything at or before the current watermark
                        if (measurement.Timestamp <= newerThan)
                        {
                            continue;
                        }

                        measurements.Add(measurement);
                        nPoints++;

                        // Advance the watermark to the newest accepted timestamp
                        if (measurement.Timestamp > newerThan)
                        {
                            newerThan = measurement.Timestamp;
                        }
                    }

                    // Persist the advanced watermark (indexer adds or updates in one lookup)
                    lock (s_TimeStampUpdateLock)
                    {
                        using (FileBackedDictionary<Guid, Ticks> dictionary = new FileBackedDictionary<Guid, Ticks>(TimeStampUpdatefile))
                        {
                            dictionary[guid] = newerThan;
                        }
                    }

                    m_lastConnected = DateTime.UtcNow;

                    // Periodic progress reporting
                    if (m_currNum % 20 == 0)
                    {
                        OnStatusMessage(MessageLevel.Info, $"Got Measurements for {m_currNum} out of {m_nTags} Tags");
                        OnStatusMessage(MessageLevel.Info, $"Obtained {nPoints} Points For Tag {guid} up to {newerThan:dd/MM/yyyy hh:mm:ss}");
                    }
                }
            }
        }
        catch (InvalidOperationException ex)
        {
            // Pooled timer thread executed after last read, verify timer has stopped
            m_timer.Enabled = false;
            OnProcessException(MessageLevel.Warning, ex);
        }
        catch (Exception ex)
        {
            OnProcessException(MessageLevel.Error, ex);
        }
        finally
        {
            Monitor.Exit(m_timer);
        }
    }

    // Publish all measurements for this time interval
    m_num = measurements.Count;
    OnStatusMessage(MessageLevel.Info, $"Disconnected from Hadoop with a total of {m_num} Points");

    if (measurements.Count > 0)
    {
        OnNewMeasurements(measurements);
    }
}
// Attempt to read the next record.
// Reads one CSV line from m_inStream and converts it into measurements, publishing them via
// OnNewMeasurements. Returns false when the stream/columns indicate there is nothing to process,
// true otherwise - note it returns true even when an exception was caught and logged.
private bool ReadNextRecord(long currentTime)
{
    try
    {
        List<IMeasurement> newMeasurements = new List<IMeasurement>();
        long fileTime = 0;
        int timestampColumn = 0;
        // NOTE(review): the line is read BEFORE EndOfStream is checked below, so when reading the
        // final line of the file the subsequent EndOfStream test is true and that last record is
        // discarded without being published - confirm whether this is intentional (e.g. trailing
        // blank/footer line) before changing.
        string[] fields = m_inStream.ReadLine().ToNonNullString().Split(',');

        if (m_inStream.EndOfStream || fields.Length < m_columns.Count)
        {
            return(false);
        }

        // Read time from Timestamp column in transverse mode
        if (m_transverse)
        {
            if (m_simulateTimestamp)
            {
                fileTime = currentTime;
            }
            else
            {
                // Locate the column whose mapped tag name is "Timestamp" (case-insensitive)
                timestampColumn = m_columnMappings.First(kvp => string.Compare(kvp.Value.TagName, "Timestamp", StringComparison.OrdinalIgnoreCase) == 0).Key;
                fileTime = long.Parse(fields[timestampColumn]);
            }
        }

        for (int i = 0; i < m_measurementsPerInterval; i++)
        {
            IMeasurement measurement;

            if (m_transverse)
            {
                // No measurement will be defined for timestamp column
                if (i == timestampColumn)
                {
                    continue;
                }

                if (m_columnMappings.TryGetValue(i, out measurement))
                {
                    // Clone the template so the shared mapping instance is never mutated
                    measurement = Measurement.Clone(measurement);
                    measurement.Value = double.Parse(fields[i]);
                }
                else
                {
                    // Unmapped column - publish an undefined NaN measurement as a placeholder
                    measurement = new Measurement();
                    measurement.Metadata = MeasurementKey.Undefined.Metadata;
                    measurement.Value = double.NaN;
                }

                if (m_simulateTimestamp)
                {
                    measurement.Timestamp = currentTime;
                }
                else if (m_columns.ContainsKey("Timestamp"))
                {
                    measurement.Timestamp = fileTime;
                }
            }
            else
            {
                // Non-transverse mode: one measurement per row, identified by Signal ID and/or Measurement Key columns
                measurement = new Measurement();

                if (m_columns.ContainsKey("Signal ID"))
                {
                    Guid measurementID = new Guid(fields[m_columns["Signal ID"]]);

                    if (m_columns.ContainsKey("Measurement Key"))
                    {
                        measurement.Metadata = MeasurementKey.LookUpOrCreate(measurementID, fields[m_columns["Measurement Key"]]).Metadata;
                    }
                    else
                    {
                        measurement.Metadata = MeasurementKey.LookUpBySignalID(measurementID).Metadata;
                    }
                }
                else if (m_columns.ContainsKey("Measurement Key"))
                {
                    measurement.Metadata = MeasurementKey.Parse(fields[m_columns["Measurement Key"]]).Metadata;
                }

                if (m_simulateTimestamp)
                {
                    measurement.Timestamp = currentTime;
                }
                else if (m_columns.ContainsKey("Timestamp"))
                {
                    measurement.Timestamp = long.Parse(fields[m_columns["Timestamp"]]);
                }

                if (m_columns.ContainsKey("Value"))
                {
                    measurement.Value = double.Parse(fields[m_columns["Value"]]);
                }
            }

            newMeasurements.Add(measurement);
        }

        OnNewMeasurements(newMeasurements);
    }
    catch (Exception ex)
    {
        // Parse/IO failures are logged as warnings; the caller still sees "true" and keeps reading
        OnProcessException(MessageLevel.Warning, ex);
    }

    return(true);
}