/// <summary>
/// Handles the slow-path cases for the ReadWhile operation: the current node is
/// exhausted and/or the cached stream pointer is stale.
/// </summary>
/// <param name="key">Where to store the key.</param>
/// <param name="value">Where to store the value.</param>
/// <param name="upperBounds">Exclusive upper bounds for the read.</param>
/// <param name="filter">The match filter to apply to the reading.</param>
/// <returns>True if a valid point was read; otherwise false.</returns>
protected bool ReadWhileCatchAll(TKey key, TValue value, TKey upperBounds, MatchFilterBase<TKey, TValue> filter)
{
    // Advance to the right sibling node when the current node has no more records.
    if (IndexOfNextKeyValue >= RecordCount)
    {
        if (RightSiblingNodeIndex == uint.MaxValue)
        {
            // No sibling exists: this was the last leaf node, so the stream is finished.
            key.Clear();
            value.Clear();
            Dispose();
            return false;
        }

        LoadNode(RightSiblingNodeIndex);
    }

    // Refresh cached pointer data if it is no longer valid.
    if (Stream.PointerVersion != PointerVersion)
        RefreshPointer();

    // When every key in this node lies below the upper bounds, no per-record
    // bounds checking is required; otherwise read while testing the bounds.
    return UpperKey.IsLessThan(upperBounds)
        ? InternalRead(key, value, filter)
        : InternalReadWhile(key, value, upperBounds, filter);
}
/// <summary>
/// Reads trending data points for the specified measurements over the given time window.
/// </summary>
/// <param name="measurementIDs">Measurement IDs to query, or null for all points.</param>
/// <param name="startTime">Start of the query window.</param>
/// <param name="stopTime">End of the query window.</param>
/// <returns>Enumeration of trending data points read from the historian.</returns>
private IEnumerable<TrendingDataPoint> Read(IEnumerable<ulong> measurementIDs, DateTime startTime, DateTime stopTime)
{
    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);

    // A null measurement list means "no point filter" (read every point).
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = measurementIDs is null
        ? null
        : PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);

    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Start stream reader for the provided time window and selected points
    using (TreeStream<HistorianKey, HistorianValue> stream = m_database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
    {
        m_treeStreams.Add(stream);

        while (stream.Read(key, value))
        {
            // Channel comes from the high double-word of PointID, series from the low.
            yield return new TrendingDataPoint
            {
                ChannelID = (int)key.PointID.HighDoubleWord(),
                SeriesID = (SeriesID)(int)key.PointID.LowDoubleWord(),
                Timestamp = key.TimestampAsDate,
                Value = value.AsSingle
            };
        }
    }
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="connection">openHistorian connection.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Array of measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <returns>Enumeration of <see cref="IMeasurement"/> values read for time range.</returns>
/// <remarks>
/// <example>
/// <code>
/// using (var connection = new Connection("127.0.0.1", "PPA"))
///     foreach(var measurement in GetHistorianData(connection, DateTime.UtcNow.AddMinutes(-1.0D), DateTime.UtcNow))
///         Console.WriteLine("{0}:{1} @ {2} = {3}, quality: {4}", measurement.Key.Source, measurement.Key.ID, measurement.Timestamp, measurement.Value, measurement.StateFlags);
/// </code>
/// </example>
/// </remarks>
public static IEnumerable<IMeasurement> GetHistorianData(Connection connection, DateTime startTime, DateTime stopTime, IEnumerable<ulong> measurementIDs = null, Resolution resolution = Resolution.Full)
{
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        TimeSpan resolutionInterval = resolution.GetInterval();
        BaselineTimeInterval interval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
            interval = BaselineTimeInterval.Second;
        else if (resolutionInterval.Ticks < Ticks.PerHour)
            interval = BaselineTimeInterval.Minute;
        else if (resolutionInterval.Ticks == Ticks.PerHour)
            interval = BaselineTimeInterval.Hour;

        // NOTE(review): intervals greater than one hour fall through and baseline to
        // the nearest second -- confirm this is the intended behavior.
        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);

        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    // Setup point ID selections
    if (measurementIDs != null)
        pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);

    // Start stream reader for the provided time window and selected points.
    // The stream is disposed deterministically (the original leaked it).
    using (Database database = connection.OpenDatabase())
    using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
    {
        while (stream.Read(key, value))
        {
            yield return new Measurement
            {
                Metadata = MeasurementKey.LookUpOrCreate(connection.InstanceName, (uint)key.PointID).Metadata,
                Timestamp = key.TimestampAsDate,
                Value = value.AsSingle,
                StateFlags = (MeasurementStateFlags)value.Value3
            };
        }
    }
}
/// <summary>
/// Verifies that a point ID filter built from a list of ulong IDs uses the
/// ULongHashSet implementation and round-trips through a binary stream unchanged.
/// </summary>
public void TestUlongHashSet()
{
    List<ulong> list = new List<ulong> { 13242345234523412341ul };

    MatchFilterBase<HistorianKey, HistorianValue> pointId = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(list);

    if (!pointId.GetType().FullName.Contains("ULongHashSet"))
        throw new Exception("Wrong type");

    using (BinaryStream bs = new BinaryStream(allocatesOwnMemory: true))
    {
        // Persist the filter type GUID followed by the filter payload.
        bs.Write(pointId.FilterType);
        pointId.Save(bs);
        bs.Position = 0;

        // Rehydrate from the stream and confirm the same concrete type comes back.
        MatchFilterBase<HistorianKey, HistorianValue> filter = Library.Filters.GetMatchFilter<HistorianKey, HistorianValue>(bs.ReadGuid(), bs);

        if (!filter.GetType().FullName.Contains("ULongHashSet"))
            throw new Exception("Wrong type");
    }
}
/// <summary>
/// Performs the followup checks required after a ReadWhile call returns false,
/// resorting the archive stream and/or advancing the seek frame as needed.
/// </summary>
/// <param name="key">Where to store the key if the edge-case point is returned.</param>
/// <param name="value">Where to store the value if the edge-case point is returned.</param>
/// <param name="filter">Optional match filter; may be null.</param>
/// <returns>True only in the edge case where the current point equals the end of the frame and passes the filter; otherwise false.</returns>
private bool ReadWhileFollowupActions(TKey key, TValue value, MatchFilterBase<TKey, TValue> filter)
{
    //There are certain followup requirements when a ReadWhile method returns false.
    //Condition 1:
    //  The end of the node has been reached.
    //Response:
    //  It returned false to allow for additional checks such as timeouts to occur.
    //  Do Nothing.
    //
    //Condition 2:
    //  The archive stream may no longer be in order and needs to be checked
    //Response:
    //  Resort the archive stream
    //
    //Condition 3:
    //  The end of the frame has been reached
    //Response:
    //  Advance to the next frame
    //  Also test the edge case where the current point might be equal to the end of the frame
    //  since this is an inclusive filter and ReadWhile is exclusive.
    //  If it's part of the frame, return true after Advancing the frame and the point.
    //

    //Update the cached values for the table so proper analysis can be done.
    m_firstTable.UpdateCachedValue();

    //Check Condition 1: the next key is still inside the current bounds, so the
    //false return was only an end-of-node pause. Nothing to do.
    if (m_firstTable.CacheIsValid && m_firstTable.CacheKey.IsLessThan(m_readWhileUpperBounds))
    {
        return(false);
    }

    //Since condition 2 and 3 can occur at the same time, verifying the sort of the Archive Stream is a good thing to do.
    VerifyArchiveStreamSortingOrder();

    if (EOS)
    {
        return(false);
    }

    //Check if Condition 3's exception occured: the current point is exactly the
    //end-of-frame key (inclusive), so it must still be returned.
    if (m_firstTable.CacheKey.IsEqualTo(m_keySeekFilter.EndOfFrame))
    {
        //This is the exception clause. I will advance the frame, but will still need to return the current point.
        m_firstTable.Scanner.Read(key, value);
        AdvanceSeekableFilter(true, key);
        SetReadWhileUpperBoundsValue();
        return(filter is null || filter.Contains(key, value));
    }

    //Check if condition 3 occured: the current point is past the end of the frame,
    //so advance to the next frame without returning a point.
    if (m_firstTable.CacheKey.IsGreaterThan(m_keySeekFilter.EndOfFrame))
    {
        AdvanceSeekableFilter(true, m_firstTable.CacheKey);
        SetReadWhileUpperBoundsValue();
    }

    return(false);
}
/// <summary>
/// Using the provided filter, continues to advance the stream
/// but stops short of returning the point that is equal to
/// the provided key.
/// </summary>
/// <param name="key">Where to store the key</param>
/// <param name="value">Where to store the value</param>
/// <param name="upperBounds">the test condition. Will return false if the returned point would have exceeded this value</param>
/// <param name="filter">the filter to apply to the reading.</param>
/// <returns>
/// Returns true if the point returned is valid.
/// Returns false if:
///     The point read is greater than or equal to <paramref name="upperBounds"/>.
///     The end of the stream is reached.
///     The end of the current node has been reached.
/// </returns>
public virtual bool ReadWhile(TKey key, TValue value, TKey upperBounds, MatchFilterBase<TKey, TValue> filter)
{
    // Slow path: the cached pointer is stale or the node is exhausted.
    if (Stream.PointerVersion != PointerVersion || IndexOfNextKeyValue >= RecordCount)
        return ReadWhileCatchAll(key, value, upperBounds, filter);

    // When every key in this node is below the bounds, skip per-record bounds checks.
    if (UpperKey.IsLessThan(upperBounds))
        return InternalRead(key, value, filter);

    return InternalReadWhile(key, value, upperBounds, filter);
}
/// <summary>
/// Creates a new <see cref="ReportHistorianReader"/>.
/// </summary>
/// <param name="server">Snapserver to connect to <see cref="SnapServer"/>.</param>
/// <param name="instanceName">Name of the instance to connect to.</param>
/// <param name="startTime">Starttime.</param>
/// <param name="endTime">Endtime.</param>
/// <param name="frameRate">SamplingRate of the signal.</param>
/// <param name="pointIDs">PointIDs to be collected.</param>
public ReportHistorianReader(SnapServer server, string instanceName, DateTime startTime, DateTime endTime, int frameRate, IEnumerable<ulong> pointIDs)
{
    m_client = SnapClient.Connect(server);
    m_database = m_client.GetDatabase<HistorianKey, HistorianValue>(instanceName);
    m_key = new HistorianKey();
    m_value = new HistorianValue();

    // Round the window endpoints using the configured frame rate before seeking.
    var roundedStart = DataPoint.RoundTimestamp(startTime, frameRate);
    var roundedEnd = DataPoint.RoundTimestamp(endTime, frameRate);

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(roundedStart, roundedEnd);
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);

    m_stream = m_database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);
}
/// <summary>
/// Reads sampled historian data for the specified time window, returning one
/// <c>Measurement</c> per streamed point.
/// </summary>
/// <param name="historianServer">Historian server in "host[:port]" form.</param>
/// <param name="instanceName">Historian instance name.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="interval">Sampling interval; zero ticks reads at full resolution.</param>
/// <param name="measurementIDs">Comma separated measurement IDs, or null/empty for all points.</param>
/// <returns>Enumeration of measurements read for the time range.</returns>
/// <exception cref="ArgumentNullException">Server or instance name is missing.</exception>
/// <exception cref="ArgumentException">Start time is after stop time.</exception>
public static IEnumerable GetHistorianDataSampled(SqlString historianServer, SqlString instanceName, DateTime startTime, DateTime stopTime, TimeSpan interval, [SqlFacet(MaxSize = -1)] SqlString measurementIDs)
{
    const int DefaultHistorianPort = 38402;

    // Validate eagerly: the original performed these checks inside the iterator
    // body, which deferred the exceptions until the result was first enumerated.
    if (historianServer.IsNull || string.IsNullOrEmpty(historianServer.Value))
        throw new ArgumentNullException("historianServer", "Missing historian server parameter");

    if (instanceName.IsNull || string.IsNullOrEmpty(instanceName.Value))
        throw new ArgumentNullException("instanceName", "Missing historian instance name parameter");

    if (startTime > stopTime)
        throw new ArgumentException("Invalid time range specified", "startTime");

    // Parse "host[:port]" - fall back to the default port when absent or invalid.
    string[] parts = historianServer.Value.Split(':');
    string hostName = parts[0];
    int port;

    if (parts.Length < 2 || !int.TryParse(parts[1], out port))
        port = DefaultHistorianPort;

    return ReadHistorianDataSampled(hostName, port, instanceName.Value, startTime, stopTime, interval, measurementIDs);
}

// Deferred iterator portion of GetHistorianDataSampled; arguments are pre-validated.
private static IEnumerable ReadHistorianDataSampled(string hostName, int port, string instanceName, DateTime startTime, DateTime stopTime, TimeSpan interval, SqlString measurementIDs)
{
    using (HistorianClient client = new HistorianClient(hostName, port))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> reader = client.GetDatabase<HistorianKey, HistorianValue>(instanceName))
    {
        // Zero interval means full resolution; otherwise down-sample on the interval.
        SeekFilterBase<HistorianKey> timeFilter = interval.Ticks == 0
            ? TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime)
            : TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond));

        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
        HistorianKey key = new HistorianKey();
        HistorianValue value = new HistorianValue();

        if (!measurementIDs.IsNull && !string.IsNullOrEmpty(measurementIDs.Value))
            pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs.Value.Split(',').Select(ulong.Parse));

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = reader.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value))
                yield return new Measurement(key.PointID, key.TimestampAsDate, value.AsSingle);
        }
    }
}
/// <summary>
/// Creates a new <see cref="SnapDBClient"/> connected to the specified historian
/// instance and opens a read stream for the given time window and points.
/// </summary>
/// <param name="hostAddress">Host name or IP of the historian server.</param>
/// <param name="port">Port of the historian server.</param>
/// <param name="instanceName">Name of the historian instance to read from.</param>
/// <param name="startTime">Start time of the window.</param>
/// <param name="endTime">End time of the window.</param>
/// <param name="frameRate">Sampling rate used to round the window endpoints.</param>
/// <param name="pointIDs">Point IDs to be collected.</param>
public SnapDBClient(string hostAddress, int port, string instanceName, ulong startTime, ulong endTime, int frameRate, IEnumerable<ulong> pointIDs)
{
    m_client = new HistorianClient(hostAddress, port);
    m_database = m_client.GetDatabase<HistorianKey, HistorianValue>(instanceName);
    m_key = new HistorianKey();
    m_value = new HistorianValue();

    // Round the window endpoints using the configured frame rate before seeking.
    var roundedStart = DataPoint.RoundTimestamp(startTime, frameRate);
    var roundedEnd = DataPoint.RoundTimestamp(endTime, frameRate);

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(roundedStart, roundedEnd);
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);

    m_stream = m_database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);
}
/// <summary>
/// Creates a sequential reader stream over the archive list using the supplied
/// options and server-side filters.
/// </summary>
/// <param name="readerOptions">Read options supplied to the reader.</param>
/// <param name="keySeekFilter">Seek-based filter to follow.</param>
/// <param name="keyMatchFilter">Match-based filter to follow.</param>
/// <param name="workerThreadSynchronization">Synchronization helper for the worker thread.</param>
/// <returns>A stream that will read the specified data.</returns>
/// <exception cref="ObjectDisposedException">This engine has been disposed.</exception>
private SequentialReaderStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter, WorkerThreadSynchronization workerThreadSynchronization)
{
    if (m_disposed)
        throw new ObjectDisposedException(GetType().FullName);

    // Track query count for diagnostics.
    Stats.QueriesExecuted++;

    return new SequentialReaderStream<TKey, TValue>(m_archiveList, readerOptions, keySeekFilter, keyMatchFilter, workerThreadSynchronization);
}
/// <summary>
/// Reads back the archived data for the configured time range and point set,
/// periodically reporting progress through <paramref name="updateProgressBar"/>.
/// </summary>
/// <param name="archive">Historian archive to read from.</param>
/// <param name="updateProgressBar">Callback invoked with the current percent complete.</param>
/// <returns>Elapsed time of the read-back operation, or zero if no points were configured.</returns>
public Ticks ReadBackHistorianData(HistorianIArchive archive, Action<int> updateProgressBar)
{
    IEnumerable<ulong> points;

    if (m_settings.ReadFromCsv)
        points = m_indexToPointIDLookup.Skip(1); // First value is always 0 because the timestamp is the first column
    else
        points = m_points;

    if (points == null)
    {
        ShowMessage("Point list not initialized");
        return new Ticks(0);
    }

    // Materialize once: the sequence is both counted here and enumerated again by
    // the point filter below (the original enumerated the lazy Skip twice).
    List<ulong> pointList = points.ToList();

    int count = 0;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(DataPoint.RoundTimestamp(m_startTime, m_settings.FrameRate), DataPoint.RoundTimestamp(m_endTime, m_settings.FrameRate));
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointList);

    // Guard against a zero interval, which would throw DivideByZeroException in
    // the modulus check inside the read loop.
    int messageInterval = Math.Max(1, pointList.Count * m_settings.MessageInterval);
    DateTime startTime = DateTime.UtcNow;

    // Dispose the stream deterministically (the original leaked it).
    using (TreeStream<HistorianKey, HistorianValue> stream = archive.ClientDatabase.Read(GSF.Snap.Services.Reader.SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
    {
        while (stream.Read(key, value))
        {
            count++;

            if (count % messageInterval == 0)
            {
                PercentComplete = (int)((1.0D - (new Ticks(m_endTime.Ticks - (long)key.Timestamp).ToSeconds() / m_timeRange)) * 100.0D);
                ShowMessage($"{Environment.NewLine}{count} points read back so far, averaging {(count / (DateTime.UtcNow - startTime).TotalSeconds):N0} points per second.");
                updateProgressBar(PercentComplete);
            }
        }
    }

    return DateTime.UtcNow - startTime;
}
/// <summary>
/// Reads fixed-size key/value records sequentially from the node until a record
/// passes <paramref name="filter"/> or the record set is exhausted.
/// </summary>
/// <param name="key">Where to store the key.</param>
/// <param name="value">Where to store the value.</param>
/// <param name="filter">The match filter each record must satisfy.</param>
/// <returns>True if a matching record was read; false when the node is exhausted.</returns>
protected override unsafe bool InternalRead(TKey key, TValue value, MatchFilterBase<TKey, TValue> filter)
{
    while (true)
    {
        // Records are laid out back-to-back: key bytes followed by value bytes.
        byte* record = Pointer + IndexOfNextKeyValue * m_keyValueSize;
        key.Read(record);
        value.Read(record + KeySize);
        IndexOfNextKeyValue++;

        if (filter.Contains(key, value))
            return true;

        if (IndexOfNextKeyValue >= RecordCount)
            return false;
    }
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="connection">openHistorian connection.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Comma separated list of measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <returns>Enumeration of <see cref="IMeasurement"/> values read for time range.</returns>
/// <example>
/// <code>
/// using (var connection = new Connection("127.0.0.1", "PPA"))
///     foreach(var measurement in GetHistorianData(connection, DateTime.UtcNow.AddMinutes(-1.0D), DateTime.UtcNow))
///         Console.WriteLine("{0}:{1} @ {2} = {3}, quality: {4}", measurement.Key.Source, measurement.Key.ID, measurement.Timestamp, measurement.Value, measurement.StateFlags);
/// </code>
/// </example>
public static IEnumerable<IMeasurement> GetHistorianData(Connection connection, DateTime startTime, DateTime stopTime, string measurementIDs = null, Resolution resolution = Resolution.Full)
{
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Set data scan resolution
    if (resolution == Resolution.Full)
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    else
        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolution.GetInterval(), new TimeSpan(TimeSpan.TicksPerMillisecond));

    // Setup point ID selections
    if (!string.IsNullOrEmpty(measurementIDs))
        pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs.Split(',').Select(ulong.Parse));

    // Start stream reader for the provided time window and selected points.
    // The stream is disposed deterministically (the original leaked it).
    using (Database database = connection.OpenDatabase())
    using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
    {
        while (stream.Read(key, value))
        {
            yield return new Measurement
            {
                Metadata = MeasurementKey.LookUpOrCreate(connection.InstanceName, (uint)key.PointID).Metadata,
                Timestamp = key.TimestampAsDate,
                Value = value.AsSingle,
                StateFlags = (MeasurementStateFlags)value.Value3
            };
        }
    }
}
/// <summary>
/// Reads encoded key/value records sequentially from the node until a record
/// passes <paramref name="filter"/> or the record set is exhausted.
/// </summary>
/// <param name="key">Where to store the key.</param>
/// <param name="value">Where to store the value.</param>
/// <param name="filter">The match filter each record must satisfy.</param>
/// <returns>True if a matching record was read; false when the node is exhausted.</returns>
protected override bool InternalRead(TKey key, TValue value, MatchFilterBase<TKey, TValue> filter)
{
    while (true)
    {
        // Decode the next record using the previously read pair as the encoding baseline.
        byte* position = Pointer + m_nextOffset;
        int length = m_encoding.Decode(position, m_prevKey, m_prevValue, key, value, out _);

        // The freshly decoded pair becomes the baseline for the next record.
        key.CopyTo(m_prevKey);
        value.CopyTo(m_prevValue);

        m_nextOffset += length;
        IndexOfNextKeyValue++;

        if (filter.Contains(key, value))
            return true;

        if (IndexOfNextKeyValue >= RecordCount)
            return false;
    }
}
/// <summary>
/// Queries the provided signals within the time range described by <paramref name="timestamps"/>.
/// With this method, the signals will be strongly typed and therefore can be converted.
/// </summary>
/// <param name="database">Database reader used to execute the query.</param>
/// <param name="timestamps">a <see cref="QueryFilterTimestamp"/> that describes how a signal will be parsed</param>
/// <param name="signals">an IEnumerable of all of the signals to query as part of the results set.</param>
/// <param name="readerOptions">The options that will be used when querying this data.</param>
/// <returns>Dictionary of signal data keyed by historian point ID.</returns>
public static Dictionary<ulong, SignalDataBase> GetSignals(this IDatabaseReader<HistorianKey, HistorianValue> database, SeekFilterBase<HistorianKey> timestamps, IEnumerable<ISignalWithType> signals, SortedTreeEngineReaderOptions readerOptions)
{
    Dictionary<ulong, SignalDataBase> results = new Dictionary<ulong, SignalDataBase>();

    // One SignalData bucket per distinct historian ID.
    foreach (ISignalWithType pt in signals)
    {
        if (pt.HistorianId.HasValue && !results.ContainsKey(pt.HistorianId.Value))
            results.Add(pt.HistorianId.Value, new SignalData(pt.Functions));
    }

    HistorianKey key = new HistorianKey();
    HistorianValue hvalue = new HistorianValue();

    // The dictionary already holds exactly the distinct historian IDs requested,
    // so build the point filter from it rather than re-enumerating signals
    // (the original enumerated the signals sequence twice).
    MatchFilterBase<HistorianKey, HistorianValue> keyParser = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(results.Keys);

    // Dispose the stream deterministically (the original leaked it).
    using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(readerOptions, timestamps, keyParser))
    {
        while (stream.Read(key, hvalue))
            results.AddSignalIfExists(key.Timestamp, key.PointID, hvalue.Value1);
    }

    foreach (SignalDataBase signal in results.Values)
        signal.Completed();

    return results;
}
/// <summary>
/// Reads data from the SortedTreeEngine with the provided read options and server side filters.
/// </summary>
/// <param name="readerOptions">read options supplied to the reader. Can be null.</param>
/// <param name="keySeekFilter">a seek based filter to follow. Can be null.</param>
/// <param name="keyMatchFilter">a match based filter to follow. Can be null.</param>
/// <returns>A stream that will read the specified data.</returns>
public abstract TreeStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter);
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="connection">openHistorian connection.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementID">Measurement ID to test for data continuity.</param>
/// <param name="resolution">Resolution for testing data.</param>
/// <param name="expectedFullResolutionTicks">Expected number of ticks per interval at full resolution, e.g., 33,333 = 1/30 of a second representing a sampling interval of 30 times per second.</param>
/// <returns>Enumeration of valid data ranges for specified time range.</returns>
/// <remarks>
/// 1 tick = 100 nanoseconds.
/// </remarks>
public static IEnumerable<Tuple<DateTime, DateTime>> GetContiguousDataRegions(Connection connection, DateTime startTime, DateTime stopTime, ulong measurementID, Resolution resolution, long expectedFullResolutionTicks = 333333)
{
    // Setup time-range and point ID selections
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromPointID<HistorianKey, HistorianValue>(measurementID);
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    TimeSpan interval, tolerance;

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        interval = new TimeSpan(expectedFullResolutionTicks);
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        interval = resolution.GetInterval();
        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    // PMUs times may float a little - provide a one millisecond tolerance window above the standard interval
    tolerance = interval.Add(TimeSpan.FromMilliseconds(1.0D));

    DateTime lastStartTime = startTime;
    DateTime lastStopTime = startTime;
    DateTime nextExpectedTime = startTime;
    DateTime currentTime;
    long totalRegions = 0;

    // Start stream reader for the provided time window and selected points.
    // The stream is disposed deterministically (the original leaked it).
    using (Database database = connection.OpenDatabase())
    using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
    {
        // Scan historian stream for given point over specified time range and data resolution
        while (stream.Read(key, value))
        {
            currentTime = key.TimestampAsDate;

            // See if current time was not expected time and gap is larger than resolution tolerance - could simply
            // be user started with a time that was not aligned with desired resolution, hence the tolerance check
            if (currentTime != nextExpectedTime && currentTime - nextExpectedTime > tolerance)
            {
                if (lastStartTime != lastStopTime)
                {
                    // Detected a data gap, return last contiguous region
                    totalRegions++;
                    yield return new Tuple<DateTime, DateTime>(lastStartTime, lastStopTime);
                }

                // Move start time to current value
                lastStartTime = currentTime;
                lastStopTime = lastStartTime;
                nextExpectedTime = lastStartTime + interval;
            }
            else
            {
                // Setup next expected timestamp
                nextExpectedTime += interval;
                lastStopTime = currentTime;
            }
        }

        // If no data gaps were detected, return a single value for full region for where there was data
        if (totalRegions == 0 && lastStartTime != lastStopTime)
            yield return new Tuple<DateTime, DateTime>(lastStartTime, lastStopTime);
    }
}
/// <summary>
/// Reads data from the SortedTreeEngine with the provided read options and server side filters.
/// </summary>
/// <param name="readerOptions">read options supplied to the reader. Can be null.</param>
/// <param name="keySeekFilter">a seek based filter to follow. Can be null.</param>
/// <param name="keyMatchFilter">a match based filter to follow. Can be null.</param>
/// <returns>A stream that will read the specified data.</returns>
/// <exception cref="Exception">A reader is already active, or the server rejected the request.</exception>
public override TreeStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter)
{
    if (m_reader != null)
        throw new Exception("Sockets do not support concurrent readers. Dispose of old reader.");

    m_stream.Write((byte)ServerCommand.Read);

    // Each optional component is serialized behind a boolean presence flag.
    if (keySeekFilter == null)
    {
        m_stream.Write(false);
    }
    else
    {
        m_stream.Write(true);
        m_stream.Write(keySeekFilter.FilterType);
        keySeekFilter.Save(m_stream);
    }

    if (keyMatchFilter == null)
    {
        m_stream.Write(false);
    }
    else
    {
        m_stream.Write(true);
        m_stream.Write(keyMatchFilter.FilterType);
        keyMatchFilter.Save(m_stream);
    }

    if (readerOptions == null)
    {
        m_stream.Write(false);
    }
    else
    {
        m_stream.Write(true);
        readerOptions.Save(m_stream);
    }

    m_stream.Flush();

    ServerResponse command = (ServerResponse)m_stream.ReadUInt8();

    // Typos in the original response messages ("UnhandledExcetion", "recgonize")
    // are corrected below.
    switch (command)
    {
        case ServerResponse.UnhandledException:
            string exception = m_stream.ReadString();
            throw new Exception("Server UnhandledException: \n" + exception);
        case ServerResponse.UnknownOrCorruptSeekFilter:
            throw new Exception("Server does not recognize the seek filter");
        case ServerResponse.UnknownOrCorruptMatchFilter:
            throw new Exception("Server does not recognize the match filter");
        case ServerResponse.UnknownOrCorruptReaderOptions:
            throw new Exception("Server does not recognize the reader options");
        case ServerResponse.SerializingPoints:
            break;
        case ServerResponse.ErrorWhileReading:
            exception = m_stream.ReadString();
            throw new Exception("Server Error While Reading: \n" + exception);
        default:
            throw new Exception("Unknown server response: " + command.ToString());
    }

    // Clear m_reader when the point reader completes so a new read can begin.
    m_reader = new PointReader(m_encodingMode, m_stream, () => m_reader = null);
    return m_reader;
}
/// <summary>
/// Reads the next key/value pair from the current node that satisfies the supplied filter.
/// </summary>
/// <param name="key">Where to store the key.</param>
/// <param name="value">Where to store the value.</param>
/// <param name="filter">The match filter each record must satisfy.</param>
/// <returns>True if a matching record was read; otherwise false.</returns>
protected abstract bool InternalRead(TKey key, TValue value, MatchFilterBase<TKey, TValue> filter);
//private void BtnExport_Click(object sender, EventArgs e) //{ // Settings.Default.Save(); // if (m_meta is null) // { // MessageBox.Show("Please download the metadata first."); // return; // } // if (m_selectedMeasurements is null || m_selectedMeasurements.Count == 0) // { // MessageBox.Show("There are no measurements to extract"); // return; // } // DateTime startTime = dateStartTime.Value; // DateTime stopTime = dateStopTime.Value; // if (startTime > stopTime) // { // MessageBox.Show("Start and Stop times are invalid"); // return; // } // TimeSpan interval = Resolutions.GetInterval((string)cmbResolution.SelectedItem); // HistorianClientOptions clientOptions = new HistorianClientOptions(); // clientOptions.DefaultDatabase = Settings.Default.HistorianInstanceName; // clientOptions.NetworkPort = Settings.Default.HistorianStreamingPort; // clientOptions.ServerNameOrIp = Settings.Default.ServerIP; // using (HistorianClient client = new HistorianClient(clientOptions)) // { // KeySeekFilterBase<HistorianKey> timeFilter; // if (interval.Ticks != 0) // timeFilter = TimestampFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond)); // else // timeFilter = TimestampFilter.CreateFromRange<HistorianKey>(startTime, stopTime); // var points = m_selectedMeasurements.Select((x) => (ulong)x.PointID).ToArray(); // var pointFilter = PointIDFilter.CreateFromList<HistorianKey>(points); // var database = client.GetDefaultDatabase(); // var frames = database.GetFrames(timeFilter, pointFilter).RoundToTolerance(1); // using (var csvStream = new StreamWriter("C:\\temp\\file.csv")) // { // //csvStream.AutoFlush = false; // csvStream.Write("Timestamp,"); // foreach (var signal in m_selectedMeasurements) // { // csvStream.Write(signal.Description); // csvStream.Write(','); // } // csvStream.WriteLine(); // foreach (var frame in frames) // { // csvStream.Write(frame.Key.ToString("MM/dd/yyyy hh:mm:ss.fffffff")); // 
csvStream.Write(','); // foreach (var signal in m_selectedMeasurements) // { // HistorianValueStruct value; // if (frame.Value.Points.TryGetValue((ulong)signal.PointID, out value)) // { // csvStream.Write(value.AsSingle); // } // csvStream.Write(','); // } // csvStream.WriteLine(); // } // csvStream.Flush(); // } // database.Disconnect(); // } //} //private void BtnExport_Click(object sender, EventArgs e) //{ // Settings.Default.Save(); // if (m_meta is null) // { // MessageBox.Show("Please download the metadata first."); // return; // } // if (m_selectedMeasurements is null || m_selectedMeasurements.Count == 0) // { // MessageBox.Show("There are no measurements to extract"); // return; // } // DateTime startTime = dateStartTime.Value; // DateTime stopTime = dateStopTime.Value; // if (startTime > stopTime) // { // MessageBox.Show("Start and Stop times are invalid"); // return; // } // TimeSpan interval = Resolutions.GetInterval((string)cmbResolution.SelectedItem); // HistorianClientOptions clientOptions = new HistorianClientOptions(); // clientOptions.DefaultDatabase = Settings.Default.HistorianInstanceName; // clientOptions.NetworkPort = Settings.Default.HistorianStreamingPort; // clientOptions.ServerNameOrIp = Settings.Default.ServerIP; // using (HistorianClient client = new HistorianClient(clientOptions)) // { // KeySeekFilterBase<HistorianKey> timeFilter; // if (interval.Ticks != 0) // timeFilter = TimestampFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond)); // else // timeFilter = TimestampFilter.CreateFromRange<HistorianKey>(startTime, stopTime); // var points = m_selectedMeasurements.Select((x) => (ulong)x.PointID).ToArray(); // var pointFilter = PointIDFilter.CreateFromList<HistorianKey>(points); // var database = client.GetDefaultDatabase(); // using (var frameReader = database.GetPointStream(timeFilter, pointFilter).GetFrameReader()) // using (var csvStream = new 
StreamWriter("C:\\temp\\file.csv")) // { // var ultraStream = new UltraStreamWriter(csvStream); // //csvStream.AutoFlush = false; // csvStream.Write("Timestamp,"); // foreach (var signal in m_selectedMeasurements) // { // csvStream.Write(signal.Description); // csvStream.Write(','); // } // csvStream.WriteLine(); // while (frameReader.Read()) // { // csvStream.Write(frameReader.FrameTime.ToString("MM/dd/yyyy hh:mm:ss.fffffff")); // csvStream.Write(','); // foreach (var signal in m_selectedMeasurements) // { // HistorianValueStruct value; // if (frameReader.Frame.TryGetValue((ulong)signal.PointID, out value)) // { // ultraStream.Write(value.AsSingle); // //csvStream.Write(value.AsSingle); // } // //csvStream.Write(','); // ultraStream.Write(','); // } // ultraStream.Flush(); // csvStream.WriteLine(); // } // csvStream.Flush(); // } // database.Disconnect(); // } //} //private void BtnExport_Click(object sender, EventArgs e) //{ // Settings.Default.Save(); // if (m_meta is null) // { // MessageBox.Show("Please download the metadata first."); // return; // } // if (m_selectedMeasurements is null || m_selectedMeasurements.Count == 0) // { // MessageBox.Show("There are no measurements to extract"); // return; // } // DateTime startTime = dateStartTime.Value; // DateTime stopTime = dateStopTime.Value; // if (startTime > stopTime) // { // MessageBox.Show("Start and Stop times are invalid"); // return; // } // TimeSpan interval = Resolutions.GetInterval((string)cmbResolution.SelectedItem); // HistorianClientOptions clientOptions = new HistorianClientOptions(); // clientOptions.DefaultDatabase = Settings.Default.HistorianInstanceName; // clientOptions.NetworkPort = Settings.Default.HistorianStreamingPort; // clientOptions.ServerNameOrIp = Settings.Default.ServerIP; // using (HistorianClient client = new HistorianClient(clientOptions)) // { // m_readIndex = 0; // m_fillMeasurements.Clear(); // m_measurementsInOrder.Clear(); // foreach (var signal in m_selectedMeasurements) 
/// <summary>
/// Handles the Export button click: validates user selections, then exports the selected
/// measurements over the chosen time range to "Export.csv" on the desktop from a background
/// worker thread, restoring the button state via BeginInvoke when finished.
/// </summary>
private void BtnExport_Click(object sender, EventArgs e)
{
    Settings.Default.Save();

    // Metadata must be downloaded before any points can be selected for export.
    if (m_meta is null)
    {
        MessageBox.Show("Please download the metadata first.");
        return;
    }

    if (m_selectedMeasurements is null || m_selectedMeasurements.Count == 0)
    {
        MessageBox.Show("There are no measurements to extract");
        return;
    }

    DateTime startTime = dateStartTime.Value;
    DateTime stopTime = dateStopTime.Value;

    if (startTime > stopTime)
    {
        MessageBox.Show("Start and Stop times are invalid");
        return;
    }

    // Put the UI into "busy" state; Tag preserves the original caption for RestoreExportButton.
    BtnExport.Tag = BtnExport.Text;
    BtnExport.Text = "Exporting...";
    BtnExport.Enabled = false;
    Cursor.Current = Cursors.WaitCursor;
    Application.DoEvents();

    TimeSpan interval = Resolutions.GetInterval((string)cmbResolution.SelectedItem);

    // Run the export off the UI thread so the form stays responsive.
    Thread workerThread = new Thread(start =>
    {
        long processingStartTime = DateTime.UtcNow.Ticks;

        using (HistorianClient client = new HistorianClient(TxtServerIP.Text, int.Parse(TxtHistorianPort.Text)))
        {
            // Reset fill-state bookkeeping; one Measurements slot per selected point,
            // keyed by point ID and also kept in selection order for column output.
            m_readIndex = 0;
            m_fillMeasurements.Clear();
            m_measurementsInOrder.Clear();

            foreach (MeasurementRow signal in m_selectedMeasurements)
            {
                Measurements m = new Measurements();
                m_fillMeasurements.Add((ulong)signal.PointID, m);
                m_measurementsInOrder.Add(m);
            }

            // Zero interval means full resolution; otherwise down-sample with 1 ms tolerance.
            SeekFilterBase<HistorianKey> timeFilter;

            if (interval.Ticks != 0)
            {
                timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond));
            }
            else
            {
                timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
            }

            ulong[] points = m_selectedMeasurements.Select((x) => (ulong)x.PointID).ToArray();
            MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(points);

            using (ClientDatabaseBase<HistorianKey, HistorianValue> database = client.GetDatabase<HistorianKey, HistorianValue>(TxtHistorianInstance.Text))
            {
                string fileName = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.DesktopDirectory), "Export.csv");

                using (DataFillAdapter fillAdapter = database.GetPointStream(timeFilter, pointFilter).GetFillAdapter())
                using (StreamWriter csvStream = new StreamWriter(fileName))
                {
                    UltraStreamWriter ultraStream = new UltraStreamWriter(csvStream);
                    //csvStream.AutoFlush = false;

                    // Header row: timestamp column followed by one column per measurement.
                    csvStream.Write("Timestamp,");

                    foreach (MeasurementRow signal in m_selectedMeasurements)
                    {
                        csvStream.Write(signal.Description);
                        csvStream.Write(',');
                    }

                    csvStream.WriteLine();

                    m_readIndex++;

                    // Each Fill(FillData) pass populates one frame; a cell is written only when
                    // its slot was updated during this pass (ReadNumber matches m_readIndex).
                    while (fillAdapter.Fill(FillData))
                    {
                        csvStream.Write(fillAdapter.FrameTime.ToString("MM/dd/yyyy hh:mm:ss.fffffff"));
                        csvStream.Write(',');

                        foreach (Measurements signal in m_measurementsInOrder)
                        {
                            if (signal.ReadNumber == m_readIndex)
                            {
                                ultraStream.Write(signal.Value);
                            }

                            ultraStream.Write(',');
                        }

                        ultraStream.Flush();
                        csvStream.WriteLine();
                        m_readIndex++;
                    }

                    csvStream.Flush();
                }
            }
        }

        // Report elapsed time and restore the button on the UI thread.
        Ticks runtime = DateTime.UtcNow.Ticks - processingStartTime;
        BeginInvoke(new Action<Ticks>(r => MessageBox.Show(r.ToElapsedTimeString(2), "Processing Time", MessageBoxButtons.OK, MessageBoxIcon.Information)), runtime);
        BeginInvoke(new Action(RestoreExportButton));
    });

    workerThread.Start();
}
/// <summary>
/// Reads data from the engine with the provided options, filters, and worker thread
/// synchronization object, tracking the returned stream until it is disposed.
/// </summary>
/// <param name="readerOptions">read options supplied to the reader.</param>
/// <param name="keySeekFilter">a seek based filter to follow.</param>
/// <param name="keyMatchFilter">a match based filter to follow.</param>
/// <param name="workerThreadSynchronization">synchronization object for the worker thread.</param>
/// <returns>A stream that will read the specified data.</returns>
public TreeStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter, WorkerThreadSynchronization workerThreadSynchronization)
{
    if (m_disposed)
        throw new ObjectDisposedException(GetType().FullName);

    SequentialReaderStream<TKey, TValue> readerStream = m_server.Read(readerOptions, keySeekFilter, keyMatchFilter, workerThreadSynchronization);

    // A stream that is already at end-of-stream needs no disposal tracking.
    if (readerStream.EOS)
        return readerStream;

    readerStream.Disposed += OnStreamDisposal;

    lock (m_syncRoot)
        m_openStreams.Add(readerStream);

    return readerStream;
}
/// <summary>
/// Opens a <see cref="PointStream"/> over the historian for the specified timestamps and points.
/// </summary>
/// <param name="database">the database to use</param>
/// <param name="options">A list of query options</param>
/// <param name="timestamps">the timestamps to query for</param>
/// <param name="points">the points to query</param>
/// <returns>A <see cref="PointStream"/> wrapping the underlying read stream.</returns>
public static PointStream GetPointStream(this IDatabaseReader<HistorianKey, HistorianValue> database, SortedTreeEngineReaderOptions options, SeekFilterBase<HistorianKey> timestamps, MatchFilterBase<HistorianKey, HistorianValue> points)
{
    var stream = database.Read(options, timestamps, points);
    return new PointStream(database, stream);
}
/// <summary>
/// Reads data from the SortedTreeEngine with the provided read options and server side filters.
/// </summary>
/// <param name="readerOptions">read options supplied to the reader. Can be null.</param>
/// <param name="keySeekFilter">a seek based filter to follow. Can be null.</param>
/// <param name="keyMatchFilter">a match based filter to follow. Can be null.</param>
/// <returns>A stream that will read the specified data.</returns>
public override TreeStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter)
{
    // Delegate to the overload that also accepts a worker thread synchronization
    // object, passing null for the synchronization parameter.
    return Read(readerOptions, keySeekFilter, keyMatchFilter, null);
}
/// <summary>
/// Reads the next key/value pair in sequence while keys remain within <paramref name="upperBounds"/>,
/// applying the supplied match filter. Implemented by derived classes; invoked by the
/// read-while helpers once the current node and pointer state have been validated.
/// </summary>
/// <param name="key">the key instance to populate with the record that was read</param>
/// <param name="value">the value instance to populate with the record that was read</param>
/// <param name="upperBounds">the upper key bound for the read — assumed exclusive; TODO confirm against implementations</param>
/// <param name="filter">a match based filter each record must satisfy</param>
/// <returns><c>true</c> if a record was read; otherwise <c>false</c>.</returns>
protected abstract bool InternalReadWhile(TKey key, TValue value, TKey upperBounds, MatchFilterBase<TKey, TValue> filter);
/// <summary>
/// Starts a query that will read data source values, given a set of point IDs and targets, over a time range.
/// </summary>
/// <param name="startTime">Start-time for query.</param>
/// <param name="stopTime">Stop-time for query.</param>
/// <param name="interval">Interval from Grafana request.</param>
/// <param name="decimate">Flag that determines if data should be decimated over provided time range.</param>
/// <param name="targetMap">Set of IDs with associated targets to query.</param>
/// <returns>Queried data source data in terms of value and time.</returns>
protected override IEnumerable<DataSourceValue> QueryDataSourceValues(DateTime startTime, DateTime stopTime, string interval, bool decimate, Dictionary<ulong, string> targetMap)
{
    SnapServer server = GetAdapterInstance(InstanceName)?.Server?.Host;

    if ((object)server == null)
        yield break;

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(InstanceName))
    {
        if ((object)database == null)
            yield break;

        Resolution resolution = TrendValueAPI.EstimatePlotResolution(InstanceName, startTime, stopTime, targetMap.Keys);
        SeekFilterBase<HistorianKey> timeFilter;

        // Set data scan resolution
        if (!decimate || resolution == Resolution.Full)
        {
            timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
        }
        else
        {
            TimeSpan resolutionInterval = resolution.GetInterval();
            BaselineTimeInterval timeInterval = BaselineTimeInterval.Second;

            if (resolutionInterval.Ticks < Ticks.PerMinute)
                timeInterval = BaselineTimeInterval.Second;
            else if (resolutionInterval.Ticks < Ticks.PerHour)
                timeInterval = BaselineTimeInterval.Minute;
            else if (resolutionInterval.Ticks == Ticks.PerHour)
                timeInterval = BaselineTimeInterval.Hour;

            startTime = startTime.BaselinedTimestamp(timeInterval);
            stopTime = stopTime.BaselinedTimestamp(timeInterval);

            // Interval-data tolerance, in milliseconds — overridable via the
            // "HistoryTolerance" system setting; defaults to 1 ms.
            int milliseconds = 1;

            try
            {
                ConfigurationFile configFile = ConfigurationFile.Open(AppDomain.CurrentDomain.SetupInformation.ConfigurationFile);
                CategorizedSettingsSection categorizedSettings = configFile.Settings;
                CategorizedSettingsElementCollection systemSettings = categorizedSettings["systemSettings"];
                string val = systemSettings["HistoryTolerance"].Value;

                // FIX: the configured value was previously read but never applied,
                // leaving the tolerance permanently at its 1 ms default.
                if (int.TryParse(val, out int configuredTolerance) && configuredTolerance > 0)
                    milliseconds = configuredTolerance;
            }
            catch
            {
                // something went wrong, so just use original default
            }

            timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond * milliseconds));
        }

        // Setup point ID selections
        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(targetMap.Keys);

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            HistorianKey key = new HistorianKey();
            HistorianValue value = new HistorianValue();

            while (stream.Read(key, value))
            {
                yield return new DataSourceValue
                {
                    Target = targetMap[key.PointID],
                    Time = (key.Timestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                    Value = value.AsSingle,
                    Flags = (MeasurementStateFlags)value.Value3
                };
            }
        }
    }
}
/// <summary>
/// Gets frames from the historian as individual frames.
/// </summary>
/// <param name="database">the database to use</param>
/// <param name="options">A list of query options</param>
/// <param name="timestamps">the timestamps to query for</param>
/// <param name="points">the points to query</param>
/// <returns>A sorted list of frame data keyed by frame timestamp.</returns>
public static SortedList<DateTime, FrameData> GetFrames(this IDatabaseReader<HistorianKey, HistorianValue> database, SortedTreeEngineReaderOptions options, SeekFilterBase<HistorianKey> timestamps, MatchFilterBase<HistorianKey, HistorianValue> points)
{
    var stream = database.Read(options, timestamps, points);
    return stream.GetFrames();
}
/// <summary>
/// Starts a query that will read data source values, given a set of point IDs and targets, over a time range.
/// </summary>
/// <param name="startTime">Start-time for query.</param>
/// <param name="stopTime">Stop-time for query.</param>
/// <param name="interval">Interval from Grafana request.</param>
/// <param name="includePeaks">Flag that determines if decimated data should include min/max interval peaks over provided time range.</param>
/// <param name="targetMap">Set of IDs with associated targets to query.</param>
/// <returns>Queried data source data in terms of value and time.</returns>
protected override IEnumerable<DataSourceValue> QueryDataSourceValues(DateTime startTime, DateTime stopTime, string interval, bool includePeaks, Dictionary<ulong, string> targetMap)
{
    SnapServer server = GetAdapterInstance(InstanceName)?.Server?.Host;

    if (server == null)
    {
        yield break;
    }

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(InstanceName))
    {
        if (database == null)
        {
            yield break;
        }

        // When the requested interval cannot be parsed, fall back to an estimated
        // plot resolution for the time range.
        if (!TryParseInterval(interval, out TimeSpan resolutionInterval))
        {
            Resolution resolution = TrendValueAPI.EstimatePlotResolution(InstanceName, startTime, stopTime, targetMap.Keys);
            resolutionInterval = resolution.GetInterval();
        }

        // Baseline (align) start/stop times to the unit appropriate for the resolution.
        BaselineTimeInterval timeInterval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
        {
            timeInterval = BaselineTimeInterval.Second;
        }
        else if (resolutionInterval.Ticks < Ticks.PerHour)
        {
            timeInterval = BaselineTimeInterval.Minute;
        }
        else if (resolutionInterval.Ticks == Ticks.PerHour)
        {
            timeInterval = BaselineTimeInterval.Hour;
        }

        startTime = startTime.BaselinedTimestamp(timeInterval);
        stopTime = stopTime.BaselinedTimestamp(timeInterval);

        // Guarantee a non-empty query window after baselining.
        if (startTime == stopTime)
        {
            stopTime = stopTime.AddSeconds(1.0D);
        }

        SeekFilterBase<HistorianKey> timeFilter;

        // Set timestamp filter resolution; peak queries read at full resolution and
        // decimate manually below so interval extremes are not lost.
        if (includePeaks || resolutionInterval == TimeSpan.Zero)
        {
            // Full resolution query
            timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
        }
        else
        {
            // Interval query
            timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
        }

        // Setup point ID selections
        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(targetMap.Keys);

        // Per-point publication state: last published timestamp and running peak tracker.
        Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(targetMap.Count);
        Dictionary<ulong, Peak> peaks = new Dictionary<ulong, Peak>(targetMap.Count);
        ulong resolutionSpan = (ulong)resolutionInterval.Ticks;

        // Peak mode publishes up to two values (min and max) per span, so the span doubles.
        if (includePeaks)
        {
            resolutionSpan *= 2UL;
        }

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            HistorianKey key = new HistorianKey();
            HistorianValue value = new HistorianValue();
            Peak peak = Peak.Default;

            while (stream.Read(key, value))
            {
                ulong pointID = key.PointID;
                ulong timestamp = key.Timestamp;
                float pointValue = value.AsSingle;

                if (includePeaks)
                {
                    peak = peaks.GetOrAdd(pointID, _ => new Peak());
                    peak.Set(pointValue, timestamp);
                }

                // Skip values that fall inside the decimation span for this point.
                if (resolutionSpan > 0UL && timestamp - lastTimes.GetOrAdd(pointID, 0UL) < resolutionSpan)
                {
                    continue;
                }

                // New value is ready for publication
                string target = targetMap[pointID];
                MeasurementStateFlags flags = (MeasurementStateFlags)value.Value3;

                if (includePeaks)
                {
                    // Publish interval min, then max (when distinct), then reset the tracker.
                    if (peak.MinTimestamp > 0UL)
                    {
                        yield return (new DataSourceValue
                        {
                            Target = target,
                            Value = peak.Min,
                            Time = (peak.MinTimestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                            Flags = flags
                        });
                    }

                    if (peak.MaxTimestamp != peak.MinTimestamp)
                    {
                        yield return (new DataSourceValue
                        {
                            Target = target,
                            Value = peak.Max,
                            Time = (peak.MaxTimestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                            Flags = flags
                        });
                    }

                    peak.Reset();
                }
                else
                {
                    yield return (new DataSourceValue
                    {
                        Target = target,
                        Value = pointValue,
                        Time = (timestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                        Flags = flags
                    });
                }

                lastTimes[pointID] = timestamp;
            }
        }
    }
}
/// <summary>
/// Gets frames from the historian as individual frames.
/// </summary>
/// <param name="database">the database to use</param>
/// <param name="options">A list of query options</param>
/// <param name="timestamps">the timestamps to query for</param>
/// <param name="points">the points to query</param>
/// <returns>A sorted list of frame data keyed by frame timestamp.</returns>
public static SortedList<DateTime, FrameData> GetFrames(this IDatabaseReader<HistorianKey, HistorianValue> database, SortedTreeEngineReaderOptions options, SeekFilterBase<HistorianKey> timestamps, MatchFilterBase<HistorianKey, HistorianValue> points)
{
    // Open the read stream for the requested window and convert it to frames.
    return database
        .Read(options, timestamps, points)
        .GetFrames();
}
/// <summary>
/// Streams historian data for the requested points and time range to <paramref name="responseStream"/>
/// as CSV, using a producer task (historian read) and a consumer task (stream write) coordinated
/// through a shared write buffer.
/// </summary>
/// <param name="securityPrincipal">Security principal of the requesting user; must hold the minimum required roles.</param>
/// <param name="requestParameters">Query parameters: PointIDs, StartTime, EndTime, TSSnap, FrameRate, AlignTimestamps, MissingAsNaN, FillMissingTimestamps, InstanceName, TSTolerance (ms).</param>
/// <param name="responseStream">Destination stream for the CSV output.</param>
/// <param name="cancellationToken">Token used to cancel the export.</param>
private async Task CopyModelAsCsvToStreamAsync(SecurityPrincipal securityPrincipal, NameValueCollection requestParameters, Stream responseStream, CancellationToken cancellationToken)
{
    const double DefaultFrameRate = 30;
    const int DefaultTimestampSnap = 0;

    string dateTimeFormat = Program.Host.Model.Global.DateTimeFormat;

    // TODO: Improve operation for large point lists:
    // Pick-up "POST"ed parameters with a "genurl" param, then cache parameters
    // in a memory cache and return the unique URL (a string instead of a file)
    // with a "download" param and unique ID associated with cached parameters.
    // Then extract params based on unique ID and follow normal steps...

    // Note TSTolerance is in ms
    string pointIDsParam = requestParameters["PointIDs"];
    string startTimeParam = requestParameters["StartTime"];
    string endTimeParam = requestParameters["EndTime"];
    string timestampSnapParam = requestParameters["TSSnap"];
    string frameRateParam = requestParameters["FrameRate"];
    string alignTimestampsParam = requestParameters["AlignTimestamps"];
    string missingAsNaNParam = requestParameters["MissingAsNaN"];
    string fillMissingTimestampsParam = requestParameters["FillMissingTimestamps"];
    string instanceName = requestParameters["InstanceName"];
    string toleranceParam = requestParameters["TSTolerance"];

    ulong[] pointIDs;
    string headers;

    if (string.IsNullOrEmpty(pointIDsParam))
        throw new ArgumentNullException("PointIDs", "Cannot export data: no values were provided in \"PointIDs\" parameter.");

    try
    {
        pointIDs = pointIDsParam.Split(',').Select(ulong.Parse).ToArray();
        Array.Sort(pointIDs);
    }
    catch (Exception ex)
    {
        throw new ArgumentNullException("PointIDs", $"Cannot export data: failed to parse \"PointIDs\" parameter value \"{pointIDsParam}\": {ex.Message}");
    }

    if (string.IsNullOrEmpty(startTimeParam))
        throw new ArgumentNullException("StartTime", "Cannot export data: no \"StartTime\" parameter value was specified.");

    // FIX: this guard previously re-tested pointIDsParam (copy-paste bug), so a
    // missing "EndTime" parameter slipped past validation and failed in ParseExact.
    if (string.IsNullOrEmpty(endTimeParam))
        throw new ArgumentNullException("EndTime", "Cannot export data: no \"EndTime\" parameter value was specified.");

    DateTime startTime, endTime;

    try
    {
        startTime = DateTime.ParseExact(startTimeParam, dateTimeFormat, null, DateTimeStyles.AdjustToUniversal);
    }
    catch (Exception ex)
    {
        throw new ArgumentException($"Cannot export data: failed to parse \"StartTime\" parameter value \"{startTimeParam}\". Expected format is \"{dateTimeFormat}\". Error message: {ex.Message}", "StartTime", ex);
    }

    try
    {
        endTime = DateTime.ParseExact(endTimeParam, dateTimeFormat, null, DateTimeStyles.AdjustToUniversal);
    }
    catch (Exception ex)
    {
        throw new ArgumentException($"Cannot export data: failed to parse \"EndTime\" parameter value \"{endTimeParam}\". Expected format is \"{dateTimeFormat}\". Error message: {ex.Message}", "EndTime", ex);
    }

    if (startTime > endTime)
        throw new ArgumentOutOfRangeException("StartTime", "Cannot export data: start time exceeds end time.");

    using (DataContext dataContext = new DataContext())
    {
        // Validate current user has access to requested data
        if (!dataContext.UserIsInRole(securityPrincipal, s_minimumRequiredRoles))
            throw new SecurityException($"Cannot export data: access is denied for user \"{Thread.CurrentPrincipal.Identity?.Name ?? "Undefined"}\", minimum required roles = {s_minimumRequiredRoles.ToDelimitedString(", ")}.");

        headers = GetHeaders(dataContext, pointIDs.Select(id => (int)id));
    }

    // Fall back to defaults for any unparseable numeric parameters.
    if (!double.TryParse(frameRateParam, out double frameRate))
        frameRate = DefaultFrameRate;

    if (!int.TryParse(timestampSnapParam, out int timestampSnap))
        timestampSnap = DefaultTimestampSnap;

    if (!double.TryParse(toleranceParam, out double tolerance))
        tolerance = 0.5;

    int toleranceTicks = (int)Math.Ceiling(tolerance * Ticks.PerMillisecond);
    bool alignTimestamps = alignTimestampsParam?.ParseBoolean() ?? true;
    bool missingAsNaN = missingAsNaNParam?.ParseBoolean() ?? true;
    bool fillMissingTimestamps = alignTimestamps && (fillMissingTimestampsParam?.ParseBoolean() ?? false);

    if (string.IsNullOrEmpty(instanceName))
        instanceName = TrendValueAPI.DefaultInstanceName;

    LocalOutputAdapter.Instances.TryGetValue(instanceName, out LocalOutputAdapter adapter);
    HistorianServer serverInstance = adapter?.Server;

    if (serverInstance == null)
        throw new InvalidOperationException($"Cannot export data: failed to access internal historian server instance \"{instanceName}\".");

    const int TargetBufferSize = 524288;

    // Shared producer/consumer state: readTask appends CSV text to readBuffer, hands
    // completed pages to writeBuffer under writeBufferLock, and signals bufferReady.
    StringBuilder readBuffer = new StringBuilder(TargetBufferSize * 2);
    ManualResetEventSlim bufferReady = new ManualResetEventSlim(false);
    List<string> writeBuffer = new List<string>();
    object writeBufferLock = new object();
    bool readComplete = false;

    Task readTask = Task.Factory.StartNew(() =>
    {
        try
        {
            using (SnapClient connection = SnapClient.Connect(serverInstance.Host))
            {
                // Column slot per point ID; missing cells stay NaN for the current row.
                Dictionary<ulong, int> pointIDIndex = new Dictionary<ulong, int>(pointIDs.Length);
                float[] values = new float[pointIDs.Length];

                for (int i = 0; i < pointIDs.Length; i++)
                    pointIDIndex.Add(pointIDs[i], i);

                for (int i = 0; i < values.Length; i++)
                    values[i] = float.NaN;

                // Derive the expected tick interval between frames from the frame rate.
                ulong interval;

                if (Math.Abs(frameRate % 1) <= (double.Epsilon * 100))
                {
                    Ticks[] subseconds = Ticks.SubsecondDistribution((int)frameRate);
                    interval = (ulong)(subseconds.Length > 1 ? subseconds[1].Value : Ticks.PerSecond);
                }
                else
                {
                    interval = (ulong)(Math.Floor(1.0d / frameRate) * Ticks.PerSecond);
                }

                ulong lastTimestamp = 0;

                // Write data pages
                SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, endTime);
                MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);
                HistorianKey historianKey = new HistorianKey();
                HistorianValue historianValue = new HistorianValue();

                // Write row values function: appends the current row's values, then hands
                // the page to the writer once it reaches the target size.
                Action bufferValues = () =>
                {
                    readBuffer.Append(missingAsNaN ? string.Join(",", values) : string.Join(",", values.Select(val => float.IsNaN(val) ? "" : $"{val}")));

                    if (readBuffer.Length < TargetBufferSize)
                        return;

                    lock (writeBufferLock)
                        writeBuffer.Add(readBuffer.ToString());

                    readBuffer.Clear();
                    bufferReady.Set();
                };

                using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName))
                // FIX: read stream was never disposed; wrap in using so the server-side
                // reader is released even when the export is cancelled or faults.
                using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
                {
                    ulong timestamp = 0;

                    // Adjust timestamp to use first timestamp as base
                    bool adjustTimeStamp = true;
                    long baseTime = startTime.Ticks;

                    if (timestampSnap == 0)
                    {
                        adjustTimeStamp = false;
                        baseTime = Ticks.RoundToSecondDistribution(startTime.Ticks, frameRate, startTime.Ticks - startTime.Ticks % Ticks.PerSecond);
                    }
                    else if (timestampSnap == 1)
                    {
                        adjustTimeStamp = true;
                    }
                    else if (timestampSnap == 2)
                    {
                        adjustTimeStamp = false;
                        baseTime = startTime.Ticks;
                    }

                    while (stream.Read(historianKey, historianValue) && !cancellationToken.IsCancellationRequested)
                    {
                        if (alignTimestamps)
                        {
                            if (adjustTimeStamp)
                            {
                                adjustTimeStamp = false;
                                baseTime = (long)historianKey.Timestamp;
                            }

                            // Make sure the timestamp is actually close enough to the distribution
                            Ticks ticks = Ticks.ToSecondDistribution((long)historianKey.Timestamp, frameRate, baseTime, toleranceTicks);

                            if (ticks == Ticks.MinValue)
                                continue;

                            timestamp = (ulong)ticks.Value;
                        }
                        else
                        {
                            timestamp = historianKey.Timestamp;
                        }

                        // Start a new row for each encountered new timestamp
                        if (timestamp != lastTimestamp)
                        {
                            if (lastTimestamp > 0)
                                bufferValues();

                            for (int i = 0; i < values.Length; i++)
                                values[i] = float.NaN;

                            if (fillMissingTimestamps && lastTimestamp > 0 && timestamp > lastTimestamp)
                            {
                                ulong difference = timestamp - lastTimestamp;

                                if (difference > interval)
                                {
                                    // Emit empty rows for the gap at the expected frame interval.
                                    ulong interpolated = lastTimestamp;

                                    for (ulong i = 1; i < difference / interval; i++)
                                    {
                                        interpolated = (ulong)Ticks.RoundToSecondDistribution((long)(interpolated + interval), frameRate, startTime.Ticks).Value;
                                        readBuffer.Append($"{Environment.NewLine}{new DateTime((long)interpolated, DateTimeKind.Utc).ToString(dateTimeFormat)},");
                                        bufferValues();
                                    }
                                }
                            }

                            readBuffer.Append($"{Environment.NewLine}{new DateTime((long)timestamp, DateTimeKind.Utc).ToString(dateTimeFormat)},");
                            lastTimestamp = timestamp;
                        }

                        // Save value to its column
                        values[pointIDIndex[historianKey.PointID]] = historianValue.AsSingle;
                    }

                    // Flush the final (partial) row and any residual buffered text.
                    if (timestamp > 0)
                        bufferValues();

                    if (readBuffer.Length > 0)
                    {
                        lock (writeBufferLock)
                            writeBuffer.Add(readBuffer.ToString());
                    }
                }
            }
        }
        finally
        {
            // Always release the writer, even on fault/cancel, so it can observe completion.
            readComplete = true;
            bufferReady.Set();
        }
    }, cancellationToken);

    Task writeTask = Task.Factory.StartNew(() =>
    {
        using (StreamWriter writer = new StreamWriter(responseStream))
        {
            //Ticks exportStart = DateTime.UtcNow.Ticks;
            string[] localBuffer;

            // Write column headers
            writer.Write(headers);

            while ((writeBuffer.Count > 0 || !readComplete) && !cancellationToken.IsCancellationRequested)
            {
                bufferReady.Wait(cancellationToken);
                bufferReady.Reset();

                lock (writeBufferLock)
                {
                    localBuffer = writeBuffer.ToArray();
                    writeBuffer.Clear();
                }

                foreach (string buffer in localBuffer)
                    writer.Write(buffer);
            }

            // Flush stream
            writer.Flush();

            //Debug.WriteLine("Export time: " + (DateTime.UtcNow.Ticks - exportStart).ToElapsedTimeString(3));
        }
    }, cancellationToken);

    await readTask;
    await writeTask;
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="server">The server to use for the query.</param>
/// <param name="instanceName">Name of the archive to be queried.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <param name="seriesLimit">Maximum number of points per series.</param>
/// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param>
/// <param name="cancellationToken">Cancellation token for query.</param>
/// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns>
public static IEnumerable<TrendValue> GetHistorianData(SnapServer server, string instanceName, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null)
{
    if (cancellationToken == null)
    {
        cancellationToken = new CancellationToken();
    }

    if (server == null)
    {
        yield break;
    }

    // Setting series limit to zero requests full resolution data, which overrides provided parameter
    if (seriesLimit < 1)
    {
        resolution = Resolution.Full;
    }

    TimeSpan resolutionInterval = resolution.GetInterval();
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        // Baseline (align) start/stop times to the unit appropriate for the resolution.
        BaselineTimeInterval interval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
        {
            interval = BaselineTimeInterval.Second;
        }
        else if (resolutionInterval.Ticks < Ticks.PerHour)
        {
            interval = BaselineTimeInterval.Minute;
        }
        else if (resolutionInterval.Ticks == Ticks.PerHour)
        {
            interval = BaselineTimeInterval.Hour;
        }

        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);

        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    Dictionary<ulong, DataRow> metadata = null;

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName))
    {
        if (database == null)
        {
            yield break;
        }

        // Metadata comes from the local output adapter registered for this database instance.
        if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? DefaultInstanceName, out LocalOutputAdapter historianAdapter))
        {
            metadata = historianAdapter?.Measurements;
        }

        if (metadata == null)
        {
            yield break;
        }

        // Setup point ID selections
        if (measurementIDs != null)
        {
            pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);
        }
        else
        {
            measurementIDs = metadata.Keys.ToArray();
        }

        // Start stream reader for the provided time window and selected points
        Dictionary<ulong, long> pointCounts = new Dictionary<ulong, long>(measurementIDs.Length);
        Dictionary<ulong, long> intervals = new Dictionary<ulong, long>(measurementIDs.Length);
        Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(measurementIDs.Length);
        double range = (stopTime - startTime).TotalSeconds;
        ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value;
        long pointCount;

        if (resolutionSpan <= 1UL)
        {
            resolutionSpan = Ticks.PerSecond;
        }

        if (seriesLimit < 1)
        {
            seriesLimit = 1;
        }

        // Estimate total measurement counts per point so decimation intervals for each series can be calculated
        foreach (ulong measurementID in measurementIDs)
        {
            if (resolution == Resolution.Full)
            {
                // Full resolution: estimate from the point's configured frames per second;
                // fall back to 2 when the point has no metadata row.
                pointCounts[measurementID] = metadata.TryGetValue(measurementID, out DataRow row) ? (long)(int.Parse(row["FramesPerSecond"].ToString()) * range) : 2;
            }
            else
            {
                pointCounts[measurementID] = (long)(range / resolutionInterval.TotalSeconds.NotZero(1.0D));
            }
        }

        foreach (ulong measurementID in pointCounts.Keys)
        {
            intervals[measurementID] = (pointCounts[measurementID] / seriesLimit).NotZero(1L);
        }

        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value) && !cancellationToken.IsCancelled)
            {
                pointID = key.PointID;
                timestamp = key.Timestamp;
                pointCount = pointCounts[pointID];

                // Publish every Nth value per the decimation interval; unless strictly
                // limited, also publish when the point has been quiet longer than the span.
                if (pointCount++ % intervals[pointID] == 0 || !forceLimit && timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan)
                {
                    yield return new TrendValue
                    {
                        ID = (long)pointID,
                        Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond,
                        Value = value.AsSingle
                    };
                }

                pointCounts[pointID] = pointCount;
                lastTimes[pointID] = timestamp;
            }
        }
    }
}
/// <summary>
/// Creates a sequential reader stream over a snapshot of the supplied archive list,
/// applying optional reader options, seek filter, match filter, and worker thread
/// synchronization (defaults are substituted for any null argument).
/// </summary>
/// <param name="archiveList">the archive list whose tables will be read</param>
/// <param name="readerOptions">read options; null uses <see cref="SortedTreeEngineReaderOptions"/>.Default</param>
/// <param name="keySeekFilter">seek filter; null reads the full key universe</param>
/// <param name="keyMatchFilter">match filter; null matches everything</param>
/// <param name="workerThreadSynchronization">synchronization object; null creates an owned instance</param>
public SequentialReaderStream(ArchiveList<TKey, TValue> archiveList, SortedTreeEngineReaderOptions readerOptions = null, SeekFilterBase<TKey> keySeekFilter = null, MatchFilterBase<TKey, TValue> keyMatchFilter = null, WorkerThreadSynchronization workerThreadSynchronization = null)
{
    // Substitute defaults for any omitted arguments.
    if (readerOptions is null)
    {
        readerOptions = SortedTreeEngineReaderOptions.Default;
    }

    if (keySeekFilter is null)
    {
        keySeekFilter = new SeekFilterUniverse<TKey>();
    }

    if (keyMatchFilter is null)
    {
        keyMatchFilter = new MatchFilterUniverse<TKey, TValue>();
    }

    // When no synchronization object is supplied, this stream creates and owns one.
    if (workerThreadSynchronization is null)
    {
        m_ownsWorkerThreadSynchronization = true;
        workerThreadSynchronization = new WorkerThreadSynchronization();
    }

    m_workerThreadSynchronization = workerThreadSynchronization;
    m_pointCount = 0;
    m_keySeekFilter = keySeekFilter;
    m_keyMatchFilter = keyMatchFilter;

    // A universe match filter means every key matches, which lets reads skip filtering.
    m_keyMatchIsUniverse = m_keyMatchFilter as MatchFilterUniverse<TKey, TValue> != null;

    // Arm the optional read timeout; firing it only sets a flag checked elsewhere.
    if (readerOptions.Timeout.Ticks > 0)
    {
        m_timeout = new TimeoutOperation();
        m_timeout.RegisterTimeout(readerOptions.Timeout, () => m_timedOut = true);
    }

    // Take a client snapshot of the archive list so the set of tables is stable for
    // the lifetime of this stream.
    m_snapshot = archiveList.CreateNewClientResources();
    m_snapshot.UpdateSnapshot();
    m_tablesOrigList = new List<BufferedArchiveStream<TKey, TValue>>();

    // Keep only tables that intersect the seek range; tables outside the range are
    // released from the snapshot (set to null).
    for (int x = 0; x < m_snapshot.Tables.Count(); x++)
    {
        ArchiveTableSummary<TKey, TValue> table = m_snapshot.Tables[x];

        if (table != null)
        {
            if (table.Contains(keySeekFilter.StartOfRange, keySeekFilter.EndOfRange))
            {
                try
                {
                    m_tablesOrigList.Add(new BufferedArchiveStream<TKey, TValue>(x, table));
                }
                catch (Exception e)
                {
                    //ToDo: Make sure firstkey.tostring doesn't ever throw an exception.
                    // A table that fails to open is logged and skipped rather than
                    // failing the whole read.
                    StringBuilder sb = new StringBuilder();
                    sb.AppendLine($"Archive ID {table.FileId}");
                    sb.AppendLine($"First Key {table.FirstKey.ToString()}");
                    sb.AppendLine($"Last Key {table.LastKey.ToString()}");
                    sb.AppendLine($"File Size {table.SortedTreeTable.BaseFile.ArchiveSize}");
                    sb.AppendLine($"File Name {table.SortedTreeTable.BaseFile.FilePath}");
                    Log.Publish(MessageLevel.Error, "Error while reading file", sb.ToString(), null, e);
                }
            }
            else
            {
                m_snapshot.Tables[x] = null;
            }
        }
    }

    m_sortedArchiveStreams = new CustomSortHelper<BufferedArchiveStream<TKey, TValue>>(m_tablesOrigList, IsLessThan);

    // Position on the first seek window, or dispose immediately when the filter
    // yields no windows at all.
    m_keySeekFilter.Reset();

    if (m_keySeekFilter.NextWindow())
    {
        SeekToKey(m_keySeekFilter.StartOfFrame);
    }
    else
    {
        Dispose();
    }
}