public SnapDBClient(string hostAddress, int port, string instanceName, DateTime startTime, DateTime endTime, int frameRate, IEnumerable<ulong> pointIDs)
{
    m_client = new HistorianClient(hostAddress, port);
    m_database = m_client.GetDatabase<HistorianKey, HistorianValue>(instanceName);
    m_key = new HistorianKey();
    m_value = new HistorianValue();

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(DataPoint.RoundTimestamp(startTime, frameRate), DataPoint.RoundTimestamp(endTime, frameRate));
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);

    m_stream = m_database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);
}
public void BenchmarkRealisticSamples()
{
    MemoryPoolTest.TestMemoryLeak();

    const int Max = 1000000;
    const int FileCount = 1000;

    ArchiveList<HistorianKey, HistorianValue> list = new ArchiveList<HistorianKey, HistorianValue>(null);
    DateTime start = DateTime.Now.Date;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    for (int x = 0; x < FileCount; x++)
    {
        SortedTreeTable<HistorianKey, HistorianValue> table1 = CreateTable();
        AddData(table1, start.AddMinutes(2 * x), new TimeSpan(TimeSpan.TicksPerSecond), 60, 100, 1, Max / 60 / FileCount);

        using (ArchiveListEditor<HistorianKey, HistorianValue> editor = list.AcquireEditLock())
        {
            editor.Add(table1);
        }
    }

    SeekFilterBase<HistorianKey> filter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(start, start.AddMinutes(2 * FileCount), new TimeSpan(TimeSpan.TicksPerSecond * 2), new TimeSpan(TimeSpan.TicksPerMillisecond));
    SequentialReaderStream<HistorianKey, HistorianValue> sequencer = new SequentialReaderStream<HistorianKey, HistorianValue>(list, null, filter);

    DebugStopwatch sw = new DebugStopwatch();
    int xi = 0;

    double sec = sw.TimeEvent(() =>
    {
        SequentialReaderStream<HistorianKey, HistorianValue> scanner = sequencer;

        while (scanner.Read(key, value))
        {
            xi++;
        }
    });

    System.Console.WriteLine(Max / sec / 1000000);

    //TreeKeyMethodsBase<HistorianKey>.WriteToConsole();
    //TreeValueMethodsBase<HistorianValue>.WriteToConsole();

    //Console.WriteLine("KeyMethodsBase calls");
    //for (int x = 0; x < 23; x++)
    //{
    //    Console.WriteLine(TreeKeyMethodsBase<HistorianKey>.CallMethods[x] + "\t" + ((TreeKeyMethodsBase<HistorianKey>.Method)(x)).ToString());
    //}

    //Console.WriteLine("ValueMethodsBase calls");
    //for (int x = 0; x < 5; x++)
    //{
    //    Console.WriteLine(TreeValueMethodsBase<HistorianValue>.CallMethods[x] + "\t" + ((TreeValueMethodsBase<HistorianValue>.Method)(x)).ToString());
    //}

    list.Dispose();
    MemoryPoolTest.TestMemoryLeak();
}
public static IEnumerable GetHistorianDataSampled(SqlString historianServer, SqlString instanceName, DateTime startTime, DateTime stopTime, TimeSpan interval, [SqlFacet(MaxSize = -1)] SqlString measurementIDs)
{
    const int DefaultHistorianPort = 38402;

    if (historianServer.IsNull || string.IsNullOrEmpty(historianServer.Value))
    {
        throw new ArgumentNullException("historianServer", "Missing historian server parameter");
    }

    if (instanceName.IsNull || string.IsNullOrEmpty(instanceName.Value))
    {
        throw new ArgumentNullException("instanceName", "Missing historian instance name parameter");
    }

    if (startTime > stopTime)
    {
        throw new ArgumentException("Invalid time range specified", "startTime");
    }

    string[] parts = historianServer.Value.Split(':');
    string hostName = parts[0];

    if (parts.Length < 2 || !int.TryParse(parts[1], out int port))
    {
        port = DefaultHistorianPort;
    }

    using (HistorianClient client = new HistorianClient(hostName, port))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> reader = client.GetDatabase<HistorianKey, HistorianValue>(instanceName.Value))
    {
        SeekFilterBase<HistorianKey> timeFilter = interval.Ticks == 0 ?
            TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime) :
            TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond));

        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
        HistorianKey key = new HistorianKey();
        HistorianValue value = new HistorianValue();

        if (!measurementIDs.IsNull && !string.IsNullOrEmpty(measurementIDs.Value))
        {
            pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs.Value.Split(',').Select(ulong.Parse));
        }

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = reader.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value))
            {
                yield return new Measurement(key.PointID, key.TimestampAsDate, value.AsSingle);
            }
        }
    }
}
public Ticks ReadBackHistorianData(HistorianIArchive archive, Action<int> updateProgressBar)
{
    IEnumerable<ulong> points;

    if (m_settings.ReadFromCsv)
    {
        points = m_indexToPointIDLookup.Skip(1); // First value is always 0 because the timestamp is the first column
    }
    else
    {
        points = m_points;
    }

    if (points == null)
    {
        ShowMessage("Point list not initialized");
        return new Ticks(0);
    }

    int count = 0;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    TreeStream<HistorianKey, HistorianValue> m_stream;

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(DataPoint.RoundTimestamp(m_startTime, m_settings.FrameRate), DataPoint.RoundTimestamp(m_endTime, m_settings.FrameRate));
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(points);

    m_stream = archive.ClientDatabase.Read(GSF.Snap.Services.Reader.SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);

    int messageInterval = points.Count() * m_settings.MessageInterval;
    DateTime startTime = DateTime.UtcNow;

    while (m_stream.Read(key, value))
    {
        count++;

        if (count % messageInterval == 0)
        {
            PercentComplete = (int)((1.0D - (new Ticks(m_endTime.Ticks - (long)key.Timestamp).ToSeconds() / m_timeRange)) * 100.0D);
            ShowMessage($"{Environment.NewLine}{count} points read back so far, averaging {(count / (DateTime.UtcNow - startTime).TotalSeconds):N0} points per second.");
            updateProgressBar(PercentComplete);
        }
    }

    return DateTime.UtcNow - startTime;
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="connection">openHistorian connection.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Comma separated list of measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <returns>Enumeration of <see cref="IMeasurement"/> values read for time range.</returns>
/// <example>
/// <code>
/// using (var connection = new Connection("127.0.0.1", "PPA"))
///     foreach (var measurement in GetHistorianData(connection, DateTime.UtcNow.AddMinutes(-1.0D), DateTime.UtcNow))
///         Console.WriteLine("{0}:{1} @ {2} = {3}, quality: {4}", measurement.Key.Source, measurement.Key.ID, measurement.Timestamp, measurement.Value, measurement.StateFlags);
/// </code>
/// </example>
public static IEnumerable<IMeasurement> GetHistorianData(Connection connection, DateTime startTime, DateTime stopTime, string measurementIDs = null, Resolution resolution = Resolution.Full)
{
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolution.GetInterval(), new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    // Setup point ID selections
    if (!string.IsNullOrEmpty(measurementIDs))
    {
        pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs.Split(',').Select(ulong.Parse));
    }

    // Start stream reader for the provided time window and selected points
    using (Database database = connection.OpenDatabase())
    {
        TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);

        while (stream.Read(key, value))
        {
            yield return new Measurement()
            {
                Metadata = MeasurementKey.LookUpOrCreate(connection.InstanceName, (uint)key.PointID).Metadata,
                Timestamp = key.TimestampAsDate,
                Value = value.AsSingle,
                StateFlags = (MeasurementStateFlags)value.Value3
            };
        }
    }
}
public void TestFixedRange()
{
    var list = new List<ulong>();
    var pointId = TimestampSeekFilter.CreateFromRange<HistorianKey>(0, 100);

    if (!pointId.GetType().FullName.Contains("FixedRange"))
    {
        throw new Exception("Wrong type");
    }

    using (var bs = new BinaryStream(allocatesOwnMemory: true))
    {
        bs.Write(pointId.FilterType);
        pointId.Save(bs);
        bs.Position = 0;

        var filter = Library.Filters.GetSeekFilter<HistorianKey>(bs.ReadGuid(), bs);

        if (!filter.GetType().FullName.Contains("FixedRange"))
        {
            throw new Exception("Wrong type");
        }
    }
}
public void ConsoleTest2()
{
    MemoryPoolTest.TestMemoryLeak();

    ArchiveList<HistorianKey, HistorianValue> list = new ArchiveList<HistorianKey, HistorianValue>(null);
    DateTime start = DateTime.Now.Date;

    for (int x = 0; x < 3; x++)
    {
        SortedTreeTable<HistorianKey, HistorianValue> table1 = CreateTable();
        AddDataTerminal(table1, (ulong)x, start, new TimeSpan(TimeSpan.TicksPerSecond), (ulong)(1000 * x), 1, 60 * 60);

        using (ArchiveListEditor<HistorianKey, HistorianValue> editor = list.AcquireEditLock())
        {
            editor.Add(table1);
        }
    }

    SeekFilterBase<HistorianKey> filter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(start.AddMinutes(-100), start.AddMinutes(10), new TimeSpan(TimeSpan.TicksPerSecond * 60), new TimeSpan(TimeSpan.TicksPerSecond));
    SequentialReaderStream<HistorianKey, HistorianValue> sequencer = new SequentialReaderStream<HistorianKey, HistorianValue>(list, null, filter);

    SortedList<DateTime, FrameData> frames = sequencer.GetFrames();
    WriteToConsole(frames);

    list.Dispose();
    MemoryPoolTest.TestMemoryLeak();
}
public void TestIntervalRanges()
{
    _ = new List<ulong>();

    SeekFilterBase<HistorianKey> pointId = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(0, 100, 10, 3, 1);

    if (!pointId.GetType().FullName.Contains("IntervalRanges"))
    {
        throw new Exception("Wrong type");
    }

    using (BinaryStream bs = new BinaryStream(allocatesOwnMemory: true))
    {
        bs.Write(pointId.FilterType);
        pointId.Save(bs);
        bs.Position = 0;

        SeekFilterBase<HistorianKey> filter = Library.Filters.GetSeekFilter<HistorianKey>(bs.ReadGuid(), bs);

        if (!filter.GetType().FullName.Contains("IntervalRanges"))
        {
            throw new Exception("Wrong type");
        }
    }
}
/// <summary>
/// Gets frames from the historian as individual frames.
/// </summary>
/// <param name="database">the database to use</param>
/// <param name="startTime">the starting time of the query</param>
/// <param name="stopTime">the ending time of the query</param>
/// <param name="points">the point IDs to include in the query</param>
/// <returns>a collection of frames, keyed by timestamp</returns>
public static SortedList<DateTime, FrameData> GetFrames(this IDatabaseReader<HistorianKey, HistorianValue> database, DateTime startTime, DateTime stopTime, params ulong[] points)
{
    return database.GetFrames(SortedTreeEngineReaderOptions.Default, TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime), PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(points));
}
///// <summary>
///// Gets frames from the historian as individual frames.
///// </summary>
///// <param name="database">the database to use</param>
///// <returns></returns>
//public static SortedList<DateTime, FrameData> GetFrames(this SortedTreeEngineBase<HistorianKey, HistorianValue> database, DateTime timestamp)
//{
//    return database.GetFrames(SortedTreeEngineReaderOptions.Default, TimestampFilter.CreateFromRange<HistorianKey>(timestamp, timestamp), PointIDFilter.CreateAllKeysValid<HistorianKey>(), null);
//}

/// <summary>
/// Gets frames from the historian as individual frames.
/// </summary>
/// <param name="database">the database to use</param>
/// <param name="startTime">the starting time of the query</param>
/// <param name="stopTime">the ending time of the query</param>
/// <returns>a collection of frames, keyed by timestamp</returns>
public static SortedList<DateTime, FrameData> GetFrames(this IDatabaseReader<HistorianKey, HistorianValue> database, DateTime startTime, DateTime stopTime)
{
    return database.GetFrames(SortedTreeEngineReaderOptions.Default, TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime), null);
}
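// A minimal usage sketch for the GetFrames extension methods above. The "database"
// variable and the point IDs are assumptions for illustration only - any open
// IDatabaseReader<HistorianKey, HistorianValue> (e.g., one obtained from
// HistorianClient.GetDatabase) would work the same way.
DateTime stopTime = DateTime.UtcNow;
DateTime startTime = stopTime.AddMinutes(-1.0D);

// Query two hypothetical point IDs over the last minute and list how many values landed in each frame
SortedList<DateTime, FrameData> frames = database.GetFrames(startTime, stopTime, 101UL, 102UL);

foreach (KeyValuePair<DateTime, FrameData> frame in frames)
    Console.WriteLine($"{frame.Key:MM/dd/yyyy HH:mm:ss.fffffff}: {frame.Value.Points.Count} points");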
/// <summary>
/// Starts a query that will read data source values, given a set of point IDs and targets, over a time range.
/// </summary>
/// <param name="startTime">Start-time for query.</param>
/// <param name="stopTime">Stop-time for query.</param>
/// <param name="interval">Interval from Grafana request.</param>
/// <param name="decimate">Flag that determines if data should be decimated over provided time range.</param>
/// <param name="targetMap">Set of IDs with associated targets to query.</param>
/// <returns>Queried data source data in terms of value and time.</returns>
protected override IEnumerable<DataSourceValue> QueryDataSourceValues(DateTime startTime, DateTime stopTime, string interval, bool decimate, Dictionary<ulong, string> targetMap)
{
    SnapServer server = GetAdapterInstance(InstanceName)?.Server?.Host;

    if ((object)server == null)
    {
        yield break;
    }

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(InstanceName))
    {
        if ((object)database == null)
        {
            yield break;
        }

        Resolution resolution = TrendValueAPI.EstimatePlotResolution(InstanceName, startTime, stopTime, targetMap.Keys);
        SeekFilterBase<HistorianKey> timeFilter;

        // Set data scan resolution
        if (!decimate || resolution == Resolution.Full)
        {
            timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
        }
        else
        {
            TimeSpan resolutionInterval = resolution.GetInterval();
            BaselineTimeInterval timeInterval = BaselineTimeInterval.Second;

            if (resolutionInterval.Ticks < Ticks.PerMinute)
            {
                timeInterval = BaselineTimeInterval.Second;
            }
            else if (resolutionInterval.Ticks < Ticks.PerHour)
            {
                timeInterval = BaselineTimeInterval.Minute;
            }
            else if (resolutionInterval.Ticks == Ticks.PerHour)
            {
                timeInterval = BaselineTimeInterval.Hour;
            }

            startTime = startTime.BaselinedTimestamp(timeInterval);
            stopTime = stopTime.BaselinedTimestamp(timeInterval);

            int milliseconds = 1;

            try
            {
                // Load the "HistoryTolerance" system setting (in milliseconds) when available.
                // Note: applying the parsed value here is an assumption - the original snippet
                // read the setting into a local variable but never used it.
                ConfigurationFile configFile = ConfigurationFile.Open(AppDomain.CurrentDomain.SetupInformation.ConfigurationFile);
                CategorizedSettingsSection categorizedSettings = configFile.Settings;
                CategorizedSettingsElementCollection systemSettings = categorizedSettings["systemSettings"];

                if (int.TryParse(systemSettings["HistoryTolerance"].Value, out int tolerance) && tolerance > 0)
                {
                    milliseconds = tolerance;
                }
            }
            catch
            {
                // Something went wrong, so just use original default
            }

            timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond * milliseconds));
        }

        // Setup point ID selections
        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(targetMap.Keys);

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            HistorianKey key = new HistorianKey();
            HistorianValue value = new HistorianValue();

            while (stream.Read(key, value))
            {
                yield return new DataSourceValue
                {
                    Target = targetMap[key.PointID],
                    Time = (key.Timestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                    Value = value.AsSingle,
                    Flags = (MeasurementStateFlags)value.Value3
                };
            }
        }
    }
}
public static TreeStream<TKey, TValue> Read<TKey, TValue>(this IDatabaseReader<TKey, TValue> reader, DateTime firstTime, DateTime lastTime, IEnumerable<ulong> pointIds)
    where TKey : TimestampPointIDBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    return reader.Read(SortedTreeEngineReaderOptions.Default, TimestampSeekFilter.CreateFromRange<TKey>(firstTime, lastTime), PointIdMatchFilter.CreateFromList<TKey, TValue>(pointIds.ToList()));
}
public static TreeStream<TKey, TValue> Read<TKey, TValue>(this IDatabaseReader<TKey, TValue> reader, ulong firstTime, ulong lastTime)
    where TKey : TimestampPointIDBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    return reader.Read(SortedTreeEngineReaderOptions.Default, TimestampSeekFilter.CreateFromRange<TKey>(firstTime, lastTime), null);
}
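// A minimal usage sketch for the Read extension methods above. The "reader" variable
// is an assumption standing in for any open IDatabaseReader<HistorianKey, HistorianValue>
// (e.g., obtained via HistorianClient.GetDatabase<HistorianKey, HistorianValue>(instanceName)),
// and the point IDs are placeholders.
HistorianKey key = new HistorianKey();
HistorianValue value = new HistorianValue();

// Read the last five minutes of data for three hypothetical point IDs
using (TreeStream<HistorianKey, HistorianValue> stream = reader.Read(DateTime.UtcNow.AddMinutes(-5.0D), DateTime.UtcNow, new ulong[] { 1, 2, 3 }))
{
    while (stream.Read(key, value))
        Console.WriteLine($"{key.TimestampAsDate:o} point {key.PointID} = {value.AsSingle}");
}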
//private void BtnExport_Click(object sender, EventArgs e) //{ // Settings.Default.Save(); // if (m_meta == null) // { // MessageBox.Show("Please download the metadata first."); // return; // } // if (m_selectedMeasurements == null || m_selectedMeasurements.Count == 0) // { // MessageBox.Show("There are no measurements to extract"); // return; // } // DateTime startTime = dateStartTime.Value; // DateTime stopTime = dateStopTime.Value; // if (startTime > stopTime) // { // MessageBox.Show("Start and Stop times are invalid"); // return; // } // TimeSpan interval = Resolutions.GetInterval((string)cmbResolution.SelectedItem); // HistorianClientOptions clientOptions = new HistorianClientOptions(); // clientOptions.DefaultDatabase = Settings.Default.HistorianInstanceName; // clientOptions.NetworkPort = Settings.Default.HistorianStreamingPort; // clientOptions.ServerNameOrIp = Settings.Default.ServerIP; // using (HistorianClient client = new HistorianClient(clientOptions)) // { // KeySeekFilterBase<HistorianKey> timeFilter; // if (interval.Ticks != 0) // timeFilter = TimestampFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond)); // else // timeFilter = TimestampFilter.CreateFromRange<HistorianKey>(startTime, stopTime); // var points = m_selectedMeasurements.Select((x) => (ulong)x.PointID).ToArray(); // var pointFilter = PointIDFilter.CreateFromList<HistorianKey>(points); // var database = client.GetDefaultDatabase(); // var frames = database.GetFrames(timeFilter, pointFilter).RoundToTolerance(1); // using (var csvStream = new StreamWriter("C:\\temp\\file.csv")) // { // //csvStream.AutoFlush = false; // csvStream.Write("Timestamp,"); // foreach (var signal in m_selectedMeasurements) // { // csvStream.Write(signal.Description); // csvStream.Write(','); // } // csvStream.WriteLine(); // foreach (var frame in frames) // { // csvStream.Write(frame.Key.ToString("MM/dd/yyyy hh:mm:ss.fffffff")); // csvStream.Write(','); // foreach (var signal in m_selectedMeasurements) // { // HistorianValueStruct value; // if (frame.Value.Points.TryGetValue((ulong)signal.PointID, out value)) // { // csvStream.Write(value.AsSingle); // } // csvStream.Write(','); // } // csvStream.WriteLine(); // } // csvStream.Flush(); // } // database.Disconnect(); // } //} //private void BtnExport_Click(object sender, EventArgs e) //{ // Settings.Default.Save(); // if (m_meta == null) // { // MessageBox.Show("Please download the metadata first."); // return; // } // if (m_selectedMeasurements == null || m_selectedMeasurements.Count == 0) // { // MessageBox.Show("There are no measurements to extract"); // return; // } // DateTime startTime = dateStartTime.Value; // DateTime stopTime = dateStopTime.Value; // if (startTime > stopTime) // { // MessageBox.Show("Start and Stop times are invalid"); // return; // } // TimeSpan interval = Resolutions.GetInterval((string)cmbResolution.SelectedItem); // HistorianClientOptions clientOptions = new HistorianClientOptions(); // clientOptions.DefaultDatabase = Settings.Default.HistorianInstanceName; // clientOptions.NetworkPort = Settings.Default.HistorianStreamingPort; // clientOptions.ServerNameOrIp = Settings.Default.ServerIP; // using (HistorianClient client = new HistorianClient(clientOptions)) // { // KeySeekFilterBase<HistorianKey> timeFilter; // if (interval.Ticks != 0) // timeFilter = TimestampFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond)); // else // 
timeFilter = TimestampFilter.CreateFromRange<HistorianKey>(startTime, stopTime); // var points = m_selectedMeasurements.Select((x) => (ulong)x.PointID).ToArray(); // var pointFilter = PointIDFilter.CreateFromList<HistorianKey>(points); // var database = client.GetDefaultDatabase(); // using (var frameReader = database.GetPointStream(timeFilter, pointFilter).GetFrameReader()) // using (var csvStream = new StreamWriter("C:\\temp\\file.csv")) // { // var ultraStream = new UltraStreamWriter(csvStream); // //csvStream.AutoFlush = false; // csvStream.Write("Timestamp,"); // foreach (var signal in m_selectedMeasurements) // { // csvStream.Write(signal.Description); // csvStream.Write(','); // } // csvStream.WriteLine(); // while (frameReader.Read()) // { // csvStream.Write(frameReader.FrameTime.ToString("MM/dd/yyyy hh:mm:ss.fffffff")); // csvStream.Write(','); // foreach (var signal in m_selectedMeasurements) // { // HistorianValueStruct value; // if (frameReader.Frame.TryGetValue((ulong)signal.PointID, out value)) // { // ultraStream.Write(value.AsSingle); // //csvStream.Write(value.AsSingle); // } // //csvStream.Write(','); // ultraStream.Write(','); // } // ultraStream.Flush(); // csvStream.WriteLine(); // } // csvStream.Flush(); // } // database.Disconnect(); // } //} //private void BtnExport_Click(object sender, EventArgs e) //{ // Settings.Default.Save(); // if (m_meta == null) // { // MessageBox.Show("Please download the metadata first."); // return; // } // if (m_selectedMeasurements == null || m_selectedMeasurements.Count == 0) // { // MessageBox.Show("There are no measurements to extract"); // return; // } // DateTime startTime = dateStartTime.Value; // DateTime stopTime = dateStopTime.Value; // if (startTime > stopTime) // { // MessageBox.Show("Start and Stop times are invalid"); // return; // } // TimeSpan interval = Resolutions.GetInterval((string)cmbResolution.SelectedItem); // HistorianClientOptions clientOptions = new HistorianClientOptions(); // clientOptions.DefaultDatabase = Settings.Default.HistorianInstanceName; // clientOptions.NetworkPort = Settings.Default.HistorianStreamingPort; // clientOptions.ServerNameOrIp = Settings.Default.ServerIP; // using (HistorianClient client = new HistorianClient(clientOptions)) // { // m_readIndex = 0; // m_fillMeasurements.Clear(); // m_measurementsInOrder.Clear(); // foreach (var signal in m_selectedMeasurements) // { // var m = new Measurements(); // m_fillMeasurements.Add((ulong)signal.PointID, m); // m_measurementsInOrder.Add(m); // } // KeySeekFilterBase<HistorianKey> timeFilter; // if (interval.Ticks != 0) // timeFilter = TimestampFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond)); // else // timeFilter = TimestampFilter.CreateFromRange<HistorianKey>(startTime, stopTime); // var points = m_selectedMeasurements.Select((x) => (ulong)x.PointID).ToArray(); // var pointFilter = PointIDFilter.CreateFromList<HistorianKey>(points); // var database = client.GetDefaultDatabase(); // using (var fillAdapter = database.GetPointStream(timeFilter, pointFilter).GetFillAdapter()) // using (var csvStream = new StreamWriter("C:\\temp\\file.csv")) // { // var ultraStream = new UltraStreamWriter(csvStream); // //csvStream.AutoFlush = false; // csvStream.Write("Timestamp,"); // foreach (var signal in m_selectedMeasurements) // { // csvStream.Write(signal.Description); // csvStream.Write(','); // } // csvStream.WriteLine(); // m_readIndex++; // while (fillAdapter.Fill(FillData)) // { // 
csvStream.Write(fillAdapter.FrameTime.ToString("MM/dd/yyyy hh:mm:ss.fffffff")); // csvStream.Write(','); // foreach (var signal in m_measurementsInOrder) // { // if (signal.ReadNumber == m_readIndex) // { // ultraStream.Write(signal.Value); // } // ultraStream.Write(','); // } // ultraStream.Flush(); // csvStream.WriteLine(); // m_readIndex++; // } // csvStream.Flush(); // } // database.Disconnect(); // } //} private void BtnExport_Click(object sender, EventArgs e) { Settings.Default.Save(); if (m_meta == null) { MessageBox.Show("Please download the metadata first."); return; } if (m_selectedMeasurements == null || m_selectedMeasurements.Count == 0) { MessageBox.Show("There are no measurements to extract"); return; } DateTime startTime = dateStartTime.Value; DateTime stopTime = dateStopTime.Value; if (startTime > stopTime) { MessageBox.Show("Start and Stop times are invalid"); return; } BtnExport.Tag = BtnExport.Text; BtnExport.Text = "Exporting..."; BtnExport.Enabled = false; Cursor.Current = Cursors.WaitCursor; Application.DoEvents(); TimeSpan interval = Resolutions.GetInterval((string)cmbResolution.SelectedItem); Thread workerThread = new Thread(start => { long processingStartTime = DateTime.UtcNow.Ticks; using (HistorianClient client = new HistorianClient(TxtServerIP.Text, int.Parse(TxtHistorianPort.Text))) { m_readIndex = 0; m_fillMeasurements.Clear(); m_measurementsInOrder.Clear(); foreach (var signal in m_selectedMeasurements) { var m = new Measurements(); m_fillMeasurements.Add((ulong)signal.PointID, m); m_measurementsInOrder.Add(m); } SeekFilterBase <HistorianKey> timeFilter; if (interval.Ticks != 0) { timeFilter = TimestampSeekFilter.CreateFromIntervalData <HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond)); } else { timeFilter = TimestampSeekFilter.CreateFromRange <HistorianKey>(startTime, stopTime); } var points = m_selectedMeasurements.Select((x) => (ulong)x.PointID).ToArray(); var pointFilter = PointIdMatchFilter.CreateFromList <HistorianKey, HistorianValue>(points); using (var database = client.GetDatabase <HistorianKey, HistorianValue>(TxtHistorianInstance.Text)) { string fileName = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.DesktopDirectory), "Export.csv"); using (var fillAdapter = database.GetPointStream(timeFilter, pointFilter).GetFillAdapter()) using (var csvStream = new StreamWriter(fileName)) { var ultraStream = new UltraStreamWriter(csvStream); //csvStream.AutoFlush = false; csvStream.Write("Timestamp,"); foreach (var signal in m_selectedMeasurements) { csvStream.Write(signal.Description); csvStream.Write(','); } csvStream.WriteLine(); m_readIndex++; while (fillAdapter.Fill(FillData)) { csvStream.Write(fillAdapter.FrameTime.ToString("MM/dd/yyyy hh:mm:ss.fffffff")); csvStream.Write(','); foreach (var signal in m_measurementsInOrder) { if (signal.ReadNumber == m_readIndex) { ultraStream.Write(signal.Value); } ultraStream.Write(','); } ultraStream.Flush(); csvStream.WriteLine(); m_readIndex++; } csvStream.Flush(); } } } Ticks runtime = DateTime.UtcNow.Ticks - processingStartTime; BeginInvoke(new Action <Ticks>(r => MessageBox.Show(r.ToElapsedTimeString(2), "Processing Time", MessageBoxButtons.OK, MessageBoxIcon.Information)), runtime); BeginInvoke(new Action(RestoreExportButton)); }); workerThread.Start(); }
/// <summary>
/// Queries the provided signals within the provided time window [Inclusive].
/// With this method, the signals will be strongly typed and therefore can be converted.
/// </summary>
/// <param name="database">the database to query</param>
/// <param name="startTime">the lower bound of the time</param>
/// <param name="endTime">the upper bound of the time. [Inclusive]</param>
/// <param name="signals">an IEnumerable of all of the signals to query as part of the results set.</param>
/// <returns>a dictionary of signal data, keyed by point ID</returns>
public static Dictionary<ulong, SignalDataBase> GetSignals(this IDatabaseReader<HistorianKey, HistorianValue> database, ulong startTime, ulong endTime, IEnumerable<ISignalWithType> signals)
{
    return database.GetSignals(TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, endTime), signals, SortedTreeEngineReaderOptions.Default);
}
public static IDictionary<Guid, SignalDataBase> GetSignalsWithCalculations(this ClientDatabaseBase<HistorianKey, HistorianValue> database, ulong startTime, ulong endTime, IEnumerable<ISignalCalculation> signals)
{
    return database.GetSignalsWithCalculations(TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, endTime), signals, SortedTreeEngineReaderOptions.Default);
}
/// <summary> /// Updates the chart after changes have been made to the chart boundaries or the data displayed in the chart. /// </summary> public void UpdateChart() { if ((object)m_archiveReader == null) { return; } Cursor windowCursor = Cursor; Cursor = Cursors.Wait; int colorIndex = 0; DateTime startTime = m_xAxis.Minimum.GetValueOrDefault(DateTime.MinValue); DateTime endTime = m_xAxis.Maximum.GetValueOrDefault(DateTime.MaxValue); m_chart.Series.Clear(); SeekFilterBase <HistorianKey> timeFilter; if (m_chartResolution.Ticks != 0) { BaselineTimeInterval interval = BaselineTimeInterval.Second; if (m_chartResolution.Ticks < Ticks.PerMinute) { interval = BaselineTimeInterval.Second; } else if (m_chartResolution.Ticks < Ticks.PerHour) { interval = BaselineTimeInterval.Minute; } else if (m_chartResolution.Ticks == Ticks.PerHour) { interval = BaselineTimeInterval.Hour; } startTime = startTime.BaselinedTimestamp(interval); endTime = endTime.BaselinedTimestamp(interval); timeFilter = TimestampSeekFilter.CreateFromIntervalData <HistorianKey>(startTime, endTime, m_chartResolution, new TimeSpan(TimeSpan.TicksPerMillisecond)); } else { timeFilter = TimestampSeekFilter.CreateFromRange <HistorianKey>(startTime, endTime); } MatchFilterBase <HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList <HistorianKey, HistorianValue>(m_visiblePoints.Select(point => point.Value.PointID)); TreeStream <HistorianKey, HistorianValue> stream; HistorianKey key = new HistorianKey(); HistorianValue value = new HistorianValue(); Dictionary <ulong, List <DataPointWrapper> > data = new Dictionary <ulong, List <DataPointWrapper> >(m_visiblePoints.Count); Dictionary <ulong, long> pointCounts = new Dictionary <ulong, long>(); Dictionary <ulong, long> intervals = new Dictionary <ulong, long>(); List <DataPointWrapper> values; long pointCount; if (m_sampleSize > 1) { // Count total data points to reduce total points to chart stream = m_archiveReader.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter); while (stream.Read(key, value)) { pointCounts[key.PointID] = pointCounts.GetOrAdd(key.PointID, 0L) + 1; } foreach (ulong pointID in pointCounts.Keys) { intervals[pointID] = (pointCounts[pointID] / m_sampleSize) + 1; } } // Load data into dictionary stream = m_archiveReader.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter); while (stream.Read(key, value)) { values = data.GetOrAdd(key.PointID, id => new List <DataPointWrapper>()); pointCount = pointCounts[key.PointID]; if (m_sampleSize < 2 || pointCount++ % intervals[key.PointID] == 0) { values.Add(new DataPointWrapper(key, value)); } pointCounts[key.PointID] = pointCount; } foreach (KeyValuePair <ulong, Metadata> measurement in m_visiblePoints) { if (data.TryGetValue(measurement.Key, out values)) { LineSeries series = new LineSeries(); // Change how data points are displayed. series.DataPointStyle = new Style(typeof(LineDataPoint)); series.DataPointStyle.Setters.Add(new Setter(BackgroundProperty, new SolidColorBrush(m_lineColors[colorIndex]))); series.DataPointStyle.Setters.Add(new Setter(TemplateProperty, new ControlTemplate())); colorIndex++; if (colorIndex >= m_lineColors.Count) { colorIndex = 0; } // Set the title of the series as it will appear in the legend. series.Title = measurement.Value.SignalReference; // Set chart data series.ItemsSource = values; series.IndependentValuePath = "Time"; series.DependentValuePath = "Value"; // Add the series to the chart. 
m_chart.Series.Add(series); } } UpdateLayout(); Cursor = windowCursor; OnChartUpdated(); }
/// <summary> /// Read historian data from server. /// </summary> /// <param name="server">The server to use for the query.</param> /// <param name="instanceName">Name of the archive to be queried.</param> /// <param name="startTime">Start time of query.</param> /// <param name="stopTime">Stop time of query.</param> /// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param> /// <param name="resolution">Resolution for data query.</param> /// <param name="seriesLimit">Maximum number of points per series.</param> /// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param> /// <param name="cancellationToken">Cancellation token for query.</param> /// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns> public static IEnumerable <TrendValue> GetHistorianData(SnapServer server, string instanceName, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null) { if (cancellationToken == null) { cancellationToken = new CancellationToken(); } if (server == null) { yield break; } // Setting series limit to zero requests full resolution data, which overrides provided parameter if (seriesLimit < 1) { resolution = Resolution.Full; forceLimit = false; } TimeSpan resolutionInterval = resolution.GetInterval(); MatchFilterBase <HistorianKey, HistorianValue> pointFilter = null; HistorianKey key = new HistorianKey(); HistorianValue value = new HistorianValue(); bool subFullResolution = false; // Set data scan resolution if (resolution != Resolution.Full) { subFullResolution = true; BaselineTimeInterval interval = BaselineTimeInterval.Second; if (resolutionInterval.Ticks < Ticks.PerMinute) { interval = BaselineTimeInterval.Second; } else if (resolutionInterval.Ticks < Ticks.PerHour) { interval = BaselineTimeInterval.Minute; } else if (resolutionInterval.Ticks == Ticks.PerHour) { interval = BaselineTimeInterval.Hour; } startTime = startTime.BaselinedTimestamp(interval); stopTime = stopTime.BaselinedTimestamp(interval); } SeekFilterBase <HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange <HistorianKey>(startTime, stopTime); Dictionary <ulong, DataRow> metadata = null; using (SnapClient connection = SnapClient.Connect(server)) using (ClientDatabaseBase <HistorianKey, HistorianValue> database = connection.GetDatabase <HistorianKey, HistorianValue>(instanceName)) { if (database == null) { yield break; } if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? 
DefaultInstanceName, out LocalOutputAdapter historianAdapter)) { metadata = historianAdapter?.Measurements; } if (metadata == null) { yield break; } // Setup point ID selections if (measurementIDs != null) { pointFilter = PointIdMatchFilter.CreateFromList <HistorianKey, HistorianValue>(measurementIDs); } else { measurementIDs = metadata.Keys.ToArray(); } Dictionary <ulong, long> pointCounts = new Dictionary <ulong, long>(measurementIDs.Length); Dictionary <ulong, ulong> lastTimes = new Dictionary <ulong, ulong>(measurementIDs.Length); Dictionary <ulong, Tuple <float, float> > extremes = new Dictionary <ulong, Tuple <float, float> >(measurementIDs.Length); ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value; long pointCount; float pointValue, min = 0.0F, max = 0.0F; foreach (ulong measurementID in measurementIDs) { pointCounts[measurementID] = 0L; } // Start stream reader for the provided time window and selected points using (TreeStream <HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter)) { while (stream.Read(key, value) && !cancellationToken.IsCancelled) { pointID = key.PointID; timestamp = key.Timestamp; pointCount = pointCounts[pointID]; pointValue = value.AsSingle; if (subFullResolution) { Tuple <float, float> stats = extremes.GetOrAdd(pointID, _ => new Tuple <float, float>(float.MaxValue, float.MinValue)); min = stats.Item1; max = stats.Item2; if (pointValue < min) { min = pointValue; } if (pointValue > max) { max = pointValue; } if (min != float.MaxValue && max != float.MinValue) { pointValue = Math.Abs(max) > Math.Abs(min) ? max : min; } else if (min != float.MaxValue) { pointValue = min; } else if (max != float.MinValue) { pointValue = max; } } if (timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan) { pointCount++; if (forceLimit && pointCount > seriesLimit) { break; } yield return(new TrendValue { ID = (long)pointID, Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond, Value = pointValue }); lastTimes[pointID] = timestamp; // Reset extremes at each point publication if (subFullResolution) { extremes[pointID] = new Tuple <float, float>(float.MaxValue, float.MinValue); } } else if (subFullResolution) { // Track extremes over interval extremes[pointID] = new Tuple <float, float>(min, max); } pointCounts[pointID] = pointCount; } } } }
/// <summary> /// Read historian data from server. /// </summary> /// <param name="connection">openHistorian connection.</param> /// <param name="startTime">Start time of query.</param> /// <param name="stopTime">Stop time of query.</param> /// <param name="measurementID">Measurement ID to test for data continuity.</param> /// <param name="resolution">Resolution for testing data.</param> /// <param name="expectedFullResolutionTicks">Expected number of ticks per interval at full resolution, e.g., 33,333 = 1/30 of a second representing a sampling interval of 30 times per second.</param> /// <returns>Enumeration of valid data ranges for specified time range.</returns> /// <remarks> /// 1 tick = 100 nanoseconds. /// </remarks> public static IEnumerable <Tuple <DateTime, DateTime> > GetContiguousDataRegions(Connection connection, DateTime startTime, DateTime stopTime, ulong measurementID, Resolution resolution, long expectedFullResolutionTicks = 333333) { // Setup time-range and point ID selections SeekFilterBase <HistorianKey> timeFilter; MatchFilterBase <HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromPointID <HistorianKey, HistorianValue>(measurementID); HistorianKey key = new HistorianKey(); HistorianValue value = new HistorianValue(); TimeSpan interval, tolerance; // Set data scan resolution if (resolution == Resolution.Full) { interval = new TimeSpan(expectedFullResolutionTicks); timeFilter = TimestampSeekFilter.CreateFromRange <HistorianKey>(startTime, stopTime); } else { interval = resolution.GetInterval(); timeFilter = TimestampSeekFilter.CreateFromIntervalData <HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond)); } // PMUs times may float a little - provide a one millisecond tolerance window above the standard interval tolerance = interval.Add(TimeSpan.FromMilliseconds(1.0D)); DateTime lastStartTime = startTime; DateTime lastStopTime = startTime; DateTime nextExpectedTime = startTime; DateTime currentTime; long totalRegions = 0; // Start stream reader for the provided time window and selected points using (Database database = connection.OpenDatabase()) { TreeStream <HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter); // Scan historian stream for given point over specified time range and data resolution while (stream.Read(key, value)) { currentTime = key.TimestampAsDate; // See if current time was not expected time and gap is larger than resolution tolerance - could simply // be user started with a time that was not aligned with desired resolution, hence the tolerance check if (currentTime != nextExpectedTime && currentTime - nextExpectedTime > tolerance) { if (lastStartTime != lastStopTime) { // Detected a data gap, return last contiguous region totalRegions++; yield return(new Tuple <DateTime, DateTime>(lastStartTime, lastStopTime)); } // Move start time to current value lastStartTime = currentTime; lastStopTime = lastStartTime; nextExpectedTime = lastStartTime + interval; } else { // Setup next expected timestamp nextExpectedTime += interval; lastStopTime = currentTime; } } // If no data gaps were detected, return a single value for full region for where there was data if (totalRegions == 0 && lastStartTime != lastStopTime) { yield return(new Tuple <DateTime, DateTime>(lastStartTime, lastStopTime)); } } }
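// A hedged usage sketch for GetContiguousDataRegions above. The Connection constructor
// arguments mirror the GetHistorianData example earlier in this listing, the measurement ID
// is a placeholder, and the unqualified call assumes the same class scope as the method.
using (Connection connection = new Connection("127.0.0.1", "PPA"))
{
    foreach (Tuple<DateTime, DateTime> region in GetContiguousDataRegions(connection, DateTime.UtcNow.AddHours(-1.0D), DateTime.UtcNow, 1UL, Resolution.Full))
        Console.WriteLine($"Contiguous data from {region.Item1:o} to {region.Item2:o}");
}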
/// <summary> /// Starts a query that will read data source values, given a set of point IDs and targets, over a time range. /// </summary> /// <param name="startTime">Start-time for query.</param> /// <param name="stopTime">Stop-time for query.</param> /// <param name="interval">Interval from Grafana request.</param> /// <param name="includePeaks">Flag that determines if decimated data should include min/max interval peaks over provided time range.</param> /// <param name="targetMap">Set of IDs with associated targets to query.</param> /// <returns>Queried data source data in terms of value and time.</returns> protected override IEnumerable <DataSourceValue> QueryDataSourceValues(DateTime startTime, DateTime stopTime, string interval, bool includePeaks, Dictionary <ulong, string> targetMap) { SnapServer server = GetAdapterInstance(InstanceName)?.Server?.Host; if (server == null) { yield break; } using (SnapClient connection = SnapClient.Connect(server)) using (ClientDatabaseBase <HistorianKey, HistorianValue> database = connection.GetDatabase <HistorianKey, HistorianValue>(InstanceName)) { if (database == null) { yield break; } if (!TryParseInterval(interval, out TimeSpan resolutionInterval)) { Resolution resolution = TrendValueAPI.EstimatePlotResolution(InstanceName, startTime, stopTime, targetMap.Keys); resolutionInterval = resolution.GetInterval(); } BaselineTimeInterval timeInterval = BaselineTimeInterval.Second; if (resolutionInterval.Ticks < Ticks.PerMinute) { timeInterval = BaselineTimeInterval.Second; } else if (resolutionInterval.Ticks < Ticks.PerHour) { timeInterval = BaselineTimeInterval.Minute; } else if (resolutionInterval.Ticks == Ticks.PerHour) { timeInterval = BaselineTimeInterval.Hour; } startTime = startTime.BaselinedTimestamp(timeInterval); stopTime = stopTime.BaselinedTimestamp(timeInterval); if (startTime == stopTime) { stopTime = stopTime.AddSeconds(1.0D); } SeekFilterBase <HistorianKey> timeFilter; // Set timestamp filter resolution if (includePeaks || resolutionInterval == TimeSpan.Zero) { // Full resolution query timeFilter = TimestampSeekFilter.CreateFromRange <HistorianKey>(startTime, stopTime); } else { // Interval query timeFilter = TimestampSeekFilter.CreateFromIntervalData <HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond)); } // Setup point ID selections MatchFilterBase <HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList <HistorianKey, HistorianValue>(targetMap.Keys); Dictionary <ulong, ulong> lastTimes = new Dictionary <ulong, ulong>(targetMap.Count); Dictionary <ulong, Peak> peaks = new Dictionary <ulong, Peak>(targetMap.Count); ulong resolutionSpan = (ulong)resolutionInterval.Ticks; if (includePeaks) { resolutionSpan *= 2UL; } // Start stream reader for the provided time window and selected points using (TreeStream <HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter)) { HistorianKey key = new HistorianKey(); HistorianValue value = new HistorianValue(); Peak peak = Peak.Default; while (stream.Read(key, value)) { ulong pointID = key.PointID; ulong timestamp = key.Timestamp; float pointValue = value.AsSingle; if (includePeaks) { peak = peaks.GetOrAdd(pointID, _ => new Peak()); peak.Set(pointValue, timestamp); } if (resolutionSpan > 0UL && timestamp - lastTimes.GetOrAdd(pointID, 0UL) < resolutionSpan) { continue; } // New value is ready for publication string target = targetMap[pointID]; MeasurementStateFlags flags = 
(MeasurementStateFlags)value.Value3; if (includePeaks) { if (peak.MinTimestamp > 0UL) { yield return(new DataSourceValue { Target = target, Value = peak.Min, Time = (peak.MinTimestamp - m_baseTicks) / (double)Ticks.PerMillisecond, Flags = flags }); } if (peak.MaxTimestamp != peak.MinTimestamp) { yield return(new DataSourceValue { Target = target, Value = peak.Max, Time = (peak.MaxTimestamp - m_baseTicks) / (double)Ticks.PerMillisecond, Flags = flags }); } peak.Reset(); } else { yield return(new DataSourceValue { Target = target, Value = pointValue, Time = (timestamp - m_baseTicks) / (double)Ticks.PerMillisecond, Flags = flags }); } lastTimes[pointID] = timestamp; } } } }
protected override IEnumerable <DataSourceValue> QueryDataSourceValues(DateTime startTime, DateTime stopTime, string interval, bool decimate, Dictionary <ulong, string> targetMap) { SnapServer server = GetAdapterInstance(InstanceName)?.Server?.Host; if ((object)server == null) { yield break; } using (SnapClient connection = SnapClient.Connect(server)) using (ClientDatabaseBase <HistorianKey, HistorianValue> database = connection.GetDatabase <HistorianKey, HistorianValue>(InstanceName)) { if ((object)database == null) { yield break; } Resolution resolution = TrendValueAPI.EstimatePlotResolution(InstanceName, startTime, stopTime, targetMap.Keys); SeekFilterBase <HistorianKey> timeFilter; // Set data scan resolution if (!decimate || resolution == Resolution.Full) { timeFilter = TimestampSeekFilter.CreateFromRange <HistorianKey>(startTime, stopTime); } else { TimeSpan resolutionInterval = resolution.GetInterval(); BaselineTimeInterval timeInterval = BaselineTimeInterval.Second; if (resolutionInterval.Ticks < Ticks.PerMinute) { timeInterval = BaselineTimeInterval.Second; } else if (resolutionInterval.Ticks < Ticks.PerHour) { timeInterval = BaselineTimeInterval.Minute; } else if (resolutionInterval.Ticks == Ticks.PerHour) { timeInterval = BaselineTimeInterval.Hour; } startTime = startTime.BaselinedTimestamp(timeInterval); stopTime = stopTime.BaselinedTimestamp(timeInterval); timeFilter = TimestampSeekFilter.CreateFromIntervalData <HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond)); } // Setup point ID selections MatchFilterBase <HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList <HistorianKey, HistorianValue>(targetMap.Keys); // Start stream reader for the provided time window and selected points using (TreeStream <HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter)) { HistorianKey key = new HistorianKey(); HistorianValue value = new HistorianValue(); while (stream.Read(key, value)) { yield return(new DataSourceValue { Target = targetMap[key.PointID], Time = (key.Timestamp - m_baseTicks) / (double)Ticks.PerMillisecond, Value = value.AsSingle }); } } } }
/// <summary> /// Read historian data from server. /// </summary> /// <param name="server">The server to use for the query.</param> /// <param name="instanceName">Name of the archive to be queried.</param> /// <param name="startTime">Start time of query.</param> /// <param name="stopTime">Stop time of query.</param> /// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param> /// <param name="resolution">Resolution for data query.</param> /// <param name="seriesLimit">Maximum number of points per series.</param> /// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param> /// <param name="cancellationToken">Cancellation token for query.</param> /// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns> public static IEnumerable <TrendValue> GetHistorianData(SnapServer server, string instanceName, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null) { if (cancellationToken == null) { cancellationToken = new CancellationToken(); } if (server == null) { yield break; } // Setting series limit to zero requests full resolution data, which overrides provided parameter if (seriesLimit < 1) { resolution = Resolution.Full; } TimeSpan resolutionInterval = resolution.GetInterval(); SeekFilterBase <HistorianKey> timeFilter; MatchFilterBase <HistorianKey, HistorianValue> pointFilter = null; HistorianKey key = new HistorianKey(); HistorianValue value = new HistorianValue(); // Set data scan resolution if (resolution == Resolution.Full) { timeFilter = TimestampSeekFilter.CreateFromRange <HistorianKey>(startTime, stopTime); } else { BaselineTimeInterval interval = BaselineTimeInterval.Second; if (resolutionInterval.Ticks < Ticks.PerMinute) { interval = BaselineTimeInterval.Second; } else if (resolutionInterval.Ticks < Ticks.PerHour) { interval = BaselineTimeInterval.Minute; } else if (resolutionInterval.Ticks == Ticks.PerHour) { interval = BaselineTimeInterval.Hour; } startTime = startTime.BaselinedTimestamp(interval); stopTime = stopTime.BaselinedTimestamp(interval); timeFilter = TimestampSeekFilter.CreateFromIntervalData <HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond)); } Dictionary <ulong, DataRow> metadata = null; using (SnapClient connection = SnapClient.Connect(server)) using (ClientDatabaseBase <HistorianKey, HistorianValue> database = connection.GetDatabase <HistorianKey, HistorianValue>(instanceName)) { if (database == null) { yield break; } if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? 
DefaultInstanceName, out LocalOutputAdapter historianAdapter)) { metadata = historianAdapter?.Measurements; } if (metadata == null) { yield break; } // Setup point ID selections if (measurementIDs != null) { pointFilter = PointIdMatchFilter.CreateFromList <HistorianKey, HistorianValue>(measurementIDs); } else { measurementIDs = metadata.Keys.ToArray(); } // Start stream reader for the provided time window and selected points Dictionary <ulong, long> pointCounts = new Dictionary <ulong, long>(measurementIDs.Length); Dictionary <ulong, long> intervals = new Dictionary <ulong, long>(measurementIDs.Length); Dictionary <ulong, ulong> lastTimes = new Dictionary <ulong, ulong>(measurementIDs.Length); double range = (stopTime - startTime).TotalSeconds; ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value; long pointCount; if (resolutionSpan <= 1UL) { resolutionSpan = Ticks.PerSecond; } if (seriesLimit < 1) { seriesLimit = 1; } // Estimate total measurement counts per point so decimation intervals for each series can be calculated foreach (ulong measurementID in measurementIDs) { if (resolution == Resolution.Full) { pointCounts[measurementID] = metadata.TryGetValue(measurementID, out DataRow row) ? (long)(int.Parse(row["FramesPerSecond"].ToString()) * range) : 2; } else { pointCounts[measurementID] = (long)(range / resolutionInterval.TotalSeconds.NotZero(1.0D)); } } foreach (ulong measurementID in pointCounts.Keys) { intervals[measurementID] = (pointCounts[measurementID] / seriesLimit).NotZero(1L); } using (TreeStream <HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter)) { while (stream.Read(key, value) && !cancellationToken.IsCancelled) { pointID = key.PointID; timestamp = key.Timestamp; pointCount = pointCounts[pointID]; if (pointCount++ % intervals[pointID] == 0 || !forceLimit && timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan) { yield return new TrendValue { ID = (long)pointID, Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond, Value = value.AsSingle } } ; pointCounts[pointID] = pointCount; lastTimes[pointID] = timestamp; } } } } }
private async Task CopyModelAsCsvToStreamAsync(SecurityPrincipal securityPrincipal, NameValueCollection requestParameters, Stream responseStream, CancellationToken cancellationToken) { const double DefaultFrameRate = 30; const int DefaultTimestampSnap = 0; string dateTimeFormat = Program.Host.Model.Global.DateTimeFormat; // TODO: Improve operation for large point lists: // Pick-up "POST"ed parameters with a "genurl" param, then cache parameters // in a memory cache and return the unique URL (a string instead of a file) // with a "download" param and unique ID associated with cached parameters. // Then extract params based on unique ID and follow normal steps... // Note TSTolerance is in ms string pointIDsParam = requestParameters["PointIDs"]; string startTimeParam = requestParameters["StartTime"]; string endTimeParam = requestParameters["EndTime"]; string timestampSnapParam = requestParameters["TSSnap"]; string frameRateParam = requestParameters["FrameRate"]; string alignTimestampsParam = requestParameters["AlignTimestamps"]; string missingAsNaNParam = requestParameters["MissingAsNaN"]; string fillMissingTimestampsParam = requestParameters["FillMissingTimestamps"]; string instanceName = requestParameters["InstanceName"]; string toleranceParam = requestParameters["TSTolerance"]; ulong[] pointIDs; string headers; if (string.IsNullOrEmpty(pointIDsParam)) { throw new ArgumentNullException("PointIDs", "Cannot export data: no values were provided in \"PointIDs\" parameter."); } try { pointIDs = pointIDsParam.Split(',').Select(ulong.Parse).ToArray(); Array.Sort(pointIDs); } catch (Exception ex) { throw new ArgumentNullException("PointIDs", $"Cannot export data: failed to parse \"PointIDs\" parameter value \"{pointIDsParam}\": {ex.Message}"); } if (string.IsNullOrEmpty(startTimeParam)) { throw new ArgumentNullException("StartTime", "Cannot export data: no \"StartTime\" parameter value was specified."); } if (string.IsNullOrEmpty(pointIDsParam)) { throw new ArgumentNullException("EndTime", "Cannot export data: no \"EndTime\" parameter value was specified."); } DateTime startTime, endTime; try { startTime = DateTime.ParseExact(startTimeParam, dateTimeFormat, null, DateTimeStyles.AdjustToUniversal); } catch (Exception ex) { throw new ArgumentException($"Cannot export data: failed to parse \"StartTime\" parameter value \"{startTimeParam}\". Expected format is \"{dateTimeFormat}\". Error message: {ex.Message}", "StartTime", ex); } try { endTime = DateTime.ParseExact(endTimeParam, dateTimeFormat, null, DateTimeStyles.AdjustToUniversal); } catch (Exception ex) { throw new ArgumentException($"Cannot export data: failed to parse \"EndTime\" parameter value \"{endTimeParam}\". Expected format is \"{dateTimeFormat}\". Error message: {ex.Message}", "EndTime", ex); } if (startTime > endTime) { throw new ArgumentOutOfRangeException("StartTime", "Cannot export data: start time exceeds end time."); } using (DataContext dataContext = new DataContext()) { // Validate current user has access to requested data if (!dataContext.UserIsInRole(securityPrincipal, s_minimumRequiredRoles)) { throw new SecurityException($"Cannot export data: access is denied for user \"{Thread.CurrentPrincipal.Identity?.Name ?? 
"Undefined"}\", minimum required roles = {s_minimumRequiredRoles.ToDelimitedString(", ")}."); } headers = GetHeaders(dataContext, pointIDs.Select(id => (int)id)); } if (!double.TryParse(frameRateParam, out double frameRate)) { frameRate = DefaultFrameRate; } if (!int.TryParse(timestampSnapParam, out int timestampSnap)) { timestampSnap = DefaultTimestampSnap; } if (!double.TryParse(toleranceParam, out double tolerance)) { tolerance = 0.5; } int toleranceTicks = (int)Math.Ceiling(tolerance * Ticks.PerMillisecond); bool alignTimestamps = alignTimestampsParam?.ParseBoolean() ?? true; bool missingAsNaN = missingAsNaNParam?.ParseBoolean() ?? true; bool fillMissingTimestamps = alignTimestamps && (fillMissingTimestampsParam?.ParseBoolean() ?? false); if (string.IsNullOrEmpty(instanceName)) { instanceName = TrendValueAPI.DefaultInstanceName; } LocalOutputAdapter.Instances.TryGetValue(instanceName, out LocalOutputAdapter adapter); HistorianServer serverInstance = adapter?.Server; if (serverInstance == null) { throw new InvalidOperationException($"Cannot export data: failed to access internal historian server instance \"{instanceName}\"."); } const int TargetBufferSize = 524288; StringBuilder readBuffer = new StringBuilder(TargetBufferSize * 2); ManualResetEventSlim bufferReady = new ManualResetEventSlim(false); List <string> writeBuffer = new List <string>(); object writeBufferLock = new object(); bool readComplete = false; Task readTask = Task.Factory.StartNew(() => { try { using (SnapClient connection = SnapClient.Connect(serverInstance.Host)) { Dictionary <ulong, int> pointIDIndex = new Dictionary <ulong, int>(pointIDs.Length); float[] values = new float[pointIDs.Length]; for (int i = 0; i < pointIDs.Length; i++) { pointIDIndex.Add(pointIDs[i], i); } for (int i = 0; i < values.Length; i++) { values[i] = float.NaN; } ulong interval; if (Math.Abs(frameRate % 1) <= (double.Epsilon * 100)) { Ticks[] subseconds = Ticks.SubsecondDistribution((int)frameRate); interval = (ulong)(subseconds.Length > 1 ? subseconds[1].Value : Ticks.PerSecond); } else { interval = (ulong)(Math.Floor(1.0d / frameRate) * Ticks.PerSecond); } ulong lastTimestamp = 0; // Write data pages SeekFilterBase <HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange <HistorianKey>(startTime, endTime); MatchFilterBase <HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList <HistorianKey, HistorianValue>(pointIDs); HistorianKey historianKey = new HistorianKey(); HistorianValue historianValue = new HistorianValue(); // Write row values function Action bufferValues = () => { readBuffer.Append(missingAsNaN ? string.Join(",", values) : string.Join(",", values.Select(val => float.IsNaN(val) ? 
"" : $"{val}"))); if (readBuffer.Length < TargetBufferSize) { return; } lock (writeBufferLock) writeBuffer.Add(readBuffer.ToString()); readBuffer.Clear(); bufferReady.Set(); }; using (ClientDatabaseBase <HistorianKey, HistorianValue> database = connection.GetDatabase <HistorianKey, HistorianValue>(instanceName)) { // Start stream reader for the provided time window and selected points TreeStream <HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter); ulong timestamp = 0; // Adjust timestamp to use first timestamp as base bool adjustTimeStamp = true; long baseTime = startTime.Ticks; if (timestampSnap == 0) { adjustTimeStamp = false; baseTime = Ticks.RoundToSecondDistribution(startTime.Ticks, frameRate, startTime.Ticks - startTime.Ticks % Ticks.PerSecond); } else if (timestampSnap == 1) { adjustTimeStamp = true; } else if (timestampSnap == 2) { adjustTimeStamp = false; baseTime = startTime.Ticks; } while (stream.Read(historianKey, historianValue) && !cancellationToken.IsCancellationRequested) { if (alignTimestamps) { if (adjustTimeStamp) { adjustTimeStamp = false; baseTime = (long)historianKey.Timestamp; } // Make sure the timestamp is actually close enough to the distribution Ticks ticks = Ticks.ToSecondDistribution((long)historianKey.Timestamp, frameRate, baseTime, toleranceTicks); if (ticks == Ticks.MinValue) { continue; } timestamp = (ulong)ticks.Value; } else { timestamp = historianKey.Timestamp; } // Start a new row for each encountered new timestamp if (timestamp != lastTimestamp) { if (lastTimestamp > 0) { bufferValues(); } for (int i = 0; i < values.Length; i++) { values[i] = float.NaN; } if (fillMissingTimestamps && lastTimestamp > 0 && timestamp > lastTimestamp) { ulong difference = timestamp - lastTimestamp; if (difference > interval) { ulong interpolated = lastTimestamp; for (ulong i = 1; i < difference / interval; i++) { interpolated = (ulong)Ticks.RoundToSecondDistribution((long)(interpolated + interval), frameRate, startTime.Ticks).Value; readBuffer.Append($"{Environment.NewLine}{new DateTime((long)interpolated, DateTimeKind.Utc).ToString(dateTimeFormat)},"); bufferValues(); } } } readBuffer.Append($"{Environment.NewLine}{new DateTime((long)timestamp, DateTimeKind.Utc).ToString(dateTimeFormat)},"); lastTimestamp = timestamp; } // Save value to its column values[pointIDIndex[historianKey.PointID]] = historianValue.AsSingle; } if (timestamp > 0) { bufferValues(); } if (readBuffer.Length > 0) { lock (writeBufferLock) writeBuffer.Add(readBuffer.ToString()); } } } } finally { readComplete = true; bufferReady.Set(); } }, cancellationToken); Task writeTask = Task.Factory.StartNew(() => { using (StreamWriter writer = new StreamWriter(responseStream)) { //Ticks exportStart = DateTime.UtcNow.Ticks; string[] localBuffer; // Write column headers writer.Write(headers); while ((writeBuffer.Count > 0 || !readComplete) && !cancellationToken.IsCancellationRequested) { bufferReady.Wait(cancellationToken); bufferReady.Reset(); lock (writeBufferLock) { localBuffer = writeBuffer.ToArray(); writeBuffer.Clear(); } foreach (string buffer in localBuffer) { writer.Write(buffer); } } // Flush stream writer.Flush(); //Debug.WriteLine("Export time: " + (DateTime.UtcNow.Ticks - exportStart).ToElapsedTimeString(3)); } }, cancellationToken); await readTask; await writeTask; }