/// <summary>
/// Advances the underlying stream by one point, reading into
/// <see cref="CurrentKey"/>/<see cref="CurrentValue"/> and mirroring the
/// outcome in <see cref="IsValid"/>.
/// </summary>
/// <param name="key">not read into; present to satisfy the base signature</param>
/// <param name="value">not read into; present to satisfy the base signature</param>
/// <returns>true if a point was read; false when the stream is exhausted</returns>
protected override bool ReadNext(HistorianKey key, HistorianValue value)
{
    // IsValid tracks exactly whether the last read produced a point.
    IsValid = m_stream.Read(CurrentKey, CurrentValue);
    return IsValid;
}
/// <summary>
/// Builds a histogram of the stream's points, bucketed by the top
/// <paramref name="higherBits"/> bits of the lower 32 bits of each point's Value1.
/// </summary>
/// <param name="stream">the points to measure; the stream is consumed by this call</param>
/// <param name="higherBits">number of high-order bits (1 to 31) that select the bucket</param>
/// <returns>a histogram with 2^<paramref name="higherBits"/> buckets</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// if <paramref name="higherBits"/> is outside 1 to 31.
/// </exception>
public int[] MeasureBits(TreeStream<HistorianKey, HistorianValue> stream, int higherBits)
{
    // C# masks 32-bit shift counts to 5 bits, so higherBits == 0 would make
    // "32 - 0" shift by zero and index far past the single-element array, and
    // higherBits == 32 would overflow "1 << 32" to a 1-element array. Reject both.
    if (higherBits < 1 || higherBits > 31)
        throw new ArgumentOutOfRangeException(nameof(higherBits), "higherBits must be between 1 and 31");

    HistorianKey hkey = new HistorianKey();
    HistorianValue hvalue = new HistorianValue();
    int[] bucket = new int[1 << higherBits];
    int shiftBits = 32 - higherBits;

    while (stream.Read(hkey, hvalue))
    {
        // Only the lower 32 bits of Value1 participate in the bucketing.
        uint value = (uint)hvalue.Value1 >> shiftBits;
        bucket[value]++;
    }

    return bucket;
}
/// <summary>
/// Parses an entire stream to count the number of points. Notice, this will
/// enumerate the list and the list will have to be reset to be enumerated again.
/// </summary>
/// <typeparam name="TKey">The key type</typeparam>
/// <typeparam name="TValue">The value type</typeparam>
/// <param name="stream">The stream to enumerate</param>
/// <returns>the number of items in the stream.</returns>
public static long Count<TKey, TValue>(this TreeStream<TKey, TValue> stream)
    where TKey : class, new()
    where TValue : class, new()
{
    TKey scratchKey = new TKey();
    TValue scratchValue = new TValue();
    long total = 0;

    // The scratch pair is overwritten on every read; only the count matters.
    while (stream.Read(scratchKey, scratchValue))
        total++;

    return total;
}
/// <summary>
/// Reads the next point from the stream into <paramref name="point"/>.
/// </summary>
/// <param name="point">receives the timestamp, ID, value and flags of the next point</param>
/// <returns>true if a point was read; false when the stream is exhausted</returns>
public bool ReadNext(DataPoint point)
{
    bool advanced = m_stream.Read(m_key, m_value);

    if (advanced)
    {
        point.Timestamp = m_key.Timestamp;
        point.PointID = m_key.PointID;
        point.Value = m_value.Value1;
        point.Flags = m_value.Value3;
    }

    return advanced;
}
/// <summary>
/// Builds a histogram of the stream's points, bucketed by the top
/// <paramref name="higherBits"/> bits of the lower 32 bits of each point's Value1.
/// </summary>
/// <param name="stream">the points to measure; the stream is consumed by this call</param>
/// <param name="higherBits">number of high-order bits (1 to 31) that select the bucket</param>
/// <returns>a histogram with 2^<paramref name="higherBits"/> buckets</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// if <paramref name="higherBits"/> is outside 1 to 31.
/// </exception>
public int[] MeasureBits(TreeStream<HistorianKey, HistorianValue> stream, int higherBits)
{
    // Guard the shift: C# masks 32-bit shift counts to 5 bits, so higherBits == 0
    // would shift by zero (not 32) and index past the 1-element bucket array,
    // and higherBits == 32 would overflow "1 << 32" down to a 1-element array.
    if (higherBits < 1 || higherBits > 31)
        throw new ArgumentOutOfRangeException(nameof(higherBits), "higherBits must be between 1 and 31");

    HistorianKey hkey = new HistorianKey();
    HistorianValue hvalue = new HistorianValue();
    int[] bucket = new int[1 << higherBits];
    int shiftBits = 32 - higherBits;

    while (stream.Read(hkey, hvalue))
    {
        // Only the lower 32 bits of Value1 participate in the bucketing.
        uint value = (uint)hvalue.Value1 >> shiftBits;
        bucket[value]++;
    }

    return bucket;
}
/// <summary>
/// Queries the provided signals within the provided time window [Inclusive].
/// </summary>
/// <param name="database">the database to read from</param>
/// <param name="startTime">the lower bound of the time</param>
/// <param name="endTime">the upper bound of the time. [Inclusive]</param>
/// <param name="signals">an IEnumerable of all of the signals to query as part of the results set.
/// NOTE: enumerated twice (once to seed the result map, once for the query).</param>
/// <returns>the queried raw values keyed by point ID</returns>
public static Dictionary<ulong, RawSignalTimeValue> GetRawSignals(this ClientDatabaseBase<HistorianKey, HistorianValue> database, DateTime startTime, DateTime endTime, IEnumerable<ulong> signals)
{
    // Seed an entry per requested signal so IDs with no data still appear in the result.
    Dictionary<ulong, RawSignalTimeValue> results = signals.ToDictionary(id => id, id => new RawSignalTimeValue());

    TreeStream<HistorianKey, HistorianValue> stream = database.Read((ulong)startTime.Ticks, (ulong)endTime.Ticks, signals);
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    while (stream.Read(key, value))
        results[key.PointID].Signals.Add(key.TimestampAsDate, value.ToStruct());

    return results;
}
/// <summary>
/// Loads this instance's key/value fields from the next point in the stream.
/// </summary>
/// <param name="stream">the stream to read one point from</param>
/// <returns>true if a point was loaded; false when the stream is exhausted</returns>
public bool Load(TreeStream<HistorianKey, HistorianValue> stream)
{
    HistorianKey nextKey = new HistorianKey();
    HistorianValue nextValue = new HistorianValue();

    if (!stream.Read(nextKey, nextValue))
        return false;

    Key1 = nextKey.Timestamp;
    Key2 = nextKey.PointID;
    // Note the original field mapping: Value1 carries the flags word (Value3)
    // and Value2 carries the measurement (Value1).
    Value1 = nextValue.Value3;
    Value2 = nextValue.Value1;
    return true;
}
/// <summary>
/// Writes the tree stream to the database.
/// </summary>
/// <param name="stream">all of the key/value pairs to add to the database.</param>
/// <exception cref="InvalidOperationException">
/// if a reader is currently open on this connection; dispose of the reader first.
/// </exception>
public override void Write(TreeStream<TKey, TValue> stream)
{
    // InvalidOperationException (rather than bare Exception) correctly signals
    // "object is in the wrong state for this call" and remains catchable by
    // any existing catch (Exception) handlers.
    if (m_reader != null)
        throw new InvalidOperationException("Sockets do not support writing while a reader is open. Dispose of reader.");

    m_stream.Write((byte)ServerCommand.Write);
    m_encodingMode.ResetEncoder();

    while (stream.Read(m_tmpKey, m_tmpValue))
        m_encodingMode.Encode(m_stream, m_tmpKey, m_tmpValue);

    // Terminate the encoded point series and push any buffered bytes out.
    m_encodingMode.WriteEndOfStream(m_stream);
    m_stream.Flush();
}
/// <summary>
/// Reads keys 10 through 799 from the "DB" archive and prints each timestamp.
/// </summary>
public void TestReadData()
{
    HistorianServerDatabaseConfig config = new HistorianServerDatabaseConfig("DB", @"c:\temp\Scada\", false);

    using (HistorianServer server = new HistorianServer(config, 1234))
    using (SnapClient client = SnapClient.Connect(server.Host))
    {
        ClientDatabaseBase<HistorianKey, HistorianValue> database = client.GetDatabase<HistorianKey, HistorianValue>("DB");
        TreeStream<HistorianKey, HistorianValue> stream = database.Read(10, 800 - 1);
        HistorianKey key = new HistorianKey();
        HistorianValue value = new HistorianValue();

        while (stream.Read(key, value))
            Console.WriteLine(key.Timestamp);
    }
}
/// <summary>
/// Benchmarks a full scan of the "PPA" archive and prints the read rate
/// in millions of points per second.
/// </summary>
public void TestReadData()
{
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    HistorianServerDatabaseConfig settings = new HistorianServerDatabaseConfig("PPA", @"c:\temp\Scada\", true);

    using (HistorianServer server = new HistorianServer(settings))
    {
        double count = 0;
        DebugStopwatch sw = new DebugStopwatch();

        // TimeEvent runs the scan and reports elapsed seconds; count is reset
        // on each invocation so repeated timing runs stay accurate.
        double time = sw.TimeEvent(() =>
        {
            count = 0;

            using (HistorianClient client = new HistorianClient("127.0.0.1", 12345))
            using (ClientDatabaseBase<HistorianKey, HistorianValue> database = client.GetDatabase<HistorianKey, HistorianValue>(String.Empty))
            {
                TreeStream<HistorianKey, HistorianValue> stream = database.Read(0, ulong.MaxValue);

                while (stream.Read(key, value))
                    count++;
            }
        });

        Console.WriteLine((count / 1000000 / time).ToString() + " Million PPS");
    }
}
/// <summary>
/// Reads the next point from the stream into <paramref name="point"/>.
/// </summary>
/// <param name="point">receives the timestamp, ID, value and flags of the next point</param>
/// <returns>true if a point was read; false when the stream is exhausted</returns>
/// <exception cref="NullReferenceException">if the stream has not been initialized</exception>
public bool ReadNext(DataPoint point)
{
    // NOTE(review): throwing NullReferenceException explicitly is unconventional
    // (InvalidOperationException is the idiomatic choice), but the type is kept
    // here so existing callers that catch it are unaffected.
    if ((object)m_stream == null)
        throw new NullReferenceException("Stream is not initialized");

    if (!m_stream.Read(m_key, m_value))
        return false;

    point.Timestamp = m_key.Timestamp;
    point.PointID = m_key.PointID;
    point.Value = m_value.Value1;
    point.Flags = m_value.Value3;
    return true;
}
/// <summary>
/// Reads keys 0 through 1000 from the "DB" archive and prints each timestamp.
/// </summary>
public void TestReadData()
{
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    HistorianServerDatabaseConfig settings = new HistorianServerDatabaseConfig("DB", @"c:\temp\Scada\", true);

    // Stacked usings dispose in reverse order: database, client, then server.
    using (HistorianServer server = new HistorianServer(settings, 12345))
    using (HistorianClient client = new HistorianClient("127.0.0.1", 12345))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = client.GetDatabase<HistorianKey, HistorianValue>("DB"))
    {
        TreeStream<HistorianKey, HistorianValue> stream = database.Read(0, 1000);

        while (stream.Read(key, value))
            Console.WriteLine(key.Timestamp);
    }
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="connection">openHistorian connection.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Comma separated list of measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <returns>Enumeration of <see cref="IMeasurement"/> values read for time range.</returns>
/// <example>
/// <code>
/// using (var connection = new Connection("127.0.0.1", "PPA"))
///     foreach(var measurement in GetHistorianData(connection, DateTime.UtcNow.AddMinutes(-1.0D), DateTime.UtcNow))
///         Console.WriteLine("{0}:{1} @ {2} = {3}, quality: {4}", measurement.Key.Source, measurement.Key.ID, measurement.Timestamp, measurement.Value, measurement.StateFlags);
/// </code>
/// </example>
public static IEnumerable<IMeasurement> GetHistorianData(Connection connection, DateTime startTime, DateTime stopTime, string measurementIDs = null, Resolution resolution = Resolution.Full)
{
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;

    // Full resolution scans a plain range; any other resolution samples the
    // range at the resolution's interval (with a 1ms tolerance window).
    SeekFilterBase<HistorianKey> timeFilter = resolution == Resolution.Full
        ? TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime)
        : TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolution.GetInterval(), new TimeSpan(TimeSpan.TicksPerMillisecond));

    // Restrict to the requested point IDs when a list was provided.
    if (!string.IsNullOrEmpty(measurementIDs))
        pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs.Split(',').Select(ulong.Parse));

    // Stream the selected window, materializing one measurement per point read.
    using (Database database = connection.OpenDatabase())
    {
        TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);

        while (stream.Read(key, value))
        {
            yield return new Measurement()
            {
                Metadata = MeasurementKey.LookUpOrCreate(connection.InstanceName, (uint)key.PointID).Metadata,
                Timestamp = key.TimestampAsDate,
                Value = value.AsSingle,
                StateFlags = (MeasurementStateFlags)value.Value3
            };
        }
    }
}
/// <summary>
/// Scans the entire archive for distinct point IDs and repopulates the
/// "all points" checklist with them in ascending order.
/// </summary>
private void BuildListOfAllPoints()
{
    HashSet<ulong> pointIds = new HashSet<ulong>();

    // using blocks guarantee the client and database are released even if the
    // scan throws; the original only disposed them on the success path.
    using (SnapClient client = SnapClient.Connect(m_archiveFile.Host))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> db = client.GetDatabase<HistorianKey, HistorianValue>(""))
    {
        TreeStream<HistorianKey, HistorianValue> scanner = db.Read(0, ulong.MaxValue);
        HistorianKey key = new HistorianKey();
        HistorianValue value = new HistorianValue();

        while (scanner.Read(key, value))
            pointIds.Add(key.PointID);
    }

    List<ulong> allKeys = pointIds.ToList();
    allKeys.Sort();

    chkAllPoints.Items.Clear();
    allKeys.ForEach(id => chkAllPoints.Items.Add(id));
}
/// <summary>
/// Encodes every remaining point from the scanner onto the outgoing stream.
/// </summary>
/// <param name="scanner">the source of points; fully drained by this call</param>
/// <returns>always true</returns>
bool ProcessRead(TreeStream<TKey, TValue> scanner)
{
    TKey key = new TKey();
    TValue value = new TValue();

    //ToDo: Incorporate some way to cancel a stream.
    while (scanner.Read(key, value))
        m_encodingMethod.Encode(m_stream, key, value);

    return true;
}
/// <summary>
/// Builds an archive file from a stream whose points are NOT guaranteed to
/// arrive in sorted order: points are staged through an in-memory sort buffer,
/// spilled to intermediate sorted tables as the buffer fills, then merged into
/// the final file.
/// </summary>
/// <param name="pendingFileName">file name to use while the file is being written</param>
/// <param name="completeFileName">file name used once writing completes — presumably a rename target; confirm in Create</param>
/// <param name="blockSize">block size of the destination file</param>
/// <param name="archiveIdCallback">callback invoked with an archive <see cref="Guid"/> — TODO confirm when it fires</param>
/// <param name="treeNodeType">the encoding definition for the destination tree</param>
/// <param name="treeStream">the (possibly unsorted) source points; consumed by this call</param>
/// <param name="flags">flags to attach to the file</param>
public static void CreateNonSequential(string pendingFileName, string completeFileName, int blockSize, Action<Guid> archiveIdCallback, EncodingDefinition treeNodeType, TreeStream<TKey, TValue> treeStream, params Guid[] flags)
{
    SortedPointBuffer<TKey, TValue> m_queue;

    // Buffer holds up to 100,000 points at a time; writing mode is selected
    // before any points are enqueued.
    m_queue = new SortedPointBuffer<TKey, TValue>(100000, true);
    m_queue.IsReadingMode = false;

    TKey key = new TKey();
    TValue value = new TValue();

    List<SortedTreeTable<TKey, TValue>> pendingFiles = new List<SortedTreeTable<TKey, TValue>>();

    try
    {
        // Stage 1: drain the source stream. When the buffer fills, spill it to
        // an intermediate memory file first, then enqueue the pending point.
        // CreateMemoryFile presumably drains and resets the buffer — TODO confirm.
        while (treeStream.Read(key, value))
        {
            if (m_queue.IsFull)
            {
                pendingFiles.Add(CreateMemoryFile(treeNodeType, m_queue));
            }

            m_queue.TryEnqueue(key, value);
        }

        // Flush whatever remains buffered after the stream ends.
        if (m_queue.Count > 0)
        {
            pendingFiles.Add(CreateMemoryFile(treeNodeType, m_queue));
        }

        // Stage 2: union all pending sorted tables into one sorted stream and
        // hand it to the sequential writer.
        using (UnionTreeStream<TKey, TValue> reader = new UnionTreeStream<TKey, TValue>(pendingFiles.Select(x => new ArchiveTreeStreamWrapper<TKey, TValue>(x)), false))
        {
            Create(pendingFileName, completeFileName, blockSize, archiveIdCallback, treeNodeType, reader, flags);
        }
    }
    finally
    {
        // Intermediate tables are always released, even if the merge fails.
        pendingFiles.ForEach(x => x.Dispose());
    }
}
/// <summary>
/// Materializes a historian stream into a list of archive data points,
/// mapping measurement state flags onto legacy quality values.
/// </summary>
/// <param name="stream">the stream of points to convert; fully drained by this call</param>
/// <returns>the converted points in stream order</returns>
private IEnumerable<IDataPoint> ReadDataStream(TreeStream<HistorianKey, HistorianValue> stream)
{
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    List<ArchiveDataPoint> queriedData = new List<ArchiveDataPoint>();

    while (stream.Read(key, value))
    {
        MeasurementStateFlags stateFlags = (MeasurementStateFlags)value.Value3;

        // Flag precedence: bad data trumps bad time; clean flags mean good quality.
        Quality quality;
        if ((stateFlags & MeasurementStateFlags.BadData) != 0)
            quality = Quality.SuspectData;
        else if ((stateFlags & MeasurementStateFlags.BadTime) != 0)
            quality = Quality.Old;
        else
            quality = Quality.Good;

        ArchiveDataPoint point = new ArchiveDataPoint((int)key.PointID);
        point.Time = new TimeTag(new DateTime((long)key.Timestamp));
        point.Value = BitConvert.ToSingle(value.Value1);
        point.Quality = quality;

        queriedData.Add(point);
    }

    return queriedData;
}
/// <summary>
/// Scans up to ten million points from the "PPA" archive starting at a fixed
/// timestamp and prints the count, elapsed seconds, and read rate.
/// </summary>
public static void ReadAllPoints()
{
    Stopwatch timer = new Stopwatch();
    int pointCount = 0;
    HistorianServerDatabaseConfig settings = new HistorianServerDatabaseConfig("PPA", @"C:\Program Files\openHistorian\Archive\", true);

    using (HistorianServer server = new HistorianServer(settings))
    {
        // Fixed scan origin: 2/1/2014 6:00:00 PM local, converted to UTC.
        DateTime start = DateTime.FromBinary(Convert.ToDateTime("2/1/2014").Date.Ticks + Convert.ToDateTime("6:00:00PM").TimeOfDay.Ticks).ToUniversalTime();

        using (HistorianClient client = new HistorianClient("127.0.0.1", 12345))
        using (ClientDatabaseBase<HistorianKey, HistorianValue> database = client.GetDatabase<HistorianKey, HistorianValue>(String.Empty))
        {
            HistorianKey key = new HistorianKey();
            HistorianValue value = new HistorianValue();

            timer.Start();
            TreeStream<HistorianKey, HistorianValue> scan = database.Read((ulong)start.Ticks, ulong.MaxValue);

            while (scan.Read(key, value) && pointCount < 10000000)
                pointCount++;

            timer.Stop();
        }
    }

    Console.WriteLine(pointCount);
    Console.WriteLine(timer.Elapsed.TotalSeconds.ToString());
    Console.WriteLine((pointCount / timer.Elapsed.TotalSeconds / 1000000).ToString());
}
/// <summary>
/// Queries the provided signals within the provided time window [Inclusive].
/// </summary>
/// <param name="database">the database to read from</param>
/// <param name="startTime">the lower bound of the time</param>
/// <param name="endTime">the upper bound of the time. [Inclusive]</param>
/// <param name="signals">an IEnumerable of all of the signals to query as part of the results set.
/// NOTE: enumerated twice (once to seed the result map, once for the query).</param>
/// <returns>the queried signals keyed by point ID</returns>
public static Dictionary<ulong, SignalDataBase> GetSignals(this IDatabaseReader<HistorianKey, HistorianValue> database, ulong startTime, ulong endTime, IEnumerable<ulong> signals)
{
    // Seed every requested signal with an "unknown" placeholder so IDs with no
    // data still appear in the result.
    Dictionary<ulong, SignalDataBase> results = signals.ToDictionary(id => id, id => (SignalDataBase)new SignalDataUnknown());

    TreeStream<HistorianKey, HistorianValue> stream = database.Read(startTime, endTime, signals);
    HistorianKey key = new HistorianKey();
    HistorianValue hvalue = new HistorianValue();

    while (stream.Read(key, hvalue))
        results.AddSignalIfExists(key.Timestamp, key.PointID, hvalue.Value1);

    // Seal each signal so consumers see a completed series.
    foreach (SignalDataBase signal in results.Values)
        signal.Completed();

    return results;
}
/// <summary>
/// Queries the provided signals within the time described by the <see cref="QueryFilterTimestamp"/>.
/// With this method, the signals will be strong typed and therefore can be converted.
/// </summary>
/// <param name="database">the database to read from</param>
/// <param name="timestamps">a <see cref="QueryFilterTimestamp"/> that describes how a signal will be parsed</param>
/// <param name="signals">an IEnumerable of all of the signals to query as part of the results set.
/// NOTE: enumerated twice (once to seed the result map, once to build the point filter).</param>
/// <param name="readerOptions">The options that will be used when querying this data.</param>
/// <returns>the queried signals keyed by historian ID</returns>
public static Dictionary<ulong, SignalDataBase> GetSignals(this IDatabaseReader<HistorianKey, HistorianValue> database, SeekFilterBase<HistorianKey> timestamps, IEnumerable<ISignalWithType> signals, SortedTreeEngineReaderOptions readerOptions)
{
    Dictionary<ulong, SignalDataBase> results = new Dictionary<ulong, SignalDataBase>();

    // Seed one strongly-typed entry per signal that has a historian ID,
    // skipping duplicates.
    foreach (ISignalWithType pt in signals)
    {
        if (pt.HistorianId.HasValue && !results.ContainsKey(pt.HistorianId.Value))
            results.Add(pt.HistorianId.Value, new SignalData(pt.Functions));
    }

    // Only the IDs seeded above participate in the query.
    MatchFilterBase<HistorianKey, HistorianValue> keyParser = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(signals.Where(x => x.HistorianId.HasValue).Select(x => x.HistorianId.Value));
    TreeStream<HistorianKey, HistorianValue> stream = database.Read(readerOptions, timestamps, keyParser);

    HistorianKey key = new HistorianKey();
    HistorianValue hvalue = new HistorianValue();

    while (stream.Read(key, hvalue))
        results.AddSignalIfExists(key.Timestamp, key.PointID, hvalue.Value1);

    // Seal each signal so consumers see a completed series.
    foreach (SignalDataBase signal in results.Values)
        signal.Completed();

    return results;
}
/// <summary>
/// Worker loop that repeatedly opens a client, scans up to PointsToRead points
/// from the fixed start time, and closes it again until StopReading is set.
/// </summary>
void ReaderThread()
{
    int threadId = Interlocked.Increment(ref ThreadNumber);

    try
    {
        // Fixed scan origin: 2/1/2014 6:00:00 PM local, converted to UTC.
        DateTime start = DateTime.FromBinary(Convert.ToDateTime("2/1/2014").Date.Ticks + Convert.ToDateTime("6:00:00PM").TimeOfDay.Ticks).ToUniversalTime();

        while (!StopReading)
        {
            // Bump the shared run counter (kept for parity with other readers).
            Interlocked.Increment(ref ReaderNumber);

            Stopwatch sw = new Stopwatch();
            int pointCount = 0;

            using (HistorianClient client = new HistorianClient("127.0.0.1", 12345))
            using (ClientDatabaseBase<HistorianKey, HistorianValue> database = client.GetDatabase<HistorianKey, HistorianValue>(String.Empty))
            {
                HistorianKey key = new HistorianKey();
                HistorianValue value = new HistorianValue();

                sw.Start();
                TreeStream<HistorianKey, HistorianValue> scan = database.Read((ulong)start.Ticks, ulong.MaxValue);

                while (scan.Read(key, value) && pointCount < PointsToRead)
                    pointCount++;

                sw.Stop();
            }
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort: a failing reader thread should not abort the
        // stress test; it simply falls through and reports that it quit.
    }

    Console.WriteLine("Thread: " + threadId.ToString() + " Quit");
}
/// <summary>
/// Times a scan of three specific point IDs over the full time range of the
/// "PPA" archive.
/// </summary>
public static void TestReadPoints2()
{
    int pointCount = 0;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    HistorianServerDatabaseConfig settings = new HistorianServerDatabaseConfig("PPA", @"C:\Program Files\openHistorian\Archive\", true);

    using (HistorianServer server = new HistorianServer(settings))
    {
        Stopwatch sw = Stopwatch.StartNew();

        using (HistorianClient client = new HistorianClient("127.0.0.1", 12345))
        using (ClientDatabaseBase<HistorianKey, HistorianValue> database = client.GetDatabase<HistorianKey, HistorianValue>(String.Empty))
        {
            TreeStream<HistorianKey, HistorianValue> stream = database.Read(0, (ulong)DateTime.MaxValue.Ticks, new ulong[] { 65, 953, 5562 });

            while (stream.Read(key, value))
                pointCount++;
        }

        sw.Stop();
    }
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="server">The server to use for the query.</param>
/// <param name="instanceName">Name of the archive to be queried.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <param name="seriesLimit">Maximum number of points per series.</param>
/// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param>
/// <param name="cancellationToken">Cancellation token for query.</param>
/// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns>
public static IEnumerable<TrendValue> GetHistorianData(SnapServer server, string instanceName, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null)
{
    if (cancellationToken == null)
        cancellationToken = new CancellationToken();

    if (server == null)
        yield break;

    // Setting series limit to zero requests full resolution data, which overrides provided parameter
    if (seriesLimit < 1)
        resolution = Resolution.Full;

    TimeSpan resolutionInterval = resolution.GetInterval();
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        // Baseline the query window to the resolution's natural boundary before
        // building an interval-sampled filter with a 1ms tolerance window.
        BaselineTimeInterval interval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
            interval = BaselineTimeInterval.Second;
        else if (resolutionInterval.Ticks < Ticks.PerHour)
            interval = BaselineTimeInterval.Minute;
        else if (resolutionInterval.Ticks == Ticks.PerHour)
            interval = BaselineTimeInterval.Hour;

        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);

        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    Dictionary<ulong, DataRow> metadata = null;

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName))
    {
        if (database == null)
            yield break;

        if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? DefaultInstanceName, out LocalOutputAdapter historianAdapter))
            metadata = historianAdapter?.Measurements;

        if (metadata == null)
            yield break;

        // Setup point ID selections
        if (measurementIDs != null)
            pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);
        else
            measurementIDs = metadata.Keys.ToArray();

        // Start stream reader for the provided time window and selected points
        Dictionary<ulong, long> pointCounts = new Dictionary<ulong, long>(measurementIDs.Length);
        Dictionary<ulong, long> intervals = new Dictionary<ulong, long>(measurementIDs.Length);
        Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(measurementIDs.Length);
        double range = (stopTime - startTime).TotalSeconds;
        ulong resolutionSpan = (ulong)resolutionInterval.Ticks;
        ulong baseTicks = (ulong)UnixTimeTag.BaseTicks.Value;

        if (resolutionSpan <= 1UL)
            resolutionSpan = Ticks.PerSecond;

        if (seriesLimit < 1)
            seriesLimit = 1;

        // Estimate total measurement counts per point so decimation intervals for each series can be calculated
        foreach (ulong measurementID in measurementIDs)
        {
            if (resolution == Resolution.Full)
                pointCounts[measurementID] = metadata.TryGetValue(measurementID, out DataRow row) ? (long)(int.Parse(row["FramesPerSecond"].ToString()) * range) : 2;
            else
                pointCounts[measurementID] = (long)(range / resolutionInterval.TotalSeconds.NotZero(1.0D));
        }

        foreach (ulong measurementID in pointCounts.Keys)
            intervals[measurementID] = (pointCounts[measurementID] / seriesLimit).NotZero(1L);

        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value) && !cancellationToken.IsCancelled)
            {
                ulong pointID = key.PointID;
                ulong timestamp = key.Timestamp;
                long pointCount = pointCounts[pointID];

                // Publish every Nth point for the series; unless strictly limited,
                // also publish when more than a resolution span has elapsed.
                if (pointCount++ % intervals[pointID] == 0 || !forceLimit && timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan)
                    yield return new TrendValue
                    {
                        ID = (long)pointID,
                        Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond,
                        Value = value.AsSingle
                    };

                pointCounts[pointID] = pointCount;
                lastTimes[pointID] = timestamp;
            }
        }
    }
}
/// <summary>
/// Reads data source values for the Grafana query window, optionally decimated
/// to an estimated plot resolution.
/// </summary>
/// <param name="startTime">start of the query window</param>
/// <param name="stopTime">end of the query window</param>
/// <param name="interval">interval from the Grafana request (unused here; resolution is estimated)</param>
/// <param name="decimate">when true, sample at the estimated plot resolution instead of full resolution</param>
/// <param name="targetMap">point IDs mapped to their Grafana target names</param>
/// <returns>one <see cref="DataSourceValue"/> per point read</returns>
protected override IEnumerable<DataSourceValue> QueryDataSourceValues(DateTime startTime, DateTime stopTime, string interval, bool decimate, Dictionary<ulong, string> targetMap)
{
    SnapServer server = GetAdapterInstance(InstanceName)?.Server?.Host;

    // No adapter/server available — nothing to enumerate.
    if ((object)server == null)
    {
        yield break;
    }

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(InstanceName))
    {
        if ((object)database == null)
        {
            yield break;
        }

        Resolution resolution = TrendValueAPI.EstimatePlotResolution(InstanceName, startTime, stopTime, targetMap.Keys);
        SeekFilterBase<HistorianKey> timeFilter;

        // Set data scan resolution
        if (!decimate || resolution == Resolution.Full)
        {
            timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
        }
        else
        {
            // Baseline the window to the resolution's natural boundary, then
            // sample at the resolution interval with a 1ms tolerance window.
            TimeSpan resolutionInterval = resolution.GetInterval();
            BaselineTimeInterval timeInterval = BaselineTimeInterval.Second;

            if (resolutionInterval.Ticks < Ticks.PerMinute)
            {
                timeInterval = BaselineTimeInterval.Second;
            }
            else if (resolutionInterval.Ticks < Ticks.PerHour)
            {
                timeInterval = BaselineTimeInterval.Minute;
            }
            else if (resolutionInterval.Ticks == Ticks.PerHour)
            {
                timeInterval = BaselineTimeInterval.Hour;
            }

            startTime = startTime.BaselinedTimestamp(timeInterval);
            stopTime = stopTime.BaselinedTimestamp(timeInterval);

            timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
        }

        // Setup point ID selections
        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(targetMap.Keys);

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            HistorianKey key = new HistorianKey();
            HistorianValue value = new HistorianValue();

            while (stream.Read(key, value))
            {
                // Timestamps are published as milliseconds since the Unix epoch.
                yield return(new DataSourceValue
                {
                    Target = targetMap[key.PointID],
                    Time = (key.Timestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                    Value = value.AsSingle
                });
            }
        }
    }
}
/// <summary>
/// Starts a query that will read data source values, given a set of point IDs and targets, over a time range.
/// </summary>
/// <param name="startTime">Start-time for query.</param>
/// <param name="stopTime">Stop-time for query.</param>
/// <param name="interval">Interval from Grafana request.</param>
/// <param name="includePeaks">Flag that determines if decimated data should include min/max interval peaks over provided time range.</param>
/// <param name="targetMap">Set of IDs with associated targets to query.</param>
/// <returns>Queried data source data in terms of value and time.</returns>
protected override IEnumerable<DataSourceValue> QueryDataSourceValues(DateTime startTime, DateTime stopTime, string interval, bool includePeaks, Dictionary<ulong, string> targetMap)
{
    SnapServer server = GetAdapterInstance(InstanceName)?.Server?.Host;

    // No adapter/server available — nothing to enumerate.
    if (server == null)
    {
        yield break;
    }

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(InstanceName))
    {
        if (database == null)
        {
            yield break;
        }

        // Prefer the caller-supplied interval; fall back to an estimated plot resolution.
        if (!TryParseInterval(interval, out TimeSpan resolutionInterval))
        {
            Resolution resolution = TrendValueAPI.EstimatePlotResolution(InstanceName, startTime, stopTime, targetMap.Keys);
            resolutionInterval = resolution.GetInterval();
        }

        // Baseline the window to the resolution's natural boundary.
        BaselineTimeInterval timeInterval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
        {
            timeInterval = BaselineTimeInterval.Second;
        }
        else if (resolutionInterval.Ticks < Ticks.PerHour)
        {
            timeInterval = BaselineTimeInterval.Minute;
        }
        else if (resolutionInterval.Ticks == Ticks.PerHour)
        {
            timeInterval = BaselineTimeInterval.Hour;
        }

        startTime = startTime.BaselinedTimestamp(timeInterval);
        stopTime = stopTime.BaselinedTimestamp(timeInterval);

        // Guarantee a non-empty window after baselining.
        if (startTime == stopTime)
        {
            stopTime = stopTime.AddSeconds(1.0D);
        }

        SeekFilterBase<HistorianKey> timeFilter;

        // Set timestamp filter resolution
        if (includePeaks || resolutionInterval == TimeSpan.Zero)
        {
            // Full resolution query
            timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
        }
        else
        {
            // Interval query
            timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
        }

        // Setup point ID selections
        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(targetMap.Keys);

        Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(targetMap.Count);
        Dictionary<ulong, Peak> peaks = new Dictionary<ulong, Peak>(targetMap.Count);
        ulong resolutionSpan = (ulong)resolutionInterval.Ticks;

        // Peak mode reads full resolution but publishes per 2x-resolution window.
        if (includePeaks)
        {
            resolutionSpan *= 2UL;
        }

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            HistorianKey key = new HistorianKey();
            HistorianValue value = new HistorianValue();
            Peak peak = Peak.Default;

            while (stream.Read(key, value))
            {
                ulong pointID = key.PointID;
                ulong timestamp = key.Timestamp;
                float pointValue = value.AsSingle;

                // Track per-point min/max across the current window.
                if (includePeaks)
                {
                    peak = peaks.GetOrAdd(pointID, _ => new Peak());
                    peak.Set(pointValue, timestamp);
                }

                // Skip points that fall inside the current decimation window.
                if (resolutionSpan > 0UL && timestamp - lastTimes.GetOrAdd(pointID, 0UL) < resolutionSpan)
                {
                    continue;
                }

                // New value is ready for publication
                string target = targetMap[pointID];
                MeasurementStateFlags flags = (MeasurementStateFlags)value.Value3;

                if (includePeaks)
                {
                    // Publish min then max (if distinct), each at its own timestamp.
                    if (peak.MinTimestamp > 0UL)
                    {
                        yield return(new DataSourceValue
                        {
                            Target = target,
                            Value = peak.Min,
                            Time = (peak.MinTimestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                            Flags = flags
                        });
                    }

                    if (peak.MaxTimestamp != peak.MinTimestamp)
                    {
                        yield return(new DataSourceValue
                        {
                            Target = target,
                            Value = peak.Max,
                            Time = (peak.MaxTimestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                            Flags = flags
                        });
                    }

                    peak.Reset();
                }
                else
                {
                    yield return(new DataSourceValue
                    {
                        Target = target,
                        Value = pointValue,
                        Time = (timestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                        Flags = flags
                    });
                }

                lastTimes[pointID] = timestamp;
            }
        }
    }
}
/// <summary>
/// Bulk-loads every key/value pair from <paramref name="treeStream"/> into a freshly created
/// chain of sibling nodes at the specified tree <paramref name="level"/>. Each time a node
/// fills up, a new one is allocated via <paramref name="getNextNewNodeIndex"/> and the split
/// key is recorded in <paramref name="sparseIndex"/> (see NewNodeThenInsert).
/// </summary>
/// <param name="encodingMethod">the encoding definition used to look up the pair encoder</param>
/// <param name="stream">the pointer-based binary stream backing the node blocks</param>
/// <param name="blockSize">the size of each node block, in bytes</param>
/// <param name="level">the tree level of the nodes being created</param>
/// <param name="startingNodeIndex">index of the first node to write into</param>
/// <param name="getNextNewNodeIndex">callback that allocates the index for each additional node</param>
/// <param name="sparseIndex">writer that receives (key, node index) entries for the level above</param>
/// <param name="treeStream">the stream of points to load; presumably already in key order — TODO confirm</param>
/// <exception cref="Exception">thrown when the block size cannot hold at least 4 records per node</exception>
public static void Create(EncodingDefinition encodingMethod, BinaryStreamPointerBase stream, int blockSize, byte level, uint startingNodeIndex, Func<uint> getNextNewNodeIndex, SparseIndexWriter<TKey> sparseIndex, TreeStream<TKey, TValue> treeStream)
{
    NodeHeader<TKey> header = new NodeHeader<TKey>(level, blockSize);
    PairEncodingBase<TKey, TValue> encoding = Library.Encodings.GetEncodingMethod<TKey, TValue>(encodingMethod);
    SparseIndexWriter<TKey> sparseIndex1 = sparseIndex;
    Func<uint> getNextNewNodeIndex1 = getNextNewNodeIndex;

    // Worst-case bytes a single encoded pair can occupy; used both for the scratch buffer
    // and for the "is there room left in this node" checks below.
    int maximumStorageSize = encoding.MaxCompressionSize;
    byte[] buffer1 = new byte[maximumStorageSize];

    if ((header.BlockSize - header.HeaderSize) / maximumStorageSize < 4)
    {
        throw new Exception("Tree must have at least 4 records per node. Increase the block size or decrease the size of the records.");
    }

    //InsideNodeBoundary = m_BoundsFalse;
    // Initialize the first node: empty record set, no siblings, full key range.
    header.NodeIndex = startingNodeIndex;
    header.RecordCount = 0;
    header.ValidBytes = (ushort)header.HeaderSize;
    header.LeftSiblingNodeIndex = uint.MaxValue;
    header.RightSiblingNodeIndex = uint.MaxValue;
    header.LowerKey.SetMin();
    header.UpperKey.SetMax();

    byte *writePointer = stream.GetWritePointer(blockSize * header.NodeIndex, blockSize);

    fixed(byte *buffer = buffer1)
    {
        // Two key/value slots are used alternately: on each iteration one slot holds the
        // just-read "current" pair and the other holds the "previous" pair, which the
        // encoder receives first — presumably for delta compression against the prior
        // record (see PairEncodingBase.Encode) — confirm against the encoding contract.
        TKey key1 = new TKey();
        TKey key2 = new TKey();
        TValue value1 = new TValue();
        TValue value2 = new TValue();
        key1.Clear();
        key2.Clear();
        value1.Clear();
        value2.Clear();

Read1:
        //Read part 1.
        // key2/value2 = previous pair, key1/value1 = current pair.
        if (treeStream.Read(key1, value1))
        {
            if (header.RemainingBytes < maximumStorageSize)
            {
                // Node may be full: do a throwaway encode into the scratch buffer to get the
                // actual encoded size, and roll over to a new node if it does not fit.
                if (header.RemainingBytes < encoding.Encode(buffer, key2, value2, key1, value1))
                {
                    NewNodeThenInsert(header, sparseIndex1, getNextNewNodeIndex1(), writePointer, key1);
                    // Reset the "previous" pair so the first record of the new node is
                    // encoded against a cleared baseline.
                    key2.Clear();
                    value2.Clear();
                    writePointer = stream.GetWritePointer(blockSize * header.NodeIndex, blockSize);
                }
            }
            byte *stream1 = writePointer + header.ValidBytes;
            header.ValidBytes += (ushort)encoding.Encode(stream1, key2, value2, key1, value1);
            header.RecordCount++;

            //Read part 2.
            // Same as part 1 with the slot roles swapped: key1/value1 are now "previous".
            if (treeStream.Read(key2, value2))
            {
                if (header.RemainingBytes < maximumStorageSize)
                {
                    if (header.RemainingBytes < encoding.Encode(buffer, key1, value1, key2, value2))
                    {
                        NewNodeThenInsert(header, sparseIndex1, getNextNewNodeIndex1(), writePointer, key2);
                        key1.Clear();
                        value1.Clear();
                        writePointer = stream.GetWritePointer(blockSize * header.NodeIndex, blockSize);
                    }
                }
                byte *stream2 = writePointer + header.ValidBytes;
                header.ValidBytes += (ushort)encoding.Encode(stream2, key1, value1, key2, value2);
                header.RecordCount++;
                goto Read1;
            }
        }
    }

    // Persist the header of the final (possibly partially filled) node.
    header.Save(writePointer);
}
/// <summary>
/// Starts a query that will read data source values, given a set of point IDs and targets, over a time range.
/// </summary>
/// <param name="startTime">Start-time for query.</param>
/// <param name="stopTime">Stop-time for query.</param>
/// <param name="interval">Interval from Grafana request.</param>
/// <param name="decimate">Flag that determines if data should be decimated over provided time range.</param>
/// <param name="targetMap">Set of IDs with associated targets to query.</param>
/// <returns>Queried data source data in terms of value and time.</returns>
protected override IEnumerable<DataSourceValue> QueryDataSourceValues(DateTime startTime, DateTime stopTime, string interval, bool decimate, Dictionary<ulong, string> targetMap)
{
    SnapServer server = GetAdapterInstance(InstanceName)?.Server?.Host;

    if ((object)server == null)
        yield break;

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(InstanceName))
    {
        if ((object)database == null)
            yield break;

        Resolution resolution = TrendValueAPI.EstimatePlotResolution(InstanceName, startTime, stopTime, targetMap.Keys);
        SeekFilterBase<HistorianKey> timeFilter;

        // Set data scan resolution
        if (!decimate || resolution == Resolution.Full)
        {
            timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
        }
        else
        {
            TimeSpan resolutionInterval = resolution.GetInterval();
            BaselineTimeInterval timeInterval = BaselineTimeInterval.Second;

            if (resolutionInterval.Ticks < Ticks.PerMinute)
                timeInterval = BaselineTimeInterval.Second;
            else if (resolutionInterval.Ticks < Ticks.PerHour)
                timeInterval = BaselineTimeInterval.Minute;
            else if (resolutionInterval.Ticks == Ticks.PerHour)
                timeInterval = BaselineTimeInterval.Hour;

            startTime = startTime.BaselinedTimestamp(timeInterval);
            stopTime = stopTime.BaselinedTimestamp(timeInterval);

            // Interval-data tolerance window, in milliseconds; defaults to 1 ms unless
            // overridden by the "HistoryTolerance" system setting.
            int milliseconds = 1;

            try
            {
                ConfigurationFile configFile = ConfigurationFile.Open(AppDomain.CurrentDomain.SetupInformation.ConfigurationFile);
                CategorizedSettingsSection categorizedSettings = configFile.Settings;
                CategorizedSettingsElementCollection systemSettings = categorizedSettings["systemSettings"];
                string val = systemSettings["HistoryTolerance"].Value;

                // BUG FIX: previously the configured value was read but never applied,
                // leaving the tolerance permanently at its 1 ms default.
                if (int.TryParse(val, out int configuredTolerance) && configuredTolerance > 0)
                    milliseconds = configuredTolerance;
            }
            catch
            {
                // something went wrong, so just use original default
            }

            timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond * milliseconds));
        }

        // Setup point ID selections
        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(targetMap.Keys);

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            HistorianKey key = new HistorianKey();
            HistorianValue value = new HistorianValue();

            while (stream.Read(key, value))
            {
                yield return new DataSourceValue
                {
                    Target = targetMap[key.PointID],
                    Time = (key.Timestamp - m_baseTicks) / (double)Ticks.PerMillisecond,
                    Value = value.AsSingle,
                    Flags = (MeasurementStateFlags)value.Value3
                };
            }
        }
    }
}
/// <summary>
/// Drains the supplied historian stream into a list of <see cref="ArchiveDataPoint"/> instances,
/// mapping each point's state flags onto a legacy <see cref="Quality"/> value.
/// </summary>
/// <param name="stream">The historian key/value stream to read to completion.</param>
/// <returns>The materialized list of data points read from the stream.</returns>
private IEnumerable<IDataPoint> ReadDataStream(TreeStream<HistorianKey, HistorianValue> stream)
{
    HistorianKey currentKey = new HistorianKey();
    HistorianValue currentValue = new HistorianValue();
    List<ArchiveDataPoint> results = new List<ArchiveDataPoint>();

    while (stream.Read(currentKey, currentValue))
    {
        MeasurementStateFlags flags = (MeasurementStateFlags)currentValue.Value3;

        // BadData dominates; otherwise BadTime downgrades Good to Old.
        Quality quality;

        if ((flags & MeasurementStateFlags.BadData) == 0)
            quality = (flags & MeasurementStateFlags.BadTime) == 0 ? Quality.Good : Quality.Old;
        else
            quality = Quality.SuspectData;

        ArchiveDataPoint dataPoint = new ArchiveDataPoint((int)currentKey.PointID)
        {
            Time = new TimeTag(new DateTime((long)currentKey.Timestamp)),
            Value = BitConvert.ToSingle(currentValue.Value1),
            Quality = quality
        };

        results.Add(dataPoint);
    }

    return results;
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="connection">openHistorian connection.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementID">Measurement ID to test for data continuity.</param>
/// <param name="resolution">Resolution for testing data.</param>
/// <param name="expectedFullResolutionTicks">Expected number of ticks per interval at full resolution, e.g., 33,333 = 1/30 of a second representing a sampling interval of 30 times per second.</param>
/// <returns>Enumeration of valid data ranges for specified time range.</returns>
/// <remarks>
/// 1 tick = 100 nanoseconds.
/// </remarks>
public static IEnumerable<Tuple<DateTime, DateTime>> GetContiguousDataRegions(Connection connection, DateTime startTime, DateTime stopTime, ulong measurementID, Resolution resolution, long expectedFullResolutionTicks = 333333)
{
    // Setup time-range and point ID selections
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromPointID<HistorianKey, HistorianValue>(measurementID);
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    TimeSpan interval, tolerance;

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        interval = new TimeSpan(expectedFullResolutionTicks);
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        interval = resolution.GetInterval();
        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    // PMUs times may float a little - provide a one millisecond tolerance window above the standard interval
    tolerance = interval.Add(TimeSpan.FromMilliseconds(1.0D));

    DateTime lastStartTime = startTime;
    DateTime lastStopTime = startTime;
    DateTime nextExpectedTime = startTime;
    DateTime currentTime;
    long totalRegions = 0;

    // Start stream reader for the provided time window and selected points
    using (Database database = connection.OpenDatabase())
    // FIX: the TreeStream was previously never disposed; wrap in using for consistency
    // with the other read paths in this file.
    using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
    {
        // Scan historian stream for given point over specified time range and data resolution
        while (stream.Read(key, value))
        {
            currentTime = key.TimestampAsDate;

            // See if current time was not expected time and gap is larger than resolution tolerance - could simply
            // be user started with a time that was not aligned with desired resolution, hence the tolerance check
            if (currentTime != nextExpectedTime && currentTime - nextExpectedTime > tolerance)
            {
                if (lastStartTime != lastStopTime)
                {
                    // Detected a data gap, return last contiguous region
                    totalRegions++;
                    yield return new Tuple<DateTime, DateTime>(lastStartTime, lastStopTime);
                }

                // Move start time to current value
                lastStartTime = currentTime;
                lastStopTime = lastStartTime;
                nextExpectedTime = lastStartTime + interval;
            }
            else
            {
                // Setup next expected timestamp
                nextExpectedTime += interval;
                lastStopTime = currentTime;
            }
        }

        // If no data gaps were detected, return a single value for full region for where there was data
        if (totalRegions == 0 && lastStartTime != lastStopTime)
        {
            yield return new Tuple<DateTime, DateTime>(lastStartTime, lastStopTime);
        }
    }
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="server">The server to use for the query.</param>
/// <param name="instanceName">Name of the archive to be queried.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <param name="seriesLimit">Maximum number of points per series.</param>
/// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param>
/// <param name="cancellationToken">Cancellation token for query.</param>
/// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns>
public static IEnumerable<TrendValue> GetHistorianData(SnapServer server, string instanceName, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null)
{
    if (cancellationToken == null)
    {
        cancellationToken = new CancellationToken();
    }

    if (server == null)
    {
        yield break;
    }

    // Setting series limit to zero requests full resolution data, which overrides provided parameter
    if (seriesLimit < 1)
    {
        resolution = Resolution.Full;
        forceLimit = false;
    }

    TimeSpan resolutionInterval = resolution.GetInterval();
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // True when the query is decimated (coarser than full resolution); enables the
    // min/max extremes tracking between published points below.
    bool subFullResolution = false;

    // Set data scan resolution
    if (resolution != Resolution.Full)
    {
        subFullResolution = true;

        // Baseline (align) the query window to the unit appropriate for the resolution interval.
        BaselineTimeInterval interval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
        {
            interval = BaselineTimeInterval.Second;
        }
        else if (resolutionInterval.Ticks < Ticks.PerHour)
        {
            interval = BaselineTimeInterval.Minute;
        }
        else if (resolutionInterval.Ticks == Ticks.PerHour)
        {
            interval = BaselineTimeInterval.Hour;
        }

        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);
    }

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    Dictionary<ulong, DataRow> metadata = null;

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName))
    {
        if (database == null)
        {
            yield break;
        }

        if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? DefaultInstanceName, out LocalOutputAdapter historianAdapter))
        {
            metadata = historianAdapter?.Measurements;
        }

        if (metadata == null)
        {
            yield break;
        }

        // Setup point ID selections; null filter means "all points"
        if (measurementIDs != null)
        {
            pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);
        }
        else
        {
            measurementIDs = metadata.Keys.ToArray();
        }

        // Per-point bookkeeping: published count, last published timestamp, and
        // (min, max) extremes accumulated since the last publication.
        Dictionary<ulong, long> pointCounts = new Dictionary<ulong, long>(measurementIDs.Length);
        Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(measurementIDs.Length);
        Dictionary<ulong, Tuple<float, float>> extremes = new Dictionary<ulong, Tuple<float, float>>(measurementIDs.Length);
        ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value;
        long pointCount;
        float pointValue, min = 0.0F, max = 0.0F;

        foreach (ulong measurementID in measurementIDs)
        {
            pointCounts[measurementID] = 0L;
        }

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value) && !cancellationToken.IsCancelled)
            {
                pointID = key.PointID;
                timestamp = key.Timestamp;
                pointCount = pointCounts[pointID];
                pointValue = value.AsSingle;

                if (subFullResolution)
                {
                    // Fold the current value into this point's running extremes, then publish
                    // whichever extreme has the larger magnitude (sentinels float.MaxValue /
                    // float.MinValue mean "no min/max recorded yet").
                    Tuple<float, float> stats = extremes.GetOrAdd(pointID, _ => new Tuple<float, float>(float.MaxValue, float.MinValue));
                    min = stats.Item1;
                    max = stats.Item2;

                    if (pointValue < min)
                    {
                        min = pointValue;
                    }

                    if (pointValue > max)
                    {
                        max = pointValue;
                    }

                    if (min != float.MaxValue && max != float.MinValue)
                    {
                        pointValue = Math.Abs(max) > Math.Abs(min) ? max : min;
                    }
                    else if (min != float.MaxValue)
                    {
                        pointValue = min;
                    }
                    else if (max != float.MinValue)
                    {
                        pointValue = max;
                    }
                }

                // Publish only when at least one resolution span has elapsed since the last
                // publication for this point (first point always publishes: lastTimes starts at 0).
                if (timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan)
                {
                    pointCount++;

                    // NOTE(review): break ends the whole stream once any single series exceeds
                    // the limit, not just that series — presumably intentional; confirm.
                    if (forceLimit && pointCount > seriesLimit)
                    {
                        break;
                    }

                    yield return(new TrendValue
                    {
                        ID = (long)pointID,
                        Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond,
                        Value = pointValue
                    });

                    lastTimes[pointID] = timestamp;

                    // Reset extremes at each point publication
                    if (subFullResolution)
                    {
                        extremes[pointID] = new Tuple<float, float>(float.MaxValue, float.MinValue);
                    }
                }
                else if (subFullResolution)
                {
                    // Track extremes over interval
                    extremes[pointID] = new Tuple<float, float>(min, max);
                }

                pointCounts[pointID] = pointCount;
            }
        }
    }
}
/// <summary>
/// Reads the entire benchmark archive back and verifies that exactly <c>PointsToArchive</c>
/// sequential points exist, each with zeroed timestamp, entry number, and value fields.
/// Throws on any missing, extra, or corrupt point.
/// </summary>
public void VerifyDB()
{
    //Logger.ReportToConsole(VerboseLevel.All ^ VerboseLevel.DebugLow);
    //Logger.ConsoleSubscriber.AddIgnored(Logger.LookupType("GSF.SortedTreeStore"));
    Globals.MemoryPool.SetMaximumBufferSize(1000 * 1024 * 1024);
    Globals.MemoryPool.SetTargetUtilizationLevel(TargetUtilizationLevels.Low);

    HistorianServerDatabaseConfig settings = new HistorianServerDatabaseConfig("DB", "c:\\temp\\benchmark\\", true);

    using (SnapServer engine = new SnapServer(settings))
    using (SnapClient client = SnapClient.Connect(engine))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> db = client.GetDatabase<HistorianKey, HistorianValue>("DB"))
    using (TreeStream<HistorianKey, HistorianValue> scan = db.Read(null, null, null))
    {
        HistorianKey key = new HistorianKey();
        HistorianValue value = new HistorianValue();
        Stopwatch sw = new Stopwatch();
        sw.Start();

        for (int x = 0; x < PointsToArchive; x++)
        {
            if (!scan.Read(key, value))
            {
                throw new Exception("Missing points");
            }
            if (key.PointID != (ulong)x)
            {
                throw new Exception("Corrupt");
            }
            if (key.Timestamp != 0)
            {
                throw new Exception("Corrupt");
            }
            if (key.EntryNumber != 0)
            {
                throw new Exception("Corrupt");
            }
            if (value.Value1 != 0)
            {
                throw new Exception("Corrupt");
            }
            // BUG FIX: Value1 was checked three times; Value2 and Value3 were never verified.
            if (value.Value2 != 0)
            {
                throw new Exception("Corrupt");
            }
            if (value.Value3 != 0)
            {
                throw new Exception("Corrupt");
            }
        }

        double totalTime = sw.Elapsed.TotalSeconds;
        Console.WriteLine("Completed read test in {0:#,##0.00} seconds at {1:#,##0.00} points per second", totalTime, PointsToArchive / totalTime);

        if (scan.Read(key, value))
        {
            throw new Exception("too many points");
        }
    }
}
/// <summary>
/// Streams historian data as CSV to <paramref name="responseStream"/> for the points, time range,
/// and formatting options specified in <paramref name="requestParameters"/>. A reader task pulls
/// values from the historian and buffers CSV rows; a writer task concurrently drains the buffer
/// to the response stream.
/// </summary>
/// <param name="securityPrincipal">Principal used to validate export access rights.</param>
/// <param name="requestParameters">Query parameters: PointIDs, StartTime, EndTime, TSSnap, FrameRate, AlignTimestamps, MissingAsNaN, FillMissingTimestamps, InstanceName, TSTolerance (ms).</param>
/// <param name="responseStream">Destination stream for the CSV output.</param>
/// <param name="cancellationToken">Token used to abort both the reader and writer tasks.</param>
/// <exception cref="ArgumentNullException">Thrown when a required parameter is missing or PointIDs fail to parse.</exception>
/// <exception cref="ArgumentException">Thrown when StartTime/EndTime fail to parse.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown when start time exceeds end time.</exception>
/// <exception cref="SecurityException">Thrown when the user lacks the minimum required roles.</exception>
/// <exception cref="InvalidOperationException">Thrown when the historian server instance is unavailable.</exception>
private async Task CopyModelAsCsvToStreamAsync(SecurityPrincipal securityPrincipal, NameValueCollection requestParameters, Stream responseStream, CancellationToken cancellationToken)
{
    const double DefaultFrameRate = 30;
    const int DefaultTimestampSnap = 0;

    string dateTimeFormat = Program.Host.Model.Global.DateTimeFormat;

    // TODO: Improve operation for large point lists:
    // Pick-up "POST"ed parameters with a "genurl" param, then cache parameters
    // in a memory cache and return the unique URL (a string instead of a file)
    // with a "download" param and unique ID associated with cached parameters.
    // Then extract params based on unique ID and follow normal steps...

    // Note TSTolerance is in ms
    string pointIDsParam = requestParameters["PointIDs"];
    string startTimeParam = requestParameters["StartTime"];
    string endTimeParam = requestParameters["EndTime"];
    string timestampSnapParam = requestParameters["TSSnap"];
    string frameRateParam = requestParameters["FrameRate"];
    string alignTimestampsParam = requestParameters["AlignTimestamps"];
    string missingAsNaNParam = requestParameters["MissingAsNaN"];
    string fillMissingTimestampsParam = requestParameters["FillMissingTimestamps"];
    string instanceName = requestParameters["InstanceName"];
    string toleranceParam = requestParameters["TSTolerance"];

    ulong[] pointIDs;
    string headers;

    if (string.IsNullOrEmpty(pointIDsParam))
    {
        throw new ArgumentNullException("PointIDs", "Cannot export data: no values were provided in \"PointIDs\" parameter.");
    }

    try
    {
        pointIDs = pointIDsParam.Split(',').Select(ulong.Parse).ToArray();
        Array.Sort(pointIDs);
    }
    catch (Exception ex)
    {
        throw new ArgumentNullException("PointIDs", $"Cannot export data: failed to parse \"PointIDs\" parameter value \"{pointIDsParam}\": {ex.Message}");
    }

    if (string.IsNullOrEmpty(startTimeParam))
    {
        throw new ArgumentNullException("StartTime", "Cannot export data: no \"StartTime\" parameter value was specified.");
    }

    // BUG FIX: this guard previously re-tested pointIDsParam, so a missing "EndTime"
    // parameter slipped past validation and failed later during date parsing.
    if (string.IsNullOrEmpty(endTimeParam))
    {
        throw new ArgumentNullException("EndTime", "Cannot export data: no \"EndTime\" parameter value was specified.");
    }

    DateTime startTime, endTime;

    try
    {
        startTime = DateTime.ParseExact(startTimeParam, dateTimeFormat, null, DateTimeStyles.AdjustToUniversal);
    }
    catch (Exception ex)
    {
        throw new ArgumentException($"Cannot export data: failed to parse \"StartTime\" parameter value \"{startTimeParam}\". Expected format is \"{dateTimeFormat}\". Error message: {ex.Message}", "StartTime", ex);
    }

    try
    {
        endTime = DateTime.ParseExact(endTimeParam, dateTimeFormat, null, DateTimeStyles.AdjustToUniversal);
    }
    catch (Exception ex)
    {
        throw new ArgumentException($"Cannot export data: failed to parse \"EndTime\" parameter value \"{endTimeParam}\". Expected format is \"{dateTimeFormat}\". Error message: {ex.Message}", "EndTime", ex);
    }

    if (startTime > endTime)
    {
        throw new ArgumentOutOfRangeException("StartTime", "Cannot export data: start time exceeds end time.");
    }

    using (DataContext dataContext = new DataContext())
    {
        // Validate current user has access to requested data
        if (!dataContext.UserIsInRole(securityPrincipal, s_minimumRequiredRoles))
        {
            throw new SecurityException($"Cannot export data: access is denied for user \"{Thread.CurrentPrincipal.Identity?.Name ?? "Undefined"}\", minimum required roles = {s_minimumRequiredRoles.ToDelimitedString(", ")}.");
        }

        headers = GetHeaders(dataContext, pointIDs.Select(id => (int)id));
    }

    if (!double.TryParse(frameRateParam, out double frameRate))
    {
        frameRate = DefaultFrameRate;
    }

    if (!int.TryParse(timestampSnapParam, out int timestampSnap))
    {
        timestampSnap = DefaultTimestampSnap;
    }

    if (!double.TryParse(toleranceParam, out double tolerance))
    {
        tolerance = 0.5;
    }

    int toleranceTicks = (int)Math.Ceiling(tolerance * Ticks.PerMillisecond);
    bool alignTimestamps = alignTimestampsParam?.ParseBoolean() ?? true;
    bool missingAsNaN = missingAsNaNParam?.ParseBoolean() ?? true;
    bool fillMissingTimestamps = alignTimestamps && (fillMissingTimestampsParam?.ParseBoolean() ?? false);

    if (string.IsNullOrEmpty(instanceName))
    {
        instanceName = TrendValueAPI.DefaultInstanceName;
    }

    LocalOutputAdapter.Instances.TryGetValue(instanceName, out LocalOutputAdapter adapter);
    HistorianServer serverInstance = adapter?.Server;

    if (serverInstance == null)
    {
        throw new InvalidOperationException($"Cannot export data: failed to access internal historian server instance \"{instanceName}\".");
    }

    const int TargetBufferSize = 524288;

    StringBuilder readBuffer = new StringBuilder(TargetBufferSize * 2);
    ManualResetEventSlim bufferReady = new ManualResetEventSlim(false);
    List<string> writeBuffer = new List<string>();
    object writeBufferLock = new object();

    // NOTE(review): readComplete is shared across the two tasks without a memory barrier;
    // bufferReady.Set() in the finally block presumably provides the needed visibility — confirm.
    bool readComplete = false;

    Task readTask = Task.Factory.StartNew(() =>
    {
        try
        {
            using (SnapClient connection = SnapClient.Connect(serverInstance.Host))
            {
                Dictionary<ulong, int> pointIDIndex = new Dictionary<ulong, int>(pointIDs.Length);
                float[] values = new float[pointIDs.Length];

                for (int i = 0; i < pointIDs.Length; i++)
                {
                    pointIDIndex.Add(pointIDs[i], i);
                }

                for (int i = 0; i < values.Length; i++)
                {
                    values[i] = float.NaN;
                }

                // Expected tick spacing between CSV rows derived from the frame rate.
                ulong interval;

                if (Math.Abs(frameRate % 1) <= (double.Epsilon * 100))
                {
                    // Integer frame rate: use the actual subsecond distribution spacing
                    Ticks[] subseconds = Ticks.SubsecondDistribution((int)frameRate);
                    interval = (ulong)(subseconds.Length > 1 ? subseconds[1].Value : Ticks.PerSecond);
                }
                else
                {
                    // NOTE(review): for fractional frame rates > 1 this floors to zero — confirm intent
                    interval = (ulong)(Math.Floor(1.0d / frameRate) * Ticks.PerSecond);
                }

                ulong lastTimestamp = 0;

                // Write data pages
                SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, endTime);
                MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);
                HistorianKey historianKey = new HistorianKey();
                HistorianValue historianValue = new HistorianValue();

                // Write row values function: append the current row's values, and hand the
                // accumulated text to the writer task once it crosses the target size.
                Action bufferValues = () =>
                {
                    readBuffer.Append(missingAsNaN ? string.Join(",", values) : string.Join(",", values.Select(val => float.IsNaN(val) ? "" : $"{val}")));

                    if (readBuffer.Length < TargetBufferSize)
                    {
                        return;
                    }

                    lock (writeBufferLock)
                        writeBuffer.Add(readBuffer.ToString());

                    readBuffer.Clear();
                    bufferReady.Set();
                };

                using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName))
                // FIX: the TreeStream was previously never disposed; wrap in using for
                // consistency with the other read paths in this file.
                using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
                {
                    ulong timestamp = 0;

                    // Adjust timestamp to use first timestamp as base
                    bool adjustTimeStamp = true;
                    long baseTime = startTime.Ticks;

                    if (timestampSnap == 0)
                    {
                        adjustTimeStamp = false;
                        baseTime = Ticks.RoundToSecondDistribution(startTime.Ticks, frameRate, startTime.Ticks - startTime.Ticks % Ticks.PerSecond);
                    }
                    else if (timestampSnap == 1)
                    {
                        adjustTimeStamp = true;
                    }
                    else if (timestampSnap == 2)
                    {
                        adjustTimeStamp = false;
                        baseTime = startTime.Ticks;
                    }

                    while (stream.Read(historianKey, historianValue) && !cancellationToken.IsCancellationRequested)
                    {
                        if (alignTimestamps)
                        {
                            if (adjustTimeStamp)
                            {
                                adjustTimeStamp = false;
                                baseTime = (long)historianKey.Timestamp;
                            }

                            // Make sure the timestamp is actually close enough to the distribution
                            Ticks ticks = Ticks.ToSecondDistribution((long)historianKey.Timestamp, frameRate, baseTime, toleranceTicks);

                            if (ticks == Ticks.MinValue)
                            {
                                continue;
                            }

                            timestamp = (ulong)ticks.Value;
                        }
                        else
                        {
                            timestamp = historianKey.Timestamp;
                        }

                        // Start a new row for each encountered new timestamp
                        if (timestamp != lastTimestamp)
                        {
                            if (lastTimestamp > 0)
                            {
                                bufferValues();
                            }

                            for (int i = 0; i < values.Length; i++)
                            {
                                values[i] = float.NaN;
                            }

                            if (fillMissingTimestamps && lastTimestamp > 0 && timestamp > lastTimestamp)
                            {
                                ulong difference = timestamp - lastTimestamp;

                                if (difference > interval)
                                {
                                    // Emit all-NaN rows for each expected timestamp inside the gap
                                    ulong interpolated = lastTimestamp;

                                    for (ulong i = 1; i < difference / interval; i++)
                                    {
                                        interpolated = (ulong)Ticks.RoundToSecondDistribution((long)(interpolated + interval), frameRate, startTime.Ticks).Value;
                                        readBuffer.Append($"{Environment.NewLine}{new DateTime((long)interpolated, DateTimeKind.Utc).ToString(dateTimeFormat)},");
                                        bufferValues();
                                    }
                                }
                            }

                            readBuffer.Append($"{Environment.NewLine}{new DateTime((long)timestamp, DateTimeKind.Utc).ToString(dateTimeFormat)},");
                            lastTimestamp = timestamp;
                        }

                        // Save value to its column
                        values[pointIDIndex[historianKey.PointID]] = historianValue.AsSingle;
                    }

                    if (timestamp > 0)
                    {
                        bufferValues();
                    }

                    if (readBuffer.Length > 0)
                    {
                        lock (writeBufferLock)
                            writeBuffer.Add(readBuffer.ToString());
                    }
                }
            }
        }
        finally
        {
            // Always release the writer, even on failure, so it can observe completion.
            readComplete = true;
            bufferReady.Set();
        }
    }, cancellationToken);

    Task writeTask = Task.Factory.StartNew(() =>
    {
        using (StreamWriter writer = new StreamWriter(responseStream))
        {
            //Ticks exportStart = DateTime.UtcNow.Ticks;
            string[] localBuffer;

            // Write column headers
            writer.Write(headers);

            while ((writeBuffer.Count > 0 || !readComplete) && !cancellationToken.IsCancellationRequested)
            {
                bufferReady.Wait(cancellationToken);
                bufferReady.Reset();

                lock (writeBufferLock)
                {
                    localBuffer = writeBuffer.ToArray();
                    writeBuffer.Clear();
                }

                foreach (string buffer in localBuffer)
                {
                    writer.Write(buffer);
                }
            }

            // Flush stream
            writer.Flush();
            //Debug.WriteLine("Export time: " + (DateTime.UtcNow.Ticks - exportStart).ToElapsedTimeString(3));
        }
    }, cancellationToken);

    await readTask;
    await writeTask;
}