public void ConsoleTest3()
{
    MemoryPoolTest.TestMemoryLeak();

    ArchiveList<HistorianKey, HistorianValue> list = new ArchiveList<HistorianKey, HistorianValue>(null);
    DateTime start = DateTime.Now.Date;

    for (int x = 0; x < 3; x++)
    {
        SortedTreeTable<HistorianKey, HistorianValue> table1 = CreateTable();
        AddDataTerminal(table1, (ulong)x, start, new TimeSpan(TimeSpan.TicksPerSecond), (ulong)(1000 * x), 1, 60 * 60);

        using (ArchiveListEditor<HistorianKey, HistorianValue> editor = list.AcquireEditLock())
        {
            editor.Add(table1);
        }
    }

    for (int x = 0; x < 3; x++)
    {
        SortedTreeTable<HistorianKey, HistorianValue> table1 = CreateTable();
        AddDataTerminal(table1, (ulong)x, start, new TimeSpan(TimeSpan.TicksPerSecond), (ulong)(1000 * x), 1, 60 * 60);

        using (ArchiveListEditor<HistorianKey, HistorianValue> editor = list.AcquireEditLock())
        {
            editor.Add(table1);
        }
    }

    SeekFilterBase<HistorianKey> filter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(start, start.AddMinutes(10), new TimeSpan(TimeSpan.TicksPerSecond * 60), new TimeSpan(TimeSpan.TicksPerSecond));
    SequentialReaderStream<HistorianKey, HistorianValue> sequencer = new SequentialReaderStream<HistorianKey, HistorianValue>(list, null, filter);
    SortedList<DateTime, FrameData> frames = sequencer.GetFrames();
    WriteToConsole(frames);

    list.Dispose();
    MemoryPoolTest.TestMemoryLeak();
}
private IEnumerable<TrendingDataPoint> Read(IEnumerable<ulong> measurementIDs, DateTime startTime, DateTime stopTime)
{
    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    if ((object)measurementIDs != null)
        pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);

    // Start stream reader for the provided time window and selected points
    using (TreeStream<HistorianKey, HistorianValue> stream = m_database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
    {
        m_treeStreams.Add(stream);

        while (stream.Read(key, value))
        {
            yield return new TrendingDataPoint
            {
                ChannelID = (int)key.PointID.HighDoubleWord(),
                SeriesID = (SeriesID)(int)key.PointID.LowDoubleWord(),
                Timestamp = key.TimestampAsDate,
                Value = value.AsSingle
            };
        }
    }
}
public IDictionary<Guid, SignalDataBase> GetQueryResult(DateTime startTime, DateTime endTime, int zoomLevel, IEnumerable<ISignalCalculation> signals)
{
    using (ClientDatabaseBase<HistorianKey, HistorianValue> db = m_historian.GetDatabase<HistorianKey, HistorianValue>("PPA"))
    {
        //var db = m_historian.ConnectToDatabase("Full Resolution Synchrophasor");
        PeriodicScanner scanner = new PeriodicScanner(m_samplesPerSecond);
        SeekFilterBase<HistorianKey> timestamps = scanner.GetParser(startTime, endTime, 1500u);
        SortedTreeEngineReaderOptions options = new SortedTreeEngineReaderOptions(TimeSpan.FromSeconds(1));
        IDictionary<Guid, SignalDataBase> results = db.GetSignalsWithCalculations(timestamps, signals, options);
        return results;
    }
}
/// <summary>
/// Creates a new <see cref="ReportHistorianReader"/>.
/// </summary>
/// <param name="server">The <see cref="SnapServer"/> to connect to.</param>
/// <param name="instanceName">Name of the instance to connect to.</param>
/// <param name="startTime">Start time.</param>
/// <param name="endTime">End time.</param>
/// <param name="frameRate">Sampling rate of the signal.</param>
/// <param name="pointIDs">Point IDs to be collected.</param>
public ReportHistorianReader(SnapServer server, string instanceName, DateTime startTime, DateTime endTime, int frameRate, IEnumerable<ulong> pointIDs)
{
    m_client = SnapClient.Connect(server);
    m_database = m_client.GetDatabase<HistorianKey, HistorianValue>(instanceName);
    m_key = new HistorianKey();
    m_value = new HistorianValue();

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(DataPoint.RoundTimestamp(startTime, frameRate), DataPoint.RoundTimestamp(endTime, frameRate));
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);

    m_stream = m_database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);
}
public SnapDBClient(string hostAddress, int port, string instanceName, ulong startTime, ulong endTime, int frameRate, IEnumerable<ulong> pointIDs)
{
    m_client = new HistorianClient(hostAddress, port);
    m_database = m_client.GetDatabase<HistorianKey, HistorianValue>(instanceName);
    m_key = new HistorianKey();
    m_value = new HistorianValue();

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(DataPoint.RoundTimestamp(startTime, frameRate), DataPoint.RoundTimestamp(endTime, frameRate));
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);

    m_stream = m_database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);
}
public static IEnumerable GetHistorianDataSampled(SqlString historianServer, SqlString instanceName, DateTime startTime, DateTime stopTime, TimeSpan interval, [SqlFacet(MaxSize = -1)] SqlString measurementIDs)
{
    const int DefaultHistorianPort = 38402;

    if (historianServer.IsNull || string.IsNullOrEmpty(historianServer.Value))
        throw new ArgumentNullException("historianServer", "Missing historian server parameter");

    if (instanceName.IsNull || string.IsNullOrEmpty(instanceName.Value))
        throw new ArgumentNullException("instanceName", "Missing historian instance name parameter");

    if (startTime > stopTime)
        throw new ArgumentException("Invalid time range specified", "startTime");

    string[] parts = historianServer.Value.Split(':');
    string hostName = parts[0];

    if (parts.Length < 2 || !int.TryParse(parts[1], out int port))
        port = DefaultHistorianPort;

    using (HistorianClient client = new HistorianClient(hostName, port))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> reader = client.GetDatabase<HistorianKey, HistorianValue>(instanceName.Value))
    {
        SeekFilterBase<HistorianKey> timeFilter = interval.Ticks == 0 ?
            TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime) :
            TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, interval, new TimeSpan(TimeSpan.TicksPerMillisecond));

        MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
        HistorianKey key = new HistorianKey();
        HistorianValue value = new HistorianValue();

        if (!measurementIDs.IsNull && !string.IsNullOrEmpty(measurementIDs.Value))
            pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs.Value.Split(',').Select(ulong.Parse));

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = reader.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value))
                yield return new Measurement(key.PointID, key.TimestampAsDate, value.AsSingle);
        }
    }
}
public void BenchmarkRealisticSamples()
{
    MemoryPoolTest.TestMemoryLeak();

    const int Max = 1000000;
    const int FileCount = 1000;

    ArchiveList<HistorianKey, HistorianValue> list = new ArchiveList<HistorianKey, HistorianValue>(null);
    DateTime start = DateTime.Now.Date;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    for (int x = 0; x < FileCount; x++)
    {
        SortedTreeTable<HistorianKey, HistorianValue> table1 = CreateTable();
        AddData(table1, start.AddMinutes(2 * x), new TimeSpan(TimeSpan.TicksPerSecond), 60, 100, 1, Max / 60 / FileCount);

        using (ArchiveListEditor<HistorianKey, HistorianValue> editor = list.AcquireEditLock())
        {
            editor.Add(table1);
        }
    }

    SeekFilterBase<HistorianKey> filter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(start, start.AddMinutes(2 * FileCount), new TimeSpan(TimeSpan.TicksPerSecond * 2), new TimeSpan(TimeSpan.TicksPerMillisecond));
    SequentialReaderStream<HistorianKey, HistorianValue> sequencer = new SequentialReaderStream<HistorianKey, HistorianValue>(list, null, filter);

    DebugStopwatch sw = new DebugStopwatch();
    int xi = 0;

    double sec = sw.TimeEvent(() =>
    {
        SequentialReaderStream<HistorianKey, HistorianValue> scanner = sequencer;

        while (scanner.Read(key, value))
            xi++;
    });

    System.Console.WriteLine(Max / sec / 1000000);

    //TreeKeyMethodsBase<HistorianKey>.WriteToConsole();
    //TreeValueMethodsBase<HistorianValue>.WriteToConsole();

    //Console.WriteLine("KeyMethodsBase calls");
    //for (int x = 0; x < 23; x++)
    //    Console.WriteLine(TreeKeyMethodsBase<HistorianKey>.CallMethods[x] + "\t" + ((TreeKeyMethodsBase<HistorianKey>.Method)(x)).ToString());

    //Console.WriteLine("ValueMethodsBase calls");
    //for (int x = 0; x < 5; x++)
    //    Console.WriteLine(TreeValueMethodsBase<HistorianValue>.CallMethods[x] + "\t" + ((TreeValueMethodsBase<HistorianValue>.Method)(x)).ToString());

    list.Dispose();
    MemoryPoolTest.TestMemoryLeak();
}
private SequentialReaderStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter, WorkerThreadSynchronization workerThreadSynchronization)
{
    if (m_disposed)
        throw new ObjectDisposedException(GetType().FullName);

    Stats.QueriesExecuted++;
    return new SequentialReaderStream<TKey, TValue>(m_archiveList, readerOptions, keySeekFilter, keyMatchFilter, workerThreadSynchronization);
}
public Ticks ReadBackHistorianData(HistorianIArchive archive, Action<int> updateProgressBar)
{
    IEnumerable<ulong> points;

    if (m_settings.ReadFromCsv)
        points = m_indexToPointIDLookup.Skip(1); // First value is always 0 because the timestamp is the first column
    else
        points = m_points;

    if (points == null)
    {
        ShowMessage("Point list not initialized");
        return new Ticks(0);
    }

    int count = 0;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    TreeStream<HistorianKey, HistorianValue> m_stream;

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(DataPoint.RoundTimestamp(m_startTime, m_settings.FrameRate), DataPoint.RoundTimestamp(m_endTime, m_settings.FrameRate));
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(points);

    m_stream = archive.ClientDatabase.Read(GSF.Snap.Services.Reader.SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);

    int messageInterval = points.Count() * m_settings.MessageInterval;
    DateTime startTime = DateTime.UtcNow;

    while (m_stream.Read(key, value))
    {
        count++;

        if (count % messageInterval == 0)
        {
            PercentComplete = (int)((1.0D - (new Ticks(m_endTime.Ticks - (long)key.Timestamp).ToSeconds() / m_timeRange)) * 100.0D);
            ShowMessage($"{Environment.NewLine}{count} points read back so far, averaging {(count / (DateTime.UtcNow - startTime).TotalSeconds):N0} points per second.");
            updateProgressBar(PercentComplete);
        }
    }

    return DateTime.UtcNow - startTime;
}
public static IDictionary<Guid, SignalDataBase> GetSignalsWithCalculations(this ClientDatabaseBase<HistorianKey, HistorianValue> database, SeekFilterBase<HistorianKey> timestamps, IEnumerable<ISignalCalculation> signals, SortedTreeEngineReaderOptions readerOptions)
{
    Dictionary<ulong, SignalDataBase> queryResults = database.GetSignals(timestamps, signals, readerOptions);
    Dictionary<Guid, SignalDataBase> calculatedResults = new Dictionary<Guid, SignalDataBase>();

    foreach (ISignalCalculation signal in signals)
    {
        if (signal.HistorianId.HasValue)
            calculatedResults.Add(signal.SignalId, queryResults[signal.HistorianId.Value]);
        else
            calculatedResults.Add(signal.SignalId, new SignalData(signal.Functions));
    }

    foreach (ISignalCalculation signal in signals)
        signal.Calculate(calculatedResults);

    return calculatedResults;
}
public void TestIntervalRanges()
{
    _ = new List<ulong>();

    SeekFilterBase<HistorianKey> pointId = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(0, 100, 10, 3, 1);

    if (!pointId.GetType().FullName.Contains("IntervalRanges"))
        throw new Exception("Wrong type");

    using (BinaryStream bs = new BinaryStream(allocatesOwnMemory: true))
    {
        bs.Write(pointId.FilterType);
        pointId.Save(bs);
        bs.Position = 0;

        SeekFilterBase<HistorianKey> filter = Library.Filters.GetSeekFilter<HistorianKey>(bs.ReadGuid(), bs);

        if (!filter.GetType().FullName.Contains("IntervalRanges"))
            throw new Exception("Wrong type");
    }
}
private async Task CopyModelAsCsvToStreamAsync(SecurityPrincipal securityPrincipal, NameValueCollection requestParameters, Stream responseStream, CancellationToken cancellationToken)
{
    const double DefaultFrameRate = 30;
    const int DefaultTimestampSnap = 0;

    string dateTimeFormat = Program.Host.Model.Global.DateTimeFormat;

    // TODO: Improve operation for large point lists:
    // Pick-up "POST"ed parameters with a "genurl" param, then cache parameters
    // in a memory cache and return the unique URL (a string instead of a file)
    // with a "download" param and unique ID associated with cached parameters.
    // Then extract params based on unique ID and follow normal steps...

    // Note TSTolerance is in ms
    string pointIDsParam = requestParameters["PointIDs"];
    string startTimeParam = requestParameters["StartTime"];
    string endTimeParam = requestParameters["EndTime"];
    string timestampSnapParam = requestParameters["TSSnap"];
    string frameRateParam = requestParameters["FrameRate"];
    string alignTimestampsParam = requestParameters["AlignTimestamps"];
    string missingAsNaNParam = requestParameters["MissingAsNaN"];
    string fillMissingTimestampsParam = requestParameters["FillMissingTimestamps"];
    string instanceName = requestParameters["InstanceName"];
    string toleranceParam = requestParameters["TSTolerance"];

    ulong[] pointIDs;
    string headers;

    if (string.IsNullOrEmpty(pointIDsParam))
        throw new ArgumentNullException("PointIDs", "Cannot export data: no values were provided in \"PointIDs\" parameter.");

    try
    {
        pointIDs = pointIDsParam.Split(',').Select(ulong.Parse).ToArray();
        Array.Sort(pointIDs);
    }
    catch (Exception ex)
    {
        throw new ArgumentNullException("PointIDs", $"Cannot export data: failed to parse \"PointIDs\" parameter value \"{pointIDsParam}\": {ex.Message}");
    }

    if (string.IsNullOrEmpty(startTimeParam))
        throw new ArgumentNullException("StartTime", "Cannot export data: no \"StartTime\" parameter value was specified.");

    if (string.IsNullOrEmpty(endTimeParam))
        throw new ArgumentNullException("EndTime", "Cannot export data: no \"EndTime\" parameter value was specified.");

    DateTime startTime, endTime;

    try
    {
        startTime = DateTime.ParseExact(startTimeParam, dateTimeFormat, null, DateTimeStyles.AdjustToUniversal);
    }
    catch (Exception ex)
    {
        throw new ArgumentException($"Cannot export data: failed to parse \"StartTime\" parameter value \"{startTimeParam}\". Expected format is \"{dateTimeFormat}\". Error message: {ex.Message}", "StartTime", ex);
    }

    try
    {
        endTime = DateTime.ParseExact(endTimeParam, dateTimeFormat, null, DateTimeStyles.AdjustToUniversal);
    }
    catch (Exception ex)
    {
        throw new ArgumentException($"Cannot export data: failed to parse \"EndTime\" parameter value \"{endTimeParam}\". Expected format is \"{dateTimeFormat}\". Error message: {ex.Message}", "EndTime", ex);
    }

    if (startTime > endTime)
        throw new ArgumentOutOfRangeException("StartTime", "Cannot export data: start time exceeds end time.");

    using (DataContext dataContext = new DataContext())
    {
        // Validate current user has access to requested data
        if (!dataContext.UserIsInRole(securityPrincipal, s_minimumRequiredRoles))
            throw new SecurityException($"Cannot export data: access is denied for user \"{Thread.CurrentPrincipal.Identity?.Name ?? "Undefined"}\", minimum required roles = {s_minimumRequiredRoles.ToDelimitedString(", ")}.");

        headers = GetHeaders(dataContext, pointIDs.Select(id => (int)id));
    }

    if (!double.TryParse(frameRateParam, out double frameRate))
        frameRate = DefaultFrameRate;

    if (!int.TryParse(timestampSnapParam, out int timestampSnap))
        timestampSnap = DefaultTimestampSnap;

    if (!double.TryParse(toleranceParam, out double tolerance))
        tolerance = 0.5;

    int toleranceTicks = (int)Math.Ceiling(tolerance * Ticks.PerMillisecond);
    bool alignTimestamps = alignTimestampsParam?.ParseBoolean() ?? true;
    bool missingAsNaN = missingAsNaNParam?.ParseBoolean() ?? true;
    bool fillMissingTimestamps = alignTimestamps && (fillMissingTimestampsParam?.ParseBoolean() ?? false);

    if (string.IsNullOrEmpty(instanceName))
        instanceName = TrendValueAPI.DefaultInstanceName;

    LocalOutputAdapter.Instances.TryGetValue(instanceName, out LocalOutputAdapter adapter);
    HistorianServer serverInstance = adapter?.Server;

    if (serverInstance == null)
        throw new InvalidOperationException($"Cannot export data: failed to access internal historian server instance \"{instanceName}\".");

    const int TargetBufferSize = 524288;

    StringBuilder readBuffer = new StringBuilder(TargetBufferSize * 2);
    ManualResetEventSlim bufferReady = new ManualResetEventSlim(false);
    List<string> writeBuffer = new List<string>();
    object writeBufferLock = new object();
    bool readComplete = false;

    Task readTask = Task.Factory.StartNew(() =>
    {
        try
        {
            using (SnapClient connection = SnapClient.Connect(serverInstance.Host))
            {
                Dictionary<ulong, int> pointIDIndex = new Dictionary<ulong, int>(pointIDs.Length);
                float[] values = new float[pointIDs.Length];

                for (int i = 0; i < pointIDs.Length; i++)
                    pointIDIndex.Add(pointIDs[i], i);

                for (int i = 0; i < values.Length; i++)
                    values[i] = float.NaN;

                ulong interval;

                if (Math.Abs(frameRate % 1) <= (double.Epsilon * 100))
                {
                    Ticks[] subseconds = Ticks.SubsecondDistribution((int)frameRate);
                    interval = (ulong)(subseconds.Length > 1 ? subseconds[1].Value : Ticks.PerSecond);
                }
                else
                {
                    interval = (ulong)(Math.Floor(1.0d / frameRate) * Ticks.PerSecond);
                }

                ulong lastTimestamp = 0;

                // Write data pages
                SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, endTime);
                MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);
                HistorianKey historianKey = new HistorianKey();
                HistorianValue historianValue = new HistorianValue();

                // Write row values function
                Action bufferValues = () =>
                {
                    readBuffer.Append(missingAsNaN ? string.Join(",", values) : string.Join(",", values.Select(val => float.IsNaN(val) ? "" : $"{val}")));

                    if (readBuffer.Length < TargetBufferSize)
                        return;

                    lock (writeBufferLock)
                        writeBuffer.Add(readBuffer.ToString());

                    readBuffer.Clear();
                    bufferReady.Set();
                };

                using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName))
                {
                    // Start stream reader for the provided time window and selected points
                    TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);
                    ulong timestamp = 0;

                    // Adjust timestamp to use first timestamp as base
                    bool adjustTimeStamp = true;
                    long baseTime = startTime.Ticks;

                    if (timestampSnap == 0)
                    {
                        adjustTimeStamp = false;
                        baseTime = Ticks.RoundToSecondDistribution(startTime.Ticks, frameRate, startTime.Ticks - startTime.Ticks % Ticks.PerSecond);
                    }
                    else if (timestampSnap == 1)
                    {
                        adjustTimeStamp = true;
                    }
                    else if (timestampSnap == 2)
                    {
                        adjustTimeStamp = false;
                        baseTime = startTime.Ticks;
                    }

                    while (stream.Read(historianKey, historianValue) && !cancellationToken.IsCancellationRequested)
                    {
                        if (alignTimestamps)
                        {
                            if (adjustTimeStamp)
                            {
                                adjustTimeStamp = false;
                                baseTime = (long)historianKey.Timestamp;
                            }

                            // Make sure the timestamp is actually close enough to the distribution
                            Ticks ticks = Ticks.ToSecondDistribution((long)historianKey.Timestamp, frameRate, baseTime, toleranceTicks);

                            if (ticks == Ticks.MinValue)
                                continue;

                            timestamp = (ulong)ticks.Value;
                        }
                        else
                        {
                            timestamp = historianKey.Timestamp;
                        }

                        // Start a new row for each encountered new timestamp
                        if (timestamp != lastTimestamp)
                        {
                            if (lastTimestamp > 0)
                                bufferValues();

                            for (int i = 0; i < values.Length; i++)
                                values[i] = float.NaN;

                            if (fillMissingTimestamps && lastTimestamp > 0 && timestamp > lastTimestamp)
                            {
                                ulong difference = timestamp - lastTimestamp;

                                if (difference > interval)
                                {
                                    ulong interpolated = lastTimestamp;

                                    for (ulong i = 1; i < difference / interval; i++)
                                    {
                                        interpolated = (ulong)Ticks.RoundToSecondDistribution((long)(interpolated + interval), frameRate, startTime.Ticks).Value;
                                        readBuffer.Append($"{Environment.NewLine}{new DateTime((long)interpolated, DateTimeKind.Utc).ToString(dateTimeFormat)},");
                                        bufferValues();
                                    }
                                }
                            }

                            readBuffer.Append($"{Environment.NewLine}{new DateTime((long)timestamp, DateTimeKind.Utc).ToString(dateTimeFormat)},");
                            lastTimestamp = timestamp;
                        }

                        // Save value to its column
                        values[pointIDIndex[historianKey.PointID]] = historianValue.AsSingle;
                    }

                    if (timestamp > 0)
                        bufferValues();

                    if (readBuffer.Length > 0)
                    {
                        lock (writeBufferLock)
                            writeBuffer.Add(readBuffer.ToString());
                    }
                }
            }
        }
        finally
        {
            readComplete = true;
            bufferReady.Set();
        }
    },
    cancellationToken);

    Task writeTask = Task.Factory.StartNew(() =>
    {
        using (StreamWriter writer = new StreamWriter(responseStream))
        {
            //Ticks exportStart = DateTime.UtcNow.Ticks;
            string[] localBuffer;

            // Write column headers
            writer.Write(headers);

            while ((writeBuffer.Count > 0 || !readComplete) && !cancellationToken.IsCancellationRequested)
            {
                bufferReady.Wait(cancellationToken);
                bufferReady.Reset();

                lock (writeBufferLock)
                {
                    localBuffer = writeBuffer.ToArray();
                    writeBuffer.Clear();
                }

                foreach (string buffer in localBuffer)
                    writer.Write(buffer);
            }

            // Flush stream
            writer.Flush();

            //Debug.WriteLine("Export time: " + (DateTime.UtcNow.Ticks - exportStart).ToElapsedTimeString(3));
        }
    },
    cancellationToken);

    await readTask;
    await writeTask;
}
/// <summary>
/// Gets frames from the historian as individual frames.
/// </summary>
/// <param name="database">the database to use</param>
/// <param name="options">A list of query options</param>
/// <param name="timestamps">the timestamps to query for</param>
/// <param name="points">the points to query</param>
/// <returns>the queried frames, keyed by timestamp</returns>
public static SortedList<DateTime, FrameData> GetFrames(this IDatabaseReader<HistorianKey, HistorianValue> database, SortedTreeEngineReaderOptions options, SeekFilterBase<HistorianKey> timestamps, MatchFilterBase<HistorianKey, HistorianValue> points)
{
    return database.Read(options, timestamps, points).GetFrames();
}
/// <summary>
/// Gets frames from the historian as individual frames.
/// </summary>
/// <param name="database">the database to use</param>
/// <param name="timestamps">the timestamps to query for</param>
/// <param name="points">the points to query</param>
/// <returns>the queried frames, keyed by timestamp</returns>
public static SortedList<DateTime, FrameData> GetFrames(this IDatabaseReader<HistorianKey, HistorianValue> database, SeekFilterBase<HistorianKey> timestamps, params ulong[] points)
{
    return database.GetFrames(SortedTreeEngineReaderOptions.Default, timestamps, PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(points));
}
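The two GetFrames overloads above reduce a frame query to a single call. Below is a minimal usage sketch, not taken from any of the projects quoted here, assuming `database` is an already-open IDatabaseReader&lt;HistorianKey, HistorianValue&gt; and that point IDs 1 and 2 exist in the archive:

// Usage sketch: query the last hour for points 1 and 2 and list the frame timestamps.
// "database" and the chosen point IDs are assumptions, not part of the API above.
SeekFilterBase<HistorianKey> timestamps = TimestampSeekFilter.CreateFromRange<HistorianKey>(DateTime.UtcNow.AddHours(-1), DateTime.UtcNow);
SortedList<DateTime, FrameData> frames = database.GetFrames(timestamps, 1, 2);

foreach (DateTime frameTime in frames.Keys)
    Console.WriteLine(frameTime.ToString("O"));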
public static TreeStream<TKey, TValue> Read<TKey, TValue>(this IDatabaseReader<TKey, TValue> reader, SeekFilterBase<TKey> timeFilter)
    where TKey : TimestampPointIDBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    return reader.Read(SortedTreeEngineReaderOptions.Default, timeFilter, null);
}
public static TreeStream<TKey, TValue> Read<TKey, TValue>(this IDatabaseReader<TKey, TValue> reader, SeekFilterBase<TKey> key1, IEnumerable<ulong> pointIds)
    where TKey : TimestampPointIDBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    return reader.Read(SortedTreeEngineReaderOptions.Default, key1, PointIdMatchFilter.CreateFromList<TKey, TValue>(pointIds.ToList()));
}
/// <summary>
/// Reads data from the SortedTreeEngine with the provided read options and server side filters.
/// </summary>
/// <param name="readerOptions">read options supplied to the reader. Can be null.</param>
/// <param name="keySeekFilter">a seek based filter to follow. Can be null.</param>
/// <param name="keyMatchFilter">a match based filter to follow. Can be null.</param>
/// <returns>A stream that will read the specified data.</returns>
public abstract TreeStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter);
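Every concrete implementation and extension method in this collection drives the Read contract the same way. A minimal sketch of a typical call, assuming `database` exposes this Read method for HistorianKey/HistorianValue and that `startTime`, `stopTime`, and `pointIDs` are supplied by the caller:

// Usage sketch: both filters are optional; passing null for either reads everything
// on that axis (see the universe filters used by SequentialReaderStream further below).
SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);

HistorianKey key = new HistorianKey();
HistorianValue value = new HistorianValue();

using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
{
    while (stream.Read(key, value))
        Console.WriteLine($"{key.TimestampAsDate:O}\t{key.PointID}\t{value.AsSingle}");
}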
/// <summary>
/// Reads data from the SortedTreeEngine with the provided read options and server side filters.
/// </summary>
/// <param name="readerOptions">read options supplied to the reader. Can be null.</param>
/// <param name="keySeekFilter">a seek based filter to follow. Can be null.</param>
/// <param name="keyMatchFilter">a match based filter to follow. Can be null.</param>
/// <returns>A stream that will read the specified data.</returns>
public override TreeStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter)
{
    if (m_reader != null)
        throw new Exception("Sockets do not support concurrent readers. Dispose of old reader.");

    m_stream.Write((byte)ServerCommand.Read);

    if (keySeekFilter == null)
    {
        m_stream.Write(false);
    }
    else
    {
        m_stream.Write(true);
        m_stream.Write(keySeekFilter.FilterType);
        keySeekFilter.Save(m_stream);
    }

    if (keyMatchFilter == null)
    {
        m_stream.Write(false);
    }
    else
    {
        m_stream.Write(true);
        m_stream.Write(keyMatchFilter.FilterType);
        keyMatchFilter.Save(m_stream);
    }

    if (readerOptions == null)
    {
        m_stream.Write(false);
    }
    else
    {
        m_stream.Write(true);
        readerOptions.Save(m_stream);
    }

    m_stream.Flush();

    var command = (ServerResponse)m_stream.ReadUInt8();

    switch (command)
    {
        case ServerResponse.UnhandledException:
            string exception = m_stream.ReadString();
            throw new Exception("Server UnhandledException: \n" + exception);
        case ServerResponse.UnknownOrCorruptSeekFilter:
            throw new Exception("Server does not recognize the seek filter");
        case ServerResponse.UnknownOrCorruptMatchFilter:
            throw new Exception("Server does not recognize the match filter");
        case ServerResponse.UnknownOrCorruptReaderOptions:
            throw new Exception("Server does not recognize the reader options");
        case ServerResponse.SerializingPoints:
            break;
        case ServerResponse.ErrorWhileReading:
            exception = m_stream.ReadString();
            throw new Exception("Server Error While Reading: \n" + exception);
        default:
            throw new Exception("Unknown server response: " + command.ToString());
    }

    m_reader = new PointReader(m_encodingMode, m_stream, () => m_reader = null);
    return m_reader;
}
public TreeStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter, WorkerThreadSynchronization workerThreadSynchronization)
{
    if (m_disposed)
        throw new ObjectDisposedException(GetType().FullName);

    SequentialReaderStream<TKey, TValue> stream = m_server.Read(readerOptions, keySeekFilter, keyMatchFilter, workerThreadSynchronization);

    if (!stream.EOS)
    {
        stream.Disposed += OnStreamDisposal;

        lock (m_syncRoot)
            m_openStreams.Add(stream);
    }

    return stream;
}
/// <summary>
/// Gets a <see cref="PointStream"/> from the historian for the queried points.
/// </summary>
/// <param name="database">the database to use</param>
/// <param name="options">A list of query options</param>
/// <param name="timestamps">the timestamps to query for</param>
/// <param name="points">the points to query</param>
/// <returns>a <see cref="PointStream"/> that reads the specified data</returns>
public static PointStream GetPointStream(this IDatabaseReader<HistorianKey, HistorianValue> database, SortedTreeEngineReaderOptions options, SeekFilterBase<HistorianKey> timestamps, MatchFilterBase<HistorianKey, HistorianValue> points)
{
    return new PointStream(database, database.Read(options, timestamps, points));
}
/// <summary>
/// Reads data from the SortedTreeEngine with the provided read options and server side filters.
/// </summary>
/// <param name="readerOptions">read options supplied to the reader. Can be null.</param>
/// <param name="keySeekFilter">a seek based filter to follow. Can be null.</param>
/// <param name="keyMatchFilter">a match based filter to follow. Can be null.</param>
/// <returns>A stream that will read the specified data.</returns>
public override TreeStream<TKey, TValue> Read(SortedTreeEngineReaderOptions readerOptions, SeekFilterBase<TKey> keySeekFilter, MatchFilterBase<TKey, TValue> keyMatchFilter)
{
    return Read(readerOptions, keySeekFilter, keyMatchFilter, null);
}
/// <summary>
/// Queries the provided signals within the time range described by the <see cref="QueryFilterTimestamp"/>.
/// With this method, the signals will be strongly typed and therefore can be converted.
/// </summary>
/// <param name="database">the database to query</param>
/// <param name="timestamps">a <see cref="QueryFilterTimestamp"/> that describes how a signal will be parsed</param>
/// <param name="signals">an IEnumerable of all of the signals to query as part of the results set.</param>
/// <param name="readerOptions">The options that will be used when querying this data.</param>
/// <returns>the queried signal data, keyed by historian point ID</returns>
public static Dictionary<ulong, SignalDataBase> GetSignals(this IDatabaseReader<HistorianKey, HistorianValue> database, SeekFilterBase<HistorianKey> timestamps, IEnumerable<ISignalWithType> signals, SortedTreeEngineReaderOptions readerOptions)
{
    Dictionary<ulong, SignalDataBase> results = new Dictionary<ulong, SignalDataBase>();

    foreach (ISignalWithType pt in signals)
    {
        if (pt.HistorianId.HasValue && !results.ContainsKey(pt.HistorianId.Value))
            results.Add(pt.HistorianId.Value, new SignalData(pt.Functions));
    }

    HistorianKey key = new HistorianKey();
    HistorianValue hvalue = new HistorianValue();

    MatchFilterBase<HistorianKey, HistorianValue> keyParser = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(signals.Where((x) => x.HistorianId.HasValue).Select((x) => x.HistorianId.Value));
    TreeStream<HistorianKey, HistorianValue> stream = database.Read(readerOptions, timestamps, keyParser);

    ulong time, point, quality, value;

    while (stream.Read(key, hvalue))
    {
        time = key.Timestamp;
        point = key.PointID;
        quality = hvalue.Value3;
        value = hvalue.Value1;

        results.AddSignalIfExists(time, point, value);
    }

    foreach (SignalDataBase signal in results.Values)
        signal.Completed();

    return results;
}
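A hedged usage sketch for this helper follows. `MySignalWithType` is a hypothetical stand-in for whatever ISignalWithType implementation the calling project already has (it is not part of the API shown above), and `database` is assumed to be an open reader:

// Usage sketch: query the last minute of data for two historian points.
// MySignalWithType is a placeholder type with settable HistorianId/Functions properties.
IEnumerable<ISignalWithType> signals = new ISignalWithType[]
{
    new MySignalWithType { HistorianId = 1 },
    new MySignalWithType { HistorianId = 2 }
};

SeekFilterBase<HistorianKey> timestamps = TimestampSeekFilter.CreateFromRange<HistorianKey>(DateTime.UtcNow.AddMinutes(-1), DateTime.UtcNow);
Dictionary<ulong, SignalDataBase> results = database.GetSignals(timestamps, signals, SortedTreeEngineReaderOptions.Default);

Console.WriteLine($"Received data for {results.Count} historian points.");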
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="server">The server to use for the query.</param>
/// <param name="instanceName">Name of the archive to be queried.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <param name="seriesLimit">Maximum number of points per series.</param>
/// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param>
/// <param name="cancellationToken">Cancellation token for query.</param>
/// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns>
public static IEnumerable<TrendValue> GetHistorianData(SnapServer server, string instanceName, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null)
{
    if (cancellationToken == null)
        cancellationToken = new CancellationToken();

    if (server == null)
        yield break;

    // Setting series limit to zero requests full resolution data, which overrides provided parameter
    if (seriesLimit < 1)
    {
        resolution = Resolution.Full;
        forceLimit = false;
    }

    TimeSpan resolutionInterval = resolution.GetInterval();
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    bool subFullResolution = false;

    // Set data scan resolution
    if (resolution != Resolution.Full)
    {
        subFullResolution = true;

        BaselineTimeInterval interval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
            interval = BaselineTimeInterval.Second;
        else if (resolutionInterval.Ticks < Ticks.PerHour)
            interval = BaselineTimeInterval.Minute;
        else if (resolutionInterval.Ticks == Ticks.PerHour)
            interval = BaselineTimeInterval.Hour;

        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);
    }

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    Dictionary<ulong, DataRow> metadata = null;

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName))
    {
        if (database == null)
            yield break;

        if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? DefaultInstanceName, out LocalOutputAdapter historianAdapter))
            metadata = historianAdapter?.Measurements;

        if (metadata == null)
            yield break;

        // Setup point ID selections
        if (measurementIDs != null)
            pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);
        else
            measurementIDs = metadata.Keys.ToArray();

        Dictionary<ulong, long> pointCounts = new Dictionary<ulong, long>(measurementIDs.Length);
        Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(measurementIDs.Length);
        Dictionary<ulong, Tuple<float, float>> extremes = new Dictionary<ulong, Tuple<float, float>>(measurementIDs.Length);
        ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value;
        long pointCount;
        float pointValue, min = 0.0F, max = 0.0F;

        foreach (ulong measurementID in measurementIDs)
            pointCounts[measurementID] = 0L;

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value) && !cancellationToken.IsCancelled)
            {
                pointID = key.PointID;
                timestamp = key.Timestamp;
                pointCount = pointCounts[pointID];
                pointValue = value.AsSingle;

                if (subFullResolution)
                {
                    Tuple<float, float> stats = extremes.GetOrAdd(pointID, _ => new Tuple<float, float>(float.MaxValue, float.MinValue));

                    min = stats.Item1;
                    max = stats.Item2;

                    if (pointValue < min)
                        min = pointValue;

                    if (pointValue > max)
                        max = pointValue;

                    if (min != float.MaxValue && max != float.MinValue)
                        pointValue = Math.Abs(max) > Math.Abs(min) ? max : min;
                    else if (min != float.MaxValue)
                        pointValue = min;
                    else if (max != float.MinValue)
                        pointValue = max;
                }

                if (timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan)
                {
                    pointCount++;

                    if (forceLimit && pointCount > seriesLimit)
                        break;

                    yield return new TrendValue
                    {
                        ID = (long)pointID,
                        Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond,
                        Value = pointValue
                    };

                    lastTimes[pointID] = timestamp;

                    // Reset extremes at each point publication
                    if (subFullResolution)
                        extremes[pointID] = new Tuple<float, float>(float.MaxValue, float.MinValue);
                }
                else if (subFullResolution)
                {
                    // Track extremes over interval
                    extremes[pointID] = new Tuple<float, float>(min, max);
                }

                pointCounts[pointID] = pointCount;
            }
        }
    }
}
public SequentialReaderStream(ArchiveList<TKey, TValue> archiveList, SortedTreeEngineReaderOptions readerOptions = null, SeekFilterBase<TKey> keySeekFilter = null, MatchFilterBase<TKey, TValue> keyMatchFilter = null, WorkerThreadSynchronization workerThreadSynchronization = null)
{
    if (readerOptions is null)
        readerOptions = SortedTreeEngineReaderOptions.Default;

    if (keySeekFilter is null)
        keySeekFilter = new SeekFilterUniverse<TKey>();

    if (keyMatchFilter is null)
        keyMatchFilter = new MatchFilterUniverse<TKey, TValue>();

    if (workerThreadSynchronization is null)
    {
        m_ownsWorkerThreadSynchronization = true;
        workerThreadSynchronization = new WorkerThreadSynchronization();
    }

    m_workerThreadSynchronization = workerThreadSynchronization;
    m_pointCount = 0;
    m_keySeekFilter = keySeekFilter;
    m_keyMatchFilter = keyMatchFilter;
    m_keyMatchIsUniverse = (m_keyMatchFilter as MatchFilterUniverse<TKey, TValue>) != null;

    if (readerOptions.Timeout.Ticks > 0)
    {
        m_timeout = new TimeoutOperation();
        m_timeout.RegisterTimeout(readerOptions.Timeout, () => m_timedOut = true);
    }

    m_snapshot = archiveList.CreateNewClientResources();
    m_snapshot.UpdateSnapshot();
    m_tablesOrigList = new List<BufferedArchiveStream<TKey, TValue>>();

    for (int x = 0; x < m_snapshot.Tables.Count(); x++)
    {
        ArchiveTableSummary<TKey, TValue> table = m_snapshot.Tables[x];

        if (table != null)
        {
            if (table.Contains(keySeekFilter.StartOfRange, keySeekFilter.EndOfRange))
            {
                try
                {
                    m_tablesOrigList.Add(new BufferedArchiveStream<TKey, TValue>(x, table));
                }
                catch (Exception e)
                {
                    // ToDo: Make sure FirstKey.ToString() doesn't ever throw an exception.
                    StringBuilder sb = new StringBuilder();
                    sb.AppendLine($"Archive ID {table.FileId}");
                    sb.AppendLine($"First Key {table.FirstKey.ToString()}");
                    sb.AppendLine($"Last Key {table.LastKey.ToString()}");
                    sb.AppendLine($"File Size {table.SortedTreeTable.BaseFile.ArchiveSize}");
                    sb.AppendLine($"File Name {table.SortedTreeTable.BaseFile.FilePath}");
                    Log.Publish(MessageLevel.Error, "Error while reading file", sb.ToString(), null, e);
                }
            }
            else
            {
                m_snapshot.Tables[x] = null;
            }
        }
    }

    m_sortedArchiveStreams = new CustomSortHelper<BufferedArchiveStream<TKey, TValue>>(m_tablesOrigList, IsLessThan);

    m_keySeekFilter.Reset();

    if (m_keySeekFilter.NextWindow())
        SeekToKey(m_keySeekFilter.StartOfFrame);
    else
        Dispose();
}
private bool ProcessRead()
{
    SeekFilterBase<TKey> key1Parser = null;
    MatchFilterBase<TKey, TValue> key2Parser = null;
    SortedTreeEngineReaderOptions readerOptions = null;

    if (m_stream.ReadBoolean())
    {
        try
        {
            key1Parser = Library.Filters.GetSeekFilter<TKey>(m_stream.ReadGuid(), m_stream);
        }
        catch
        {
            m_stream.Write((byte)ServerResponse.UnknownOrCorruptSeekFilter);
            m_stream.Flush();
            return false;
        }
    }

    if (m_stream.ReadBoolean())
    {
        try
        {
            key2Parser = Library.Filters.GetMatchFilter<TKey, TValue>(m_stream.ReadGuid(), m_stream);
        }
        catch
        {
            m_stream.Write((byte)ServerResponse.UnknownOrCorruptMatchFilter);
            m_stream.Flush();
            return false;
        }
    }

    if (m_stream.ReadBoolean())
    {
        try
        {
            readerOptions = new SortedTreeEngineReaderOptions(m_stream);
        }
        catch
        {
            m_stream.Write((byte)ServerResponse.UnknownOrCorruptReaderOptions);
            m_stream.Flush();
            return false;
        }
    }

    bool needToFinishStream = false;

    try
    {
        using (TreeStream<TKey, TValue> scanner = m_sortedTreeEngine.Read(readerOptions, key1Parser, key2Parser))
        {
            m_stream.Write((byte)ServerResponse.SerializingPoints);

            m_encodingMethod.ResetEncoder();

            //if (m_encodingMethod.SupportsPointerSerialization)
            //    ProcessReadWithPointers(scanner);
            //else
            needToFinishStream = true;
            bool wasCanceled = !ProcessRead(scanner);
            m_encodingMethod.WriteEndOfStream(m_stream);
            needToFinishStream = false;

            if (wasCanceled)
                m_stream.Write((byte)ServerResponse.CanceledRead);
            else
                m_stream.Write((byte)ServerResponse.ReadComplete);

            m_stream.Flush();
            return true;
        }
    }
    catch (Exception ex)
    {
        if (needToFinishStream)
            m_encodingMethod.WriteEndOfStream(m_stream);

        m_stream.Write((byte)ServerResponse.ErrorWhileReading);
        m_stream.Write(ex.ToString());
        m_stream.Flush();
        return false;
    }
}