Represents a single historian database.
Inheritance: IDisposable
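Most of the examples below follow the same lifecycle: connect with a client, obtain the database by instance name, read a filtered stream, and dispose the connection when finished. A minimal sketch of that lifecycle is shown here, assuming both the client and the database types are disposable; the host address, port and instance name are placeholder values, not taken from the examples.

    // Minimal sketch: connect, read one minute of data for all points, then dispose.
    // "127.0.0.1", 38402 and "PPA" are placeholder values.
    using (HistorianClient client = new HistorianClient("127.0.0.1", 38402))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = client.GetDatabase<HistorianKey, HistorianValue>("PPA"))
    {
        HistorianKey key = new HistorianKey();
        HistorianValue value = new HistorianValue();

        // Restrict by time only; a null point filter places no restriction on point IDs
        SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(DateTime.UtcNow.AddMinutes(-1.0D), DateTime.UtcNow);
        TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, null);

        while (stream.Read(key, value))
            Console.WriteLine("Point {0} @ {1}: {2}", key.PointID, key.Timestamp, value.AsSingle);
    }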
Example #1
 /// <summary>
 /// Unregisters a client database.
 /// </summary>
 /// <param name="client">the client database to unregister</param>
 private void Unregister(ClientDatabaseBase client)
 {
     lock (m_syncRoot)
     {
         m_connectedDatabases.Remove(client.Info.DatabaseName.ToUpper());
     }
 }
Example #2
        public SnapDBClient(string hostAddress, int port, string instanceName, ulong startTime, ulong endTime, int frameRate, IEnumerable<ulong> pointIDs)
        {
            m_client = new HistorianClient(hostAddress, port);
            m_database = m_client.GetDatabase<HistorianKey, HistorianValue>(instanceName);
            m_key = new HistorianKey();
            m_value = new HistorianValue();

            SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(DataPoint.RoundTimestamp(startTime, frameRate), DataPoint.RoundTimestamp(endTime, frameRate));
            MatchFilterBase<HistorianKey, HistorianValue> pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(pointIDs);

            m_stream = m_database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);
        }
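The constructor above only opens the stream. A plausible companion method, hypothetical and not part of the original SnapDBClient, could advance the stream one point at a time using the same m_stream, m_key and m_value fields:

        // Hypothetical helper: returns false once the stream is exhausted.
        public bool ReadNext(out ulong pointID, out ulong timestamp, out float value)
        {
            if (m_stream.Read(m_key, m_value))
            {
                pointID = m_key.PointID;
                timestamp = m_key.Timestamp;
                value = m_value.AsSingle;
                return true;
            }

            pointID = 0UL;
            timestamp = 0UL;
            value = 0.0F;
            return false;
        }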
Example #3
 public HistorianIArchive(HistorianServer server, string databaseName)
 {
     m_server = server;
     m_client = SnapClient.Connect(m_server.Host);
     m_clientDatabase = m_client.GetDatabase<HistorianKey, HistorianValue>(databaseName);
 }
Example #4
        private void OpenArchives()
        {
            ArchiveIsOpen = true;

            List<Metadata> measurements = new List<Metadata>();
            string connectionString = "server=" + serverHost + ":" + GEPPort + "; interface=0.0.0.0";
            DataSet metadata = MetadataRetriever.GetMetadata(connectionString, 30000);

            // Reference meta-data tables
            DataTable measurementTable = metadata.Tables["MeasurementDetail"];
            foreach (DataRow measurement in measurementTable.Select("SignalAcronym <> 'STAT' and SignalAcronym <> 'DIGI'"))
            {
                measurements.Add(new Metadata(measurement));
            }

            m_client = new HistorianClient(serverHost, int.Parse(historianPort));
            ArchiveReader = m_client.GetDatabase<HistorianKey, HistorianValue>(instanceName);

            StreamWriter sw = new StreamWriter("measurementList.txt");
            sw.WriteLine("Last opened at {0}", DateTime.Now);
            sw.WriteLine("Historian Instance: {0}, Port: {1}\n", instanceName, historianPort);
            foreach (var item in measurements)
            {
                sw.WriteLine("PointID: {0}, SignalReference: {1}, Description: {2}", item.PointID, item.SignalReference, item.Description);
            }
            sw.Close();
        }
Example #5
        public HistorianDataPointReader(ClientDatabaseBase<HistorianKey, HistorianValue> database, DateTime start, DateTime stop, TableDefinition tableDefinition)
        {
            HashSet<ulong> allPoints = new HashSet<ulong>();
            m_tableDefinition = tableDefinition;

            foreach (var signal in tableDefinition.m_signalGroups)
            {
                foreach (var point in signal.Value)
                {
                    if (point.HasValue)
                        allPoints.Add((ulong)point.Value);
                }
            }
            //m_results = database.GetFrames((ulong)start.Ticks, (ulong)stop.Ticks, allPoints);
            m_currentFrame = -1;
        }
Example #6
        /// <summary>
        /// Read historian data from server.
        /// </summary>
        /// <param name="database">Client database to use for query.</param>
        /// <param name="startTime">Start time of query.</param>
        /// <param name="stopTime">Stop time of query.</param>
        /// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param>
        /// <param name="resolution">Resolution for data query.</param>
        /// <param name="seriesLimit">Maximum number of points per series.</param>
        /// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param>
        /// <param name="cancellationToken">Cancellation token for query.</param>
        /// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns>
        public static IEnumerable<TrendValue> GetHistorianData(ClientDatabaseBase<HistorianKey, HistorianValue> database, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null)
        {
            if ((object)cancellationToken == null)
                cancellationToken = new CancellationToken();

            if ((object)database == null)
                yield break;

            TimeSpan resolutionInterval = resolution.GetInterval();
            SeekFilterBase<HistorianKey> timeFilter;
            MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
            HistorianKey key = new HistorianKey();
            HistorianValue value = new HistorianValue();

            // Set data scan resolution
            if (resolution == Resolution.Full)
            {
                timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
            }
            else
            {
                BaselineTimeInterval interval = BaselineTimeInterval.Second;

                if (resolutionInterval.Ticks < Ticks.PerMinute)
                    interval = BaselineTimeInterval.Second;
                else if (resolutionInterval.Ticks < Ticks.PerHour)
                    interval = BaselineTimeInterval.Minute;
                else if (resolutionInterval.Ticks == Ticks.PerHour)
                    interval = BaselineTimeInterval.Hour;

                startTime = startTime.BaselinedTimestamp(interval);
                stopTime = stopTime.BaselinedTimestamp(interval);

                timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
            }

            Dictionary<ulong, DataRow> metadata = null;
            LocalOutputAdapter historianAdapter;

            if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? DefaultInstanceName, out historianAdapter))
                metadata = historianAdapter?.Measurements;

            if ((object)metadata == null)
                yield break;

            // Setup point ID selections
            if ((object)measurementIDs != null)
                pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);
            else
                measurementIDs = metadata.Keys.ToArray();

            // Start stream reader for the provided time window and selected points
            Dictionary<ulong, long> pointCounts = new Dictionary<ulong, long>(measurementIDs.Length);
            Dictionary<ulong, long> intervals = new Dictionary<ulong, long>(measurementIDs.Length);
            Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(measurementIDs.Length);
            double range = (stopTime - startTime).TotalSeconds;
            ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value;
            long pointCount;
            DataRow row;

            if (resolutionSpan <= 1UL)
                resolutionSpan = Ticks.PerSecond;

            if (seriesLimit < 1)
                seriesLimit = 1;

            // Estimate total measurement counts per point so decimation intervals for each series can be calculated
            foreach (ulong measurementID in measurementIDs)
            {
                if (resolution == Resolution.Full)
                    pointCounts[measurementID] = metadata.TryGetValue(measurementID, out row) ? (long)(int.Parse(row["FramesPerSecond"].ToString()) * range) : 2;
                else
                    pointCounts[measurementID] = (long)(range / resolutionInterval.TotalSeconds.NotZero(1.0D));
            }

            foreach (ulong measurementID in pointCounts.Keys)
                intervals[measurementID] = (pointCounts[measurementID] / seriesLimit).NotZero(1L);

            lock (database)
            {
                TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);

                while (stream.Read(key, value) && !cancellationToken.IsCancelled)
                {
                    pointID = key.PointID;
                    timestamp = key.Timestamp;
                    pointCount = pointCounts[pointID];

                    if (pointCount++ % intervals[pointID] == 0 || (!forceLimit && timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan))
                        yield return new TrendValue
                        {
                            ID = (long)pointID,
                            Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond,
                            Value = value.AsSingle
                        };

                    pointCounts[pointID] = pointCount;
                    lastTimes[pointID] = timestamp;
                }
            }
        }
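A hypothetical call site for GetHistorianData from within the same class, assuming a connected database obtained as in the earlier examples; the time window, point IDs and series limit below are placeholders:

    DateTime stopTime = DateTime.UtcNow;
    DateTime startTime = stopTime.AddHours(-1.0D);
    ulong[] measurementIDs = { 1UL, 2UL }; // placeholder point IDs

    // Full-resolution query, at most 1,000 values per series, limit not strictly enforced
    foreach (TrendValue trendValue in GetHistorianData(database, startTime, stopTime, measurementIDs, Resolution.Full, 1000, false))
        Console.WriteLine("ID {0}: {1} @ {2}", trendValue.ID, trendValue.Value, trendValue.Timestamp);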
Example #7
 private void ExecuteFolderOperation(ClientDatabaseBase<HistorianKey, HistorianValue> database, string folderName, Action<List<Guid>> folderOperation)
 {
     List<Guid> files = database.GetAllAttachedFiles().Where(file => Path.GetFullPath(file.FileName).StartsWith(folderName, StringComparison.OrdinalIgnoreCase)).Select(file => file.Id).ToList();
     folderOperation(files);
 }
Example #8
 private void ExecuteWildCardFileOperation(ClientDatabaseBase<HistorianKey, HistorianValue> database, string fileName, Action<List<Guid>> fileOperation)
 {
     HashSet<string> sourceFiles = new HashSet<string>(FilePath.GetFileList(fileName).Select(Path.GetFullPath), StringComparer.OrdinalIgnoreCase);
     List<Guid> files = database.GetAllAttachedFiles().Where(file => sourceFiles.Contains(Path.GetFullPath(file.FileName))).Select(file => file.Id).ToList();
     fileOperation(files);
 }
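Both helpers accept any Action<List<Guid>> as the operation to run against the matched file IDs. A caller inside the same class could, for example, simply report which attached files match a wild-card pattern; the path below is a placeholder:

    ExecuteWildCardFileOperation(database, @"D:\Archives\*.d2", files =>
    {
        foreach (Guid fileID in files)
            Console.WriteLine("Matched attached file: {0}", fileID);
    });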