/// <summary>
/// Creates a new <see cref="DataPointScanner"/> instance.
/// </summary>
/// <param name="dataBlockAllocationTable"><see cref="ArchiveFileAllocationTable"/> for the file to be scanned.</param>
/// <param name="historianID">Historian ID to scan for.</param>
/// <param name="startTime">Desired start time.</param>
/// <param name="endTime">Desired end time.</param>
/// <param name="dataReadExceptionHandler">Read exception handler.</param>
public DataPointScanner(ArchiveFileAllocationTable dataBlockAllocationTable, int historianID, TimeTag startTime, TimeTag endTime, EventHandler<EventArgs<Exception>> dataReadExceptionHandler)
{
    // Capture the query parameters, then resolve the candidate data blocks
    // for this point over the requested time range (preRead = false).
    m_historianID = historianID;
    m_startTime = startTime;
    m_endTime = endTime;
    m_dataReadExceptionHandler = dataReadExceptionHandler;
    m_dataBlocks = dataBlockAllocationTable.FindDataBlocks(historianID, startTime, endTime, false);
}
/// <summary>
/// Initializes a new instance of the <see cref="IntercomRecord"/> class.
/// </summary>
/// <param name="recordID">ID of the <see cref="IntercomRecord"/>.</param>
public IntercomRecord(int recordID)
{
    // Number of per-source "latest data time" slots this record tracks.
    const int sourceSlots = 20;

    m_recordID = recordID;
    m_latestDataTime = TimeTag.MinValue;
    m_sourceLatestDataTime = new List<TimeTag>();

    // Seed every source slot with the minimum time tag (i.e., "no data yet").
    for (int slot = 0; slot < sourceSlots; slot++)
        m_sourceLatestDataTime.Add(TimeTag.MinValue);
}
/// <summary>
/// Removes the given time tag from the lyric that owns it.
/// </summary>
/// <param name="timeTag">Time tag to remove.</param>
/// <returns>True when the tag belonged to a lyric and was removed; otherwise false.</returns>
public bool RemoveTimeTag(TimeTag timeTag)
{
    // Locate the lyric that owns this time tag; nothing to do if none does.
    var ownerLyric = timeTagInLyric(timeTag);

    if (ownerLyric == null)
        return false;

    changeHandler?.BeginChange();

    // Rebuild the tag array without the removed instance (reference comparison).
    ownerLyric.TimeTags = ownerLyric.TimeTags.Where(t => t != timeTag).ToArray();

    changeHandler?.EndChange();

    return true;
}
/// <summary>
/// Initializes <see cref="ArchiveDataPoint"/> from the specified <paramref name="buffer"/>.
/// </summary>
/// <param name="buffer">Binary image to be used for initializing <see cref="ArchiveDataPoint"/>.</param>
/// <param name="startIndex">0-based starting index of initialization data in the <paramref name="buffer"/>.</param>
/// <param name="length">Valid number of bytes in <paramref name="buffer"/> from <paramref name="startIndex"/>.</param>
/// <returns>Number of bytes used from the <paramref name="buffer"/> for initializing <see cref="ArchiveDataPoint"/>.</returns>
public virtual int ParseBinaryImage(byte[] buffer, int startIndex, int length)
{
    if (length >= FixedLength)
    {
        // Binary image has sufficient data.
        // Layout: seconds (bytes 0-3), flags (bytes 4-5), value (bytes 6-9).
        Flags = LittleEndian.ToInt16(buffer, startIndex + 4);
        Value = LittleEndian.ToSingle(buffer, startIndex + 6);
        // FIX: use a decimal (not double) millisecond fraction so the sum binds to
        // the TimeTag(decimal) constructor, matching the sibling overloads that
        // parse this same layout and avoiding binary floating-point rounding.
        Time = new TimeTag(LittleEndian.ToInt32(buffer, startIndex) +                    // Seconds
            ((decimal)(m_flags.GetMaskedValue(MillisecondMask) >> 5) / 1000));           // Milliseconds
        return FixedLength;
    }

    // Binary image does not have sufficient data.
    return 0;
}
/// <summary>
/// Sets the boundaries for the x-axis.
/// </summary>
/// <param name="startTimeString">A string representation of the lower x-axis boundary.</param>
/// <param name="endTimeString">A string representation of the upper x-axis boundary.</param>
/// <exception cref="ArgumentException">Thrown when the parsed start time falls after the end time.</exception>
public void SetInterval(string startTimeString, string endTimeString)
{
    // Parse both boundaries through TimeTag so historian-style time expressions are accepted.
    DateTime startTime = TimeTag.Parse(startTimeString).ToDateTime();
    DateTime endTime = TimeTag.Parse(endTimeString).ToDateTime();

    if (startTime > endTime)
    {
        throw new ArgumentException("startTime > endTime");
    }

    ClearHistory();

    // Reset all four axis bounds to auto-range first...
    m_xAxis.Minimum = null;
    m_xAxis.Maximum = null;
    m_yAxis.Minimum = null;
    m_yAxis.Maximum = null;

    // ...then pin the x-axis to the requested interval; the y-axis stays auto-ranged.
    // NOTE(review): the null-then-value assignment order looks deliberate (likely to
    // force the charting control to re-evaluate its range) — confirm before collapsing.
    m_xAxis.Minimum = startTime;
    m_xAxis.Maximum = endTime;
}
/// <summary>
/// Assigns a time to the given time tag and refreshes its owning lyric.
/// </summary>
/// <param name="timeTag">Time tag to update.</param>
/// <param name="time">New time value.</param>
/// <returns>True when the tag belongs to a lyric and was updated; otherwise false.</returns>
public bool SetTimeTagTime(TimeTag timeTag, double time)
{
    var ownerLyric = timeTagInLyric(timeTag);

    // Bail out when the tag is not attached to any lyric.
    if (ownerLyric == null)
        return false;

    changeHandler?.BeginChange();

    timeTag.Time = time;
    refreshTimeTag(ownerLyric);

    changeHandler?.EndChange();

    // Notify bindable listeners that the tag collection content changed.
    ownerLyric.TimeTagsBindable.TriggerChange();
    return true;
}
/// <summary>
/// Creates a new instance of the <see cref="HistorianViewUserControl"/> class.
/// </summary>
public HistorianViewUserControl()
{
    m_archiveFiles = new List<ArchiveFile>();
    m_metadata = new List<MetadataWrapper>();
    m_contextMenuItems = new List<MenuItem>();
    m_visibleColumns = new HashSet<string>();
    InitializeComponent();
    InitializeChartWindow();

    // Default the query window to the last five minutes ("*" parses as "now").
    StartTime = TimeTag.Parse("*-5M").ToDateTime();
    EndTime = TimeTag.Parse("*").ToDateTime();
    m_currentTimeCheckBox.IsChecked = true;

    // Restore previously opened archive locations from configuration ('|'-delimited),
    // keeping only entries that still exist on disk.
    string[] lastArchiveLocations = ConfigurationFile.Current.Settings.General["ArchiveLocations", true].ValueAs("").Split('|').Where(archiveLocation => !string.IsNullOrWhiteSpace(archiveLocation) && File.Exists(archiveLocation)).ToArray();

    // NOTE(review): ToArray() never returns null, so the null checks below are defensive only.
    if (lastArchiveLocations == null || lastArchiveLocations.Length == 0)
    {
        // See if a local archive folder exists with a valid archive
        string defaultArchiveLocation = FilePath.GetAbsolutePath("Archive");

        if (Directory.Exists(defaultArchiveLocation))
        {
            lastArchiveLocations = Directory.GetFiles(defaultArchiveLocation, "*_archive.d");
        }

        if (lastArchiveLocations == null || lastArchiveLocations.Length == 0)
        {
            // See if a local statistics folder exists with a valid archive
            defaultArchiveLocation = FilePath.GetAbsolutePath("Statistics");

            if (Directory.Exists(defaultArchiveLocation))
            {
                lastArchiveLocations = Directory.GetFiles(defaultArchiveLocation, "*_archive.d");
            }
        }
    }

    // Open whatever archives were found (saved locations or local fallbacks).
    if (lastArchiveLocations != null && lastArchiveLocations.Length > 0)
    {
        OpenArchives(lastArchiveLocations);
    }
}
/// <summary>
/// Initializes <see cref="PacketType101DataPoint"/> from the specified <paramref name="buffer"/>.
/// </summary>
/// <param name="buffer">Binary image to be used for initializing <see cref="PacketType101DataPoint"/>.</param>
/// <param name="startIndex">0-based starting index of initialization data in the <paramref name="buffer"/>.</param>
/// <param name="length">Valid number of bytes in <paramref name="buffer"/> from <paramref name="startIndex"/>.</param>
/// <returns>Number of bytes used from the <paramref name="buffer"/> for initializing <see cref="PacketType101DataPoint"/>.</returns>
public override int ParseBinaryImage(byte[] buffer, int startIndex, int length)
{
    // Not enough bytes for a complete point — consume nothing.
    if (length < FixedLength)
        return 0;

    // Layout: historian ID (0-3), seconds (4-7), flags (8-9), value (10-13).
    HistorianID = LittleEndian.ToInt32(buffer, startIndex);
    Flags = LittleEndian.ToInt16(buffer, startIndex + 8);
    Value = LittleEndian.ToSingle(buffer, startIndex + 10);

    // Time combines whole seconds with the millisecond bits carried in the flags
    // (Flags must already be assigned at this point).
    Time = new TimeTag(LittleEndian.ToInt32(buffer, startIndex + 4) +            // Seconds
        ((decimal)(Flags.GetMaskedValue(MillisecondMask) >> 5) / 1000));         // Milliseconds

    return FixedLength;
}
// Benchmark: measures sustained archive write throughput against a scratch file.
public void TestWriteSpeed()
{
    // Clean the benchmark directory so each run starts from an empty archive.
    foreach (string file in Directory.GetFiles("c:\\temp\\benchmark\\", "*.*", SearchOption.AllDirectories))
    {
        File.Delete(file);
    }

    Console.WriteLine("Creating initial archive file...");

    using (ArchiveFile file = OpenArchiveFile("c:\\temp\\benchmark\\test_archive.d"))
    {
        // Surface archive events on the console so anomalies during the run are visible.
        file.DataWriteException += (sender, e) => Console.WriteLine("Data Write Exception: {0}", e.Argument.Message);
        file.FileFull += (sender, e) => Console.WriteLine("File is full!");
        file.FutureDataReceived += (sender, e) => Console.WriteLine("Future data received");
        file.OrphanDataReceived += (sender, e) => Console.WriteLine("Orphaned data received");

        Console.WriteLine("Start file write...");

        TimeTag now = new TimeTag(DateTime.UtcNow);
        Stopwatch sw = new Stopwatch();
        sw.Start();

        // Sweep all historian IDs once per iteration, advancing the shared
        // time tag by one second after each sweep.
        for (int x = 0; x < PointsToArchive / MetaDataPoints; x++)
        {
            for (int i = 1; i <= MetaDataPoints; i++)
            {
                file.WriteData(new ArchiveDataPoint(i, now, x, Quality.Good));
            }
            now = new TimeTag(now.Value + 1.0M);
        }

        double totalTime = sw.Elapsed.TotalSeconds;

        // Report throughput and final FAT statistics.
        Console.WriteLine("Completed write test in {0:#,##0.00} seconds at {1:#,##0.00} points per second", totalTime, PointsToArchive / totalTime);
        Console.WriteLine(" Points received = {0:#,##0}", file.Fat.DataPointsReceived);
        Console.WriteLine(" Points archived = {0:#,##0}", file.Fat.DataPointsArchived);
        Console.WriteLine(" Data blocks used = {0:#,##0}", file.Fat.DataBlocksUsed);
        Console.WriteLine("Data blocks available = {0:#,##0}", file.Fat.DataBlocksAvailable);
    }
}
private TimeTag m_searchEndTime;    // End of the search window used by historic-file lookups

#endregion

#region [ Constructors ]

/// <summary>
/// Initializes a new instance of the <see cref="ArchiveFileAllocationTable"/> class.
/// </summary>
/// <param name="parent">An <see cref="ArchiveFile"/> object.</param>
internal ArchiveFileAllocationTable(ArchiveFile parent)
{
    m_parent = parent;
    m_dataBlockPointers = new List<ArchiveDataBlockPointer>();

    if (m_parent.FileData.Length == 0)
    {
        // File is brand new: build an empty FAT in memory with unallocated
        // pointers for every data block the file can hold.
        m_fileStartTime = TimeTag.MinValue;
        m_fileEndTime = TimeTag.MinValue;
        m_dataBlockSize = m_parent.DataBlockSize;
        m_dataBlockCount = ArchiveFile.MaximumDataBlocks(m_parent.FileSize, m_parent.DataBlockSize);

        for (int i = 0; i < m_dataBlockCount; i++)
        {
            m_dataBlockPointers.Add(new ArchiveDataBlockPointer(m_parent, i));
        }
    }
    else
    {
        // File was created previously: the fixed FAT region lives at the very
        // end of the file, so seek back from EOF and read it.
        byte[] fixedFatData = new byte[FixedBinaryLength];
        m_parent.FileData.Seek(-fixedFatData.Length, SeekOrigin.End);
        m_parent.FileData.Read(fixedFatData, 0, fixedFatData.Length);

        // Fixed region layout: start time (8 bytes), end time (8), points
        // received (4), points archived (4), block size (4), block count (4).
        FileStartTime = new TimeTag(EndianOrder.LittleEndian.ToDouble(fixedFatData, 0));
        FileEndTime = new TimeTag(EndianOrder.LittleEndian.ToDouble(fixedFatData, 8));
        DataPointsReceived = EndianOrder.LittleEndian.ToInt32(fixedFatData, 16);
        DataPointsArchived = EndianOrder.LittleEndian.ToInt32(fixedFatData, 20);
        DataBlockSize = EndianOrder.LittleEndian.ToInt32(fixedFatData, 24);
        DataBlockCount = EndianOrder.LittleEndian.ToInt32(fixedFatData, 28);

        // The variable region (one pointer per data block) sits immediately
        // above the fixed region at the end of the file.
        byte[] variableFatData = new byte[m_dataBlockCount * ArchiveDataBlockPointer.ByteCount];
        m_parent.FileData.Seek(-(variableFatData.Length + FixedBinaryLength), SeekOrigin.End);
        m_parent.FileData.Read(variableFatData, 0, variableFatData.Length);

        for (int i = 0; i < m_dataBlockCount; i++)
        {
            m_dataBlockPointers.Add(new ArchiveDataBlockPointer(m_parent, i, variableFatData, i * ArchiveDataBlockPointer.ByteCount, variableFatData.Length));
        }
    }
}
/// <summary>
/// Maps each row of the given table onto a TimeTag DTO and appends it to the list.
/// </summary>
/// <param name="dt">Source table with categoryID/timefrom/timeto/timeTagName/timeTagID columns.</param>
/// <param name="list">Destination list to append to.</param>
/// <returns>The same list instance, with one entry added per row.</returns>
public List<TimeTag> getListTimeTag(DataTable dt, List<TimeTag> list)
{
    foreach (DataRow row in dt.Rows)
    {
        var tag = new TimeTag();
        tag.CategoryID = Convert.ToInt32(row["categoryID"]);
        tag.TimeFrom = row["timefrom"].ToString();
        tag.TimeTo = row["timeto"].ToString();
        tag.TimeTagName = row["timeTagName"].ToString();

        try
        {
            tag.TimeTagID = Convert.ToInt32(row["timeTagID"]);
        }
        catch (Exception e)
        {
            // Log the conversion failure, then let the caller observe it.
            Console.WriteLine(e);
            throw;
        }

        list.Add(tag);
    }

    return list;
}
// Background drawable for one row of the (lyric, time tag) editor list.
public RowBackground(Lyric lyric, TimeTag timeTag)
{
    this.lyric = lyric;
    this.timeTag = timeTag;

    // Fixed-height row that stretches horizontally with its parent.
    RelativeSizeAxes = Axes.X;
    Height = 25;

    // NOTE(review): AlwaysPresent presumably keeps the drawable active while the
    // hover box below is fully transparent (Alpha = 0) — confirm against framework docs.
    AlwaysPresent = true;

    // Rounded corners on the masked highlight box.
    CornerRadius = 3;
    Masking = true;

    Children = new Drawable[]
    {
        // Hover highlight, hidden until interaction code changes its alpha.
        hoveredBackground = new Box
        {
            RelativeSizeAxes = Axes.Both,
            Alpha = 0,
        },
    };
}
/// <summary>
/// Determines whether the given time tag may be moved under the current recording mode.
/// </summary>
/// <param name="timeTag">Time tag to check; null is never movable.</param>
/// <returns>True when the tag is movable under <c>Mode</c>.</returns>
private bool timeTagMovable(TimeTag timeTag)
{
    if (timeTag == null)
    {
        return false;
    }

    switch (Mode)
    {
        case RecordingMovingCaretMode.None:
            // No restriction: every tag is movable.
            return true;

        case RecordingMovingCaretMode.OnlyStartTag:
            return timeTag.Index.State == TextIndex.IndexState.Start;

        case RecordingMovingCaretMode.OnlyEndTag:
            return timeTag.Index.State == TextIndex.IndexState.End;

        default:
            // FIX: previously threw InvalidOperationException(nameof(RecordingMovingCaretMode)),
            // whose message was just the enum type name; include the unexpected
            // value so the failure is diagnosable. Exception type is unchanged.
            throw new InvalidOperationException($"Unsupported {nameof(RecordingMovingCaretMode)}: {Mode}");
    }
}
// Unit-test suite for TimeTag construction, formatting, and comparison operators.
public override void RunSuite()
{
    // Make some test objects.
    DateTime dt1 = new DateTime(2005, 5, 9, 15, 47, 39, 123);
    DateTime dt2 = new DateTime(2022, 11, 24, 7, 29, 6, 801);
    TimeTag ttImmediate = new TimeTag(); // default constructor
    TimeTag tt1 = new TimeTag(dt1); // specific constructor
    TimeTag tt2 = new TimeTag(dt2);
    TimeTag tt1raw = new TimeTag(tt1.Raw); // constructor from raw
    TimeTag tt2copy = new TimeTag(new TimeTag(dt2)); // copy constructor

    // Check them all.
    // Default-constructed tag renders as the special "Immediate" value.
    UT_EQUAL(ttImmediate.ToString(), "When:Immediate");
    UT_EQUAL(tt1.ToString(), "When:2005-05-09 15:47:39.000 Seconds:3324642459 Fraction:528280977");
    UT_EQUAL(tt2.ToString(), "When:2022-11-24 07:29:06.000 Seconds:3878263746 Fraction:3440268803");

    // Equality semantics.
    // NOTE(review): tt1raw is built from tt1.Raw yet is expected to be un-Equal to
    // tt1 while `==` says they are equal — Equals and operator== evidently use
    // different criteria here; confirm against the TimeTag implementation.
    UT_TRUE(tt1.Equals(tt1));
    UT_FALSE(ttImmediate.Equals(tt2));
    UT_FALSE(tt1raw.Equals(tt1));
    UT_TRUE(tt1 == tt1raw);
    UT_TRUE(tt2 == tt2copy);
    UT_TRUE(tt1 != tt2);
    UT_FALSE(tt1 != tt1raw);
    UT_FALSE(tt2 != tt2copy);
    UT_FALSE(tt1 == tt2);

    // Relational operators: tt1 (2005) precedes tt2 (2022).
    UT_TRUE(tt2 >= tt1);
    UT_TRUE(tt2 > tt1);
    UT_FALSE(tt1 >= tt2);
    UT_FALSE(tt1 > tt2);
    UT_TRUE(tt1 <= tt2);
    UT_TRUE(tt1 < tt2);
    UT_FALSE(tt2 <= tt1);
    UT_FALSE(tt2 < tt1);
}
/// <summary>
/// Initializes <see cref="IntercomRecord"/> from the specified <paramref name="buffer"/>.
/// </summary>
/// <param name="buffer">Binary image to be used for initializing <see cref="IntercomRecord"/>.</param>
/// <param name="startIndex">0-based starting index of initialization data in the <paramref name="buffer"/>.</param>
/// <param name="length">Valid number of bytes in <paramref name="buffer"/> from <paramref name="startIndex"/>.</param>
/// <returns>Number of bytes used from the <paramref name="buffer"/> for initializing <see cref="IntercomRecord"/>.</returns>
public int ParseBinaryImage(byte[] buffer, int startIndex, int length)
{
    if (length >= FixedLength)
    {
        // Binary image has sufficient data.
        // Layout (offsets from startIndex): blocks used (0), rollover flag (4),
        // latest data time (8, 8-byte double), latest data ID (16), then one
        // 8-byte time per source starting at offset 20.
        DataBlocksUsed = LittleEndian.ToInt32(buffer, startIndex);
        RolloverInProgress = LittleEndian.ToBoolean(buffer, startIndex + 4);
        LatestDataTime = new TimeTag(LittleEndian.ToDouble(buffer, startIndex + 8));
        LatestDataID = LittleEndian.ToInt32(buffer, startIndex + 16);

        for (int i = 0; i < m_sourceLatestDataTime.Count; i++)
        {
            m_sourceLatestDataTime[i] = new TimeTag(LittleEndian.ToDouble(buffer, startIndex + 20 + (i * 8)));
        }

        return FixedLength;
    }
    else
    {
        // Binary image does not have sufficient data.
        return 0;
    }
}
/// <summary>
/// Clears the time value of the given time tag and refreshes its owning lyric.
/// </summary>
/// <param name="timeTag">Time tag whose time should be cleared.</param>
/// <returns>True when a time was cleared; false when already empty or unowned.</returns>
public bool ClearTimeTagTime(TimeTag timeTag)
{
    // Already cleared — nothing to change.
    if (timeTag.Time == null)
        return false;

    var ownerLyric = timeTagInLyric(timeTag);

    if (ownerLyric == null)
        return false;

    changeHandler?.BeginChange();

    timeTag.Time = null;
    refreshTimeTag(ownerLyric);

    changeHandler?.EndChange();
    return true;
}
// Benchmark: measures sustained archive write throughput against a scratch file.
// (Brace-less variant of the same test elsewhere in the codebase.)
public void TestWriteSpeed()
{
    // Clean the benchmark directory so each run starts from an empty archive.
    foreach (var file in Directory.GetFiles("c:\\temp\\benchmark\\", "*.*", SearchOption.AllDirectories))
        File.Delete(file);

    Console.WriteLine("Creating initial archive file...");

    using (ArchiveFile file = OpenArchiveFile("c:\\temp\\benchmark\\test_archive.d"))
    {
        // Surface archive events on the console so anomalies during the run are visible.
        file.DataWriteException += (sender, e) => Console.WriteLine("Data Write Exception: {0}", e.Argument.Message);
        file.FileFull += (sender, e) => Console.WriteLine("File is full!");
        file.FutureDataReceived += (sender, e) => Console.WriteLine("Future data received");
        file.OrphanDataReceived += (sender, e) => Console.WriteLine("Orphaned data received");

        Console.WriteLine("Start file write...");

        TimeTag now = new TimeTag(DateTime.UtcNow);
        Stopwatch sw = new Stopwatch();
        sw.Start();

        // Sweep all historian IDs once per iteration, advancing the shared
        // time tag by one second after each sweep.
        for (int x = 0; x < PointsToArchive / MetaDataPoints; x++)
        {
            for (int i = 1; i <= MetaDataPoints; i++)
                file.WriteData(new ArchiveDataPoint(i, now, x, Quality.Good));

            now = new TimeTag(now.Value + 1.0M);
        }

        double totalTime = sw.Elapsed.TotalSeconds;

        // Report throughput and final FAT statistics.
        Console.WriteLine("Completed write test in {0:#,##0.00} seconds at {1:#,##0.00} points per second", totalTime, PointsToArchive / totalTime);
        Console.WriteLine(" Points received = {0:#,##0}", file.Fat.DataPointsReceived);
        Console.WriteLine(" Points archived = {0:#,##0}", file.Fat.DataPointsArchived);
        Console.WriteLine(" Data blocks used = {0:#,##0}", file.Fat.DataBlocksUsed);
        Console.WriteLine("Data blocks available = {0:#,##0}", file.Fat.DataBlocksAvailable);
    }
}
// Gets the list of data point scanners used to
// scan the file for data belonging to this query.
private List<DataPointScanner> GetScanners()
{
    List<DataPointScanner> dataPointScanners = new List<DataPointScanner>();

    if (m_historianIDs != null && m_historianIDs.Any())
    {
        // Default to the widest possible window when no explicit bounds were given.
        TimeTag startTime = m_startTime ?? TimeTag.MinValue;
        TimeTag endTime = m_endTime ?? TimeTag.MaxValue;
        int resumeFromHistorianID = 0;
        bool includeStartTime = true;

        // Set up parameters needed to properly resume a query after rollover
        if ((object)m_resumeFrom != null)
        {
            resumeFromHistorianID = m_resumeFrom.HistorianID;
            startTime = m_resumeFrom.Time ?? startTime;
            includeStartTime = false;
        }

        // Create data point scanners for each historian ID
        List<int> historianIDs = m_historianIDs.ToList();
        historianIDs.Sort();

        foreach (int historianID in historianIDs)
        {
            dataPointScanners.Add(new DataPointScanner(m_fileAllocationTable, historianID, startTime, endTime, includeStartTime, m_dataReadExceptionHandler));

            // When resuming, IDs up to and including the resume point exclude the
            // start time (it was already read); IDs after it include it again
            // because the flag flips only after the matching scanner is added.
            if (historianID == resumeFromHistorianID)
            {
                includeStartTime = true;
            }
        }
    }

    return dataPointScanners;
}
/// <summary>
/// Initializes a new instance of the <see cref="ArchiveFileAllocationTable"/> class.
/// </summary>
/// <param name="parent">An <see cref="ArchiveFile"/> object.</param>
internal ArchiveFileAllocationTable(ArchiveFile parent)
{
    m_parent = parent;
    m_dataBlockPointers = new List<ArchiveDataBlockPointer>();
    m_fixedTableRegion = new FixedTableRegion(this);
    m_variableTableRegion = new VariableTableRegion(this);

    if (m_parent.FileData.Length == 0)
    {
        // File is brand new: build an empty FAT in memory with unallocated
        // pointers for every data block the file can hold.
        m_fileStartTime = TimeTag.MinValue;
        m_fileEndTime = TimeTag.MinValue;
        m_dataBlockSize = m_parent.DataBlockSize;
        m_dataBlockCount = ArchiveFile.MaximumDataBlocks(m_parent.FileSize, m_parent.DataBlockSize);

        for (int i = 0; i < m_dataBlockCount; i++)
        {
            m_dataBlockPointers.Add(new ArchiveDataBlockPointer(m_parent, i));
        }
    }
    else
    {
        // Existing file, read table regions:
        // Both regions live at the end of the file; the fixed region is last.

        // Seek to beginning of fixed table region
        m_parent.FileData.Seek(-m_fixedTableRegion.BinaryLength, SeekOrigin.End);

        // Parse fixed table region
        m_fixedTableRegion.ParseBinaryImageFromStream(m_parent.FileData);

        // Seek to beginning of variable table region (above fixed from bottom of file)
        m_parent.FileData.Seek(-(m_fixedTableRegion.BinaryLength + m_variableTableRegion.BinaryLength), SeekOrigin.End);

        // Parse variable table region
        m_variableTableRegion.ParseBinaryImageFromStream(m_parent.FileData);
    }
}
// Kick start read process for historian
private void StartDataReader(object state)
{
    // In temporal sessions only the requested subset is read; otherwise read all output measurements.
    MeasurementKey[] requestedKeys = SupportsTemporalProcessing ? RequestedOutputMeasurementKeys : OutputMeasurements.MeasurementKeys().ToArray();

    if (Enabled && (object)m_archiveReader != null && (object)requestedKeys != null && requestedKeys.Length > 0)
    {
        // Historian IDs come from a truncating (unchecked) cast of the key IDs.
        m_historianIDs = requestedKeys.Select(key => unchecked((int)key.ID)).ToArray();
        m_publicationTime = 0;

        // Start data read from historian
        lock (m_readTimer)
        {
            // Clamp the session's time constraints into the representable TimeTag range
            // (chained ternary: below-min -> MinValue, above-max -> MaxValue, else wrap).
            m_startTime = base.StartTimeConstraint < TimeTag.MinValue ? TimeTag.MinValue : base.StartTimeConstraint > TimeTag.MaxValue ? TimeTag.MaxValue : new TimeTag(base.StartTimeConstraint);
            m_stopTime = base.StopTimeConstraint < TimeTag.MinValue ? TimeTag.MinValue : base.StopTimeConstraint > TimeTag.MaxValue ? TimeTag.MaxValue : new TimeTag(base.StopTimeConstraint);

            m_dataReader = m_archiveReader.ReadData(m_historianIDs, m_startTime, m_stopTime).GetEnumerator();

            // The read timer drives publication only when at least one point is available.
            m_readTimer.Enabled = m_dataReader.MoveNext();

            if (m_readTimer.Enabled)
            {
                OnStatusMessage(MessageLevel.Info, "Starting historical data read...");
            }
            else
            {
                OnStatusMessage(MessageLevel.Info, "No historical data was available to read for given timeframe.");
                OnProcessingComplete();
            }
        }
    }
    else
    {
        // Nothing to read — make sure the timer is stopped and signal completion.
        m_readTimer.Enabled = false;
        OnStatusMessage(MessageLevel.Info, "No measurement keys have been requested for reading, historian reader is idle.");
        OnProcessingComplete();
    }
}
/// <summary>
/// Generate center time-tag with time.
/// </summary>
/// <param name="startTimeTag">Time tag at the start of the interpolation range.</param>
/// <param name="endTimeTag">Time tag at the end of the interpolation range.</param>
/// <param name="index">Text index between the two tags for which a time is interpolated.</param>
/// <returns>A new <see cref="TimeTag"/> at <paramref name="index"/> with a linearly interpolated time.</returns>
public static TimeTag GenerateCenterTimeTag(TimeTag startTimeTag, TimeTag endTimeTag, TextIndex index)
{
    if (startTimeTag == null || endTimeTag == null)
    {
        throw new ArgumentNullException($"{nameof(startTimeTag)} or {nameof(endTimeTag)} cannot be null.");
    }

    if (startTimeTag.Index > endTimeTag.Index)
    {
        throw new InvalidOperationException($"{nameof(startTimeTag)} index cannot be larger than {nameof(endTimeTag)} index.");
    }

    // FIX: this guard previously reused the message of the guard above verbatim,
    // even though it checks a different condition (index outside the tag range).
    if (index < startTimeTag.Index || index > endTimeTag.Index)
    {
        throw new InvalidOperationException($"{nameof(index)} must be within the range of {nameof(startTimeTag)} and {nameof(endTimeTag)}.");
    }

    // If either endpoint has no time yet, interpolation is impossible — return a timeless tag.
    if (startTimeTag.Time == null || endTimeTag.Time == null)
    {
        return new TimeTag(index);
    }

    var diffFromStartToEnd = getTimeCalculationIndex(endTimeTag.Index) - getTimeCalculationIndex(startTimeTag.Index);
    var diffFromStartToNow = getTimeCalculationIndex(index) - getTimeCalculationIndex(startTimeTag.Index);

    // Zero span (or zero offset) degenerates to the start time; also avoids divide-by-zero.
    if (diffFromStartToEnd == 0 || diffFromStartToNow == 0)
    {
        return new TimeTag(index, startTimeTag.Time);
    }

    // Linear interpolation between the start and end times.
    var time = startTimeTag.Time + (endTimeTag.Time - startTimeTag.Time) / diffFromStartToEnd * diffFromStartToNow;

    return new TimeTag(index, time);
}
/// <summary>
/// Initializes a new instance of the <see cref="StateRecordDataPoint"/> class.
/// </summary>
/// <param name="historianID">Historian identifier of <see cref="StateRecordDataPoint"/>.</param>
/// <param name="time"><see cref="TimeTag"/> of <see cref="StateRecordDataPoint"/>.</param>
/// <param name="value">Floating-point value of <see cref="StateRecordDataPoint"/>.</param>
/// <param name="quality"><see cref="Quality"/> of <see cref="StateRecordDataPoint"/>.</param>
public StateRecordDataPoint(int historianID, TimeTag time, float value, Quality quality)
    : base(historianID, time, value, quality)
{
    // All initialization is handled by the base class; no additional state to set here.
}
/// <summary>
/// Initializes a new instance of the <see cref="PacketType1"/> class.
/// </summary>
/// <param name="measurement">Object that implements the <see cref="IMeasurement"/> interface.</param>
public PacketType1(IMeasurement measurement)
    : this()
{
    // Map the measurement onto the historian packet fields.
    // NOTE(review): (int)measurement.Key.ID is a narrowing cast of the key ID — confirm range.
    HistorianID = (int)measurement.Key.ID;
    Time = new TimeTag((DateTime)measurement.Timestamp);
    Value = (float)measurement.AdjustedValue;
    Quality = measurement.HistorianQuality();
}
/// <summary>
/// Predicate: true when the historic file's [start, end] time span contains the search time.
/// </summary>
private bool FindHistoricArchiveFileForWrite(Info fileInfo, TimeTag searchTime)
{
    // No file info means no match.
    if ((object)fileInfo == null)
        return false;

    // Inclusive containment check on both boundaries.
    return searchTime.CompareTo(fileInfo.StartTimeTag) >= 0 &&
           searchTime.CompareTo(fileInfo.EndTimeTag) <= 0;
}
/// <summary>
/// Returns an <see cref="ArchiveDataBlock"/> for writing <see cref="ArchiveDataPoint"/>s for the specified <paramref name="historianID"/>.
/// </summary>
/// <param name="historianID">Historian identifier for which the <see cref="ArchiveDataBlock"/> is being requested.</param>
/// <param name="dataTime"><see cref="TimeTag"/> of the <see cref="ArchiveDataPoint"/> to be written to the <see cref="ArchiveDataBlock"/>.</param>
/// <param name="blockIndex"><see cref="ArchiveDataBlock.Index"/> of the <see cref="ArchiveDataBlock"/> last used for writing <see cref="ArchiveDataPoint"/>s for the <paramref name="historianID"/>.</param>
/// <returns><see cref="ArchiveDataBlock"/> object if available; otherwise null if all <see cref="ArchiveDataBlock"/>s have been allocated.</returns>
internal ArchiveDataBlock RequestDataBlock(int historianID, TimeTag dataTime, int blockIndex)
{
    ArchiveDataBlock dataBlock = null;
    ArchiveDataBlockPointer dataBlockPointer = null;

    if (blockIndex >= 0 && blockIndex < m_dataBlockCount)
    {
        // Valid data block index is specified, so retrieve the corresponding data block.
        lock (m_dataBlockPointers)
        {
            dataBlockPointer = m_dataBlockPointers[blockIndex];
        }

        dataBlock = dataBlockPointer.DataBlock;

        if (!dataBlockPointer.IsAllocated && dataBlock.SlotsUsed > 0)
        {
            // Clear existing data from the data block since it is unallocated.
            dataBlock.Reset();
        }
        else if (dataBlockPointer.IsAllocated && (dataBlockPointer.HistorianID != historianID || (dataBlockPointer.HistorianID == historianID && dataBlock.SlotsAvailable == 0)))
        {
            // Search for a new data block since the suggested data block cannot be used
            // (it belongs to another point, or it belongs to this point but is full).
            blockIndex = -1;
        }
    }

    if (blockIndex < 0)
    {
        // Negative data block index is specified indicating a search must be performed for a data block.
        dataBlock = FindLastDataBlock(historianID);

        if ((object)dataBlock != null && dataBlock.SlotsAvailable == 0)
        {
            // Previously used data block is full.
            dataBlock = null;
        }

        if ((object)dataBlock == null)
        {
            // Look for the first unallocated data block.
            dataBlock = FindDataBlock(-1);

            if ((object)dataBlock == null)
            {
                // Extend the file for historic writes only.
                if (m_parent.FileType == ArchiveFileType.Historic)
                {
                    Extend();
                    dataBlock = m_dataBlockPointers[m_dataBlockPointers.Count - 1].DataBlock;
                }
            }
            else
            {
                // Reset the unallocated data block if there is data in it.
                if (dataBlock.SlotsUsed > 0)
                    dataBlock.Reset();
            }
        }

        // Get the pointer to the data block so that its information can be updated if necessary.
        if ((object)dataBlock == null)
        {
            dataBlockPointer = null;
        }
        else
        {
            lock (m_dataBlockPointers)
            {
                dataBlockPointer = m_dataBlockPointers[dataBlock.Index];
            }
        }
    }

    if ((object)dataBlockPointer != null && !dataBlockPointer.IsAllocated)
    {
        // Mark the data block as allocated.
        dataBlockPointer.HistorianID = historianID;
        dataBlockPointer.StartTime = dataTime;

        // Set the file start time if not set.
        if (m_fileStartTime == TimeTag.MinValue)
            m_fileStartTime = dataTime;

        // Persist data block information to disk.
        lock (m_parent.FileData)
        {
            // We'll write information about the just allocated data block to the file.
            m_parent.FileData.Seek(DataLength + ArrayDescriptorLength + (dataBlockPointer.Index * ArchiveDataBlockPointer.FixedLength), SeekOrigin.Begin);
            dataBlockPointer.CopyBinaryImageToStream(m_parent.FileData);

            // We'll also write the fixed part of the FAT data that resides at the end.
            m_parent.FileData.Seek(-m_fixedTableRegion.BinaryLength, SeekOrigin.End);

            // Copy generated binary image to stream
            m_fixedTableRegion.CopyBinaryImageToStream(m_parent.FileData);

            if (!m_parent.CacheWrites)
                m_parent.FileData.Flush();
        }

        // Re-fetch the data block with updated information after allocation.
        dataBlock = dataBlockPointer.DataBlock;
    }

    return dataBlock;
}
// NOTE(review): this constructor belongs to a user control named "DateTime",
// which shadows System.DateTime in this namespace — consider renaming the class.
public DateTime()
{
    InitializeComponent();

    // Show the current date/time in the textbox ("今天是:" means "Today is:").
    timeTB.Text = "今天是:" + TimeTag.GetTime2();

    // Refresh the display via a 60-second background thread helper.
    // NOTE(review): semantics of the two boolean flags on ThreadProperty are not
    // visible here — confirm against its definition.
    updateTimeTP = new ThreadProperty(60000, false, false, update, this);
}
/// <summary>
/// Returns all <see cref="ArchiveDataBlock"/>s in the <see cref="ArchiveFile"/> for the specified <paramref name="historianID"/> with <see cref="ArchiveDataPoint"/> points later than the specified <paramref name="startTime"/>.
/// </summary>
/// <param name="historianID">Historian identifier.</param>
/// <param name="startTime">Start <see cref="TimeTag"/>.</param>
/// <param name="preRead">true to pre-read data to locate write cursor.</param>
/// <returns>A collection of <see cref="ArchiveDataBlock"/>s.</returns>
public List<ArchiveDataBlock> FindDataBlocks(int historianID, TimeTag startTime, bool preRead = true) =>
    // Delegate to the full overload with an open-ended upper bound.
    FindDataBlocks(historianID, startTime, TimeTag.MaxValue, preRead);
/// <summary>
/// Initializes a new instance of the <see cref="ArchiveDataPoint"/> class.
/// </summary>
/// <param name="historianID">Historian identifier of <see cref="ArchiveDataPoint"/>.</param>
public ArchiveDataPoint(int historianID)
{
    // Default the timestamp to the minimum value until one is assigned.
    m_time = TimeTag.MinValue;
    HistorianID = historianID;
}
/// <summary>
/// Initializes a new instance of the <see cref="ArchiveDataPoint"/> class.
/// </summary>
/// <param name="historianID">Historian identifier of <see cref="ArchiveDataPoint"/>.</param>
/// <param name="time"><see cref="TimeTag"/> of <see cref="ArchiveDataPoint"/>.</param>
/// <param name="value">Floating-point value of <see cref="ArchiveDataPoint"/>.</param>
/// <param name="quality"><see cref="Quality"/> of <see cref="ArchiveDataPoint"/>.</param>
public ArchiveDataPoint(int historianID, TimeTag time, float value, Quality quality)
    : this(historianID)
{
    // Chained constructor handles the ID; assign the remaining point data here.
    Time = time;
    Value = value;
    Quality = quality;
}
/// <summary>
/// Reads <see cref="ArchiveDataPoint"/>s from the <see cref="ArchiveFile"/>.
/// </summary>
/// <param name="historianID">Historian identifier for which <see cref="ArchiveDataPoint"/>s are to be retrieved.</param>
/// <param name="startTime">Start <see cref="TimeTag"/> (in UTC) for the <see cref="ArchiveDataPoint"/>s to be retrieved.</param>
/// <param name="endTime">End <see cref="TimeTag"/> (in UTC) for the <see cref="ArchiveDataPoint"/>s to be retrieved.</param>
/// <param name="timeSorted">Indicates whether the data retrieved from the archive should be time sorted.</param>
/// <returns><see cref="IEnumerable{T}"/> object containing zero or more <see cref="ArchiveDataPoint"/>s.</returns>
public IEnumerable<IDataPoint> ReadData(int historianID, TimeTag startTime, TimeTag endTime, bool timeSorted = true) =>
    // Delegate to the multi-ID overload with a single-element array.
    ReadData(new[] { historianID }, startTime, endTime, timeSorted);
/// <summary>
/// Reads <see cref="ArchiveDataPoint"/>s from the <see cref="ArchiveFile"/>.
/// </summary>
/// <param name="historianID">Historian identifier for which <see cref="ArchiveDataPoint"/>s are to be retrieved.</param>
/// <param name="startTime">Start <see cref="TimeTag"/> (in UTC) for the <see cref="ArchiveDataPoint"/>s to be retrieved.</param>
/// <param name="timeSorted">Indicates whether the data retrieved from the archive should be time sorted.</param>
/// <returns><see cref="IEnumerable{T}"/> object containing zero or more <see cref="ArchiveDataPoint"/>s.</returns>
public IEnumerable<IDataPoint> ReadData(int historianID, TimeTag startTime, bool timeSorted = true) =>
    // Delegate to the bounded overload with an open-ended upper bound.
    ReadData(historianID, startTime, TimeTag.MaxValue, timeSorted);
/// <summary>
/// Creates a new instance of the <see cref="TimeSortedDataPointScanner"/>.
/// </summary>
/// <param name="dataBlockAllocationTable"><see cref="ArchiveFileAllocationTable"/> for the file to be scanned.</param>
/// <param name="historianIDs">Historian ID's to scan.</param>
/// <param name="startTime">Desired start time.</param>
/// <param name="endTime">Desired end time.</param>
/// <param name="lastHistorianID">Last read historian ID, or -1 to begin scan at first ID.</param>
/// <param name="dataReadExceptionHandler">Read exception handler.</param>
public TimeSortedDataPointScanner(ArchiveFileAllocationTable dataBlockAllocationTable, IEnumerable<int> historianIDs, TimeTag startTime, TimeTag endTime, int lastHistorianID, EventHandler<EventArgs<Exception>> dataReadExceptionHandler)
{
    m_dataPointScanners = new List<DataPointScanner>();

    // Create data point scanners for each historian ID.
    // Resume semantics: while lastHistorianID is not -1, IDs are skipped (including
    // the match itself); the match resets the sentinel so every subsequent ID gets
    // a scanner — i.e., reading restarts at the ID AFTER the last one read.
    foreach (int historianID in historianIDs)
    {
        // Start scan when last Historian ID is -1, otherwise if last historian ID is encountered, start reading at next point
        if (lastHistorianID == -1)
            m_dataPointScanners.Add(new DataPointScanner(dataBlockAllocationTable, historianID, startTime, endTime, dataReadExceptionHandler));
        else if (lastHistorianID == historianID)
            lastHistorianID = -1;
    }
}
/// <summary>
/// Initializes <see cref="ArchiveDataBlockPointer"/> from the specified <paramref name="binaryImage"/>.
/// </summary>
/// <param name="binaryImage">Binary image to be used for initializing <see cref="ArchiveDataBlockPointer"/>.</param>
/// <param name="startIndex">0-based starting index of initialization data in the <paramref name="binaryImage"/>.</param>
/// <param name="length">Valid number of bytes in <paramref name="binaryImage"/> from <paramref name="startIndex"/>.</param>
/// <returns>Number of bytes used from the <paramref name="binaryImage"/> for initializing <see cref="ArchiveDataBlockPointer"/>.</returns>
public int Initialize(byte[] binaryImage, int startIndex, int length)
{
    // FIX: "length" is documented as the number of valid bytes available AT
    // startIndex, so it must be compared to ByteCount directly. The previous
    // check (length - startIndex >= ByteCount) double-counted the offset,
    // rejecting valid images whenever startIndex > 0 — inconsistent with every
    // sibling parser, which uses "length >= FixedLength".
    if (length >= ByteCount)
    {
        // Binary image has sufficient data: historian ID (4 bytes) then start time (8 bytes).
        HistorianID = EndianOrder.LittleEndian.ToInt32(binaryImage, startIndex);
        StartTime = new TimeTag(EndianOrder.LittleEndian.ToDouble(binaryImage, startIndex + 4));
        return ByteCount;
    }

    // Binary image does not have sufficient data.
    return 0;
}
/// <summary>
/// Deallocates the <see cref="DataBlock"/> to store new <see cref="ArchiveData"/>.
/// </summary>
public void Reset()
{
    // -1 marks the pointer as unowned (no historian point assigned);
    // the start time reverts to the minimum "unset" value.
    m_historianID = -1;
    m_startTime = TimeTag.MinValue;
}
/// <summary>
/// Initializes <see cref="ArchiveDataPoint"/> from the specified <paramref name="buffer"/>.
/// </summary>
/// <param name="buffer">Binary image to be used for initializing <see cref="ArchiveDataPoint"/>.</param>
/// <param name="startIndex">0-based starting index of initialization data in the <paramref name="buffer"/>.</param>
/// <param name="length">Valid number of bytes in <paramref name="buffer"/> from <paramref name="startIndex"/>.</param>
/// <returns>Number of bytes used from the <paramref name="buffer"/> for initializing <see cref="ArchiveDataPoint"/>.</returns>
public virtual int ParseBinaryImage(byte[] buffer, int startIndex, int length)
{
    if (length >= FixedLength)
    {
        // Binary image has sufficient data.
        // Layout: seconds (bytes 0-3), flags (bytes 4-5), value (bytes 6-9);
        // milliseconds are packed into the flags and extracted via the mask below.
        Flags = LittleEndian.ToInt16(buffer, startIndex + 4);
        Value = LittleEndian.ToSingle(buffer, startIndex + 6);
        Time = new TimeTag(LittleEndian.ToInt32(buffer, startIndex) +            // Seconds
            ((decimal)(m_flags.GetMaskedValue(MillisecondMask) >> 5) / 1000));   // Milliseconds
        return FixedLength;
    }
    else
    {
        // Binary image does not have sufficient data.
        return 0;
    }
}
/// <summary>
/// Initializes <see cref="ArchiveDataPoint"/> from the specified <paramref name="buffer"/>.
/// </summary>
/// <param name="buffer">Binary image to be used for initializing <see cref="ArchiveDataPoint"/>.</param>
/// <param name="startIndex">0-based starting index of initialization data in the <paramref name="buffer"/>.</param>
/// <param name="length">Valid number of bytes in <paramref name="buffer"/> from <paramref name="startIndex"/>.</param>
/// <returns>Number of bytes used from the <paramref name="buffer"/> for initializing <see cref="ArchiveDataPoint"/>; zero if the image has insufficient data.</returns>
public virtual int ParseBinaryImage(byte[] buffer, int startIndex, int length)
{
    if (length >= FixedLength)
    {
        // Binary image has sufficient data.
        // Flags must be parsed first: the millisecond portion of the timestamp is
        // packed into the flags word and is read back out of m_flags just below.
        Flags = LittleEndian.ToInt16(buffer, startIndex + 4);
        Value = LittleEndian.ToSingle(buffer, startIndex + 6);
        // Timestamp = whole seconds (32-bit, at offset 0) plus the masked millisecond
        // bits of the flags word (shifted down 5) scaled to fractional seconds.
        TimeTag value = new TimeTag(LittleEndian.ToInt32(buffer, startIndex) + // Seconds
            ((decimal)(m_flags.GetMaskedValue(MillisecondMask) >> 5) / 1000)); // Milliseconds

        // Make sure to properly validate timestamps for newly initialized or possibly
        // corrupted blocks: anything outside the representable TimeTag range collapses
        // to TimeTag.MinValue rather than propagating garbage.
        if (value.CompareTo(TimeTag.MinValue) < 0 || value.CompareTo(TimeTag.MaxValue) > 0)
            value = TimeTag.MinValue;

        Time = value;
        return FixedLength;
    }

    // Binary image does not have sufficient data.
    return 0;
}
/// <summary>
/// Reads <see cref="ArchiveData"/> points.
/// </summary>
/// <param name="historianID">Historian identifier for which <see cref="ArchiveData"/> points are to be retrieved.</param>
/// <param name="startTime">Start <see cref="TimeTag"/> (in GMT) for the <see cref="ArchiveData"/> points to be retrieved.</param>
/// <param name="endTime">End <see cref="TimeTag"/> (in GMT) for the <see cref="ArchiveData"/> points to be retrieved.</param>
/// <returns><see cref="IEnumerable{T}"/> object containing zero or more <see cref="ArchiveData"/> points.</returns>
public IEnumerable<IDataPoint> ReadData(int historianID, TimeTag startTime, TimeTag endTime)
{
    // Ensure that the current file is open.
    if (!IsOpen)
        throw new InvalidOperationException(string.Format("\"{0}\" file is not open.", m_fileName));

    // Ensure that the current file is active.
    if (m_fileType != ArchiveFileType.Active)
        throw new InvalidOperationException("Data can only be directly read from files that are Active.");

    // Ensure that the start and end time are valid.
    if (startTime > endTime)
        throw new ArgumentException("End Time preceeds Start Time in the specified timespan.");

    // Yield to the rollover process if it is in progress.
    m_rolloverWaitHandle.WaitOne();

    // Collect the set of archive files (historic and/or active) whose time spans
    // intersect the requested range.
    List<Info> dataFiles = new List<Info>();

    if (startTime < m_fat.FileStartTime)
    {
        // Data is to be read from historic file(s), so the historic file list must be ready.
        if (m_buildHistoricFileListThread.IsAlive)
            m_buildHistoricFileListThread.Join();

        // NOTE(review): the search bounds are member fields consumed by the
        // FindHistoricArchiveFileForRead predicate, so concurrent callers of this
        // method could stomp each other's criteria -- confirm external synchronization.
        m_readSearchStartTimeTag = startTime;
        m_readSearchEndTimeTag = endTime;

        lock (m_historicArchiveFiles)
        {
            dataFiles.AddRange(m_historicArchiveFiles.FindAll(FindHistoricArchiveFileForRead));
        }
    }

    if (endTime >= m_fat.FileStartTime)
    {
        // Data is to be read from the active file.
        Info activeFileInfo = new Info();
        activeFileInfo.FileName = m_fileName;
        activeFileInfo.StartTimeTag = m_fat.FileStartTime;
        activeFileInfo.EndTimeTag = m_fat.FileEndTime;
        dataFiles.Add(activeFileInfo);
    }

    // Read data from all qualifying files.
    foreach (Info dataFile in dataFiles)
    {
        ArchiveFile file = new ArchiveFile();
        IList<ArchiveDataBlock> dataBlocks;
        try
        {
            file.FileName = dataFile.FileName;
            file.StateFile = m_stateFile;
            file.IntercomFile = m_intercomFile;
            file.MetadataFile = m_metadataFile;
            file.Open();
            dataBlocks = file.Fat.FindDataBlocks(historianID, startTime, endTime);
            if (dataBlocks.Count > 0)
            {
                // Data block before the first data block matching the search criteria might contain some data
                // for the specified search criteria, so look for such a data block and process its data.
                lock (file.Fat.DataBlockPointers)
                {
                    // Walk backwards from the block just before the first match until a block
                    // belonging to this historian ID is found, then filter it by start time.
                    for (int i = dataBlocks[0].Index - 1; i >= 0; i--)
                    {
                        if (file.Fat.DataBlockPointers[i].HistorianID == historianID)
                        {
                            foreach (ArchiveData data in file.Fat.DataBlockPointers[i].DataBlock.Read())
                            {
                                if (data.Time >= startTime)
                                    yield return data;
                            }
                            break;
                        }
                    }
                }

                // Read data from rest of the data blocks and scan the last data block for data matching
                // the search criteria as it may contain data beyond the timespan specified in the search.
                for (int i = 0; i < dataBlocks.Count; i++)
                {
                    if (i < dataBlocks.Count - 1)
                    {
                        // Read all the data (interior blocks are fully inside the range).
                        foreach (ArchiveData data in dataBlocks[i].Read())
                        {
                            yield return data;
                        }
                    }
                    else
                    {
                        // Scan through the last data block, stopping at the first point past endTime.
                        foreach (ArchiveData data in dataBlocks[i].Read())
                        {
                            if (data.Time <= endTime)
                                yield return data;
                            else
                                yield break;
                        }
                    }
                }
            }
        }
        finally
        {
            // Always release the file handle, even when the caller abandons the iterator.
            if (file.IsOpen)
            {
                file.Close();
            }
        }
    }
}
// Process next data read: called on each timer tick while holding m_readTimer as a gate
// so overlapping pooled-timer callbacks skip the read instead of interleaving.
private void m_readTimer_Elapsed(object sender, ElapsedEventArgs e)
{
    List<IMeasurement> measurements = new List<IMeasurement>();
    if (Monitor.TryEnter(m_readTimer))
    {
        try
        {
            IDataPoint currentPoint = m_dataReader.Current;
            long timestamp = currentPoint.Time.ToDateTime().Ticks;
            MeasurementKey key;
            if (m_publicationTime == 0)
            {
                m_publicationTime = timestamp;
            }
            // Set next reasonable publication time (advance in whole publication intervals
            // until it reaches or passes the current point's timestamp).
            while (m_publicationTime < timestamp)
            {
                m_publicationTime += m_publicationInterval;
            }
            do
            {
                // Lookup measurement key for this point
                key = MeasurementKey.LookUpOrCreate(m_instanceName, unchecked((uint)currentPoint.HistorianID));
                // Add current measurement to the collection for publication
                measurements.Add(new Measurement
                {
                    Metadata = key.Metadata,
                    Timestamp = m_simulateTimestamp ? DateTime.UtcNow.Ticks : timestamp,
                    Value = currentPoint.Value,
                    StateFlags = currentPoint.Quality.MeasurementQuality()
                });
                // Attempt to move to next record
                if (m_dataReader.MoveNext())
                {
                    // Read record value
                    currentPoint = m_dataReader.Current;
                    timestamp = currentPoint.Time.ToDateTime().Ticks;
                }
                else
                {
                    if (timestamp < m_stopTime.ToDateTime().Ticks && m_startTime.ToDateTime().Ticks < timestamp)
                    {
                        // Could be attempting read with a future end time - in these cases attempt to re-read current data
                        // from now to end time in case any new data has been archived in the mean-time
                        m_startTime = new TimeTag(timestamp + Ticks.PerMillisecond);
                        m_dataReader = m_archiveReader.ReadData(m_historianIDs, m_startTime, m_stopTime).GetEnumerator();
                        if (!m_dataReader.MoveNext())
                        {
                            // Finished reading all available data
                            m_readTimer.Enabled = false;
                            if (m_autoRepeat)
                            {
                                // Restart the read from the beginning on a pool thread.
                                ThreadPool.QueueUserWorkItem(StartDataReader);
                            }
                            else
                            {
                                OnProcessingComplete();
                            }
                        }
                    }
                    else
                    {
                        // Finished reading all available data
                        m_readTimer.Enabled = false;
                        if (m_autoRepeat)
                        {
                            ThreadPool.QueueUserWorkItem(StartDataReader);
                        }
                        else
                        {
                            OnProcessingComplete();
                        }
                    }
                    break;
                }
            }
            while (timestamp <= m_publicationTime);
        }
        catch (InvalidOperationException)
        {
            // Pooled timer thread executed after last read, verify timer has stopped
            m_readTimer.Enabled = false;
        }
        finally
        {
            Monitor.Exit(m_readTimer);
        }
    }
    // Publish all measurements for this time interval
    if (measurements.Count > 0)
    {
        OnNewMeasurements(measurements);
    }
}
// Read data implementation: lazily yields points for the given historian IDs over the
// given range, reading from historic files and/or the active file as needed. Cooperates
// with the rollover process -- if a rollover starts mid-read, the iterator releases its
// read claim, remembers the last point yielded, and recurses to resume after rollover.
private IEnumerable<IDataPoint> ReadData(IEnumerable<int> historianIDs, TimeTag startTime, TimeTag endTime, IDataPoint resumeFrom, bool timeSorted)
{
    // Yield to archive rollover process.
    m_rolloverWaitHandle.WaitOne();

    // Ensure that the current file is open.
    if (!IsOpen)
        throw new InvalidOperationException(string.Format("\"{0}\" file is not open", m_fileName));

    // Ensure that the current file is active.
    if (m_fileType != ArchiveFileType.Active)
        throw new InvalidOperationException("Data can only be directly read from files that are Active");

    // Ensure that the start and end time are valid.
    if (startTime.CompareTo(endTime) > 0)
        throw new ArgumentException("End Time precedes Start Time in the specified time span");

    List<Info> dataFiles = new List<Info>();
    bool pendingRollover = false;
    bool usingActiveFile = false;

    if (startTime.CompareTo(m_fat.FileStartTime) < 0)
    {
        // Data is to be read from historic file(s) - make sure that the list has been built
        if (m_buildHistoricFileListThread.IsAlive)
            m_buildHistoricFileListThread.Join();

        lock (m_historicArchiveFiles)
        {
            dataFiles.AddRange(m_historicArchiveFiles.FindAll(info => FindHistoricArchiveFileForRead(info, startTime, endTime)));
        }
    }

    if (endTime.CompareTo(m_fat.FileStartTime) >= 0)
    {
        // Data is to be read from the active file.
        Info activeFileInfo = new Info(m_fileName)
        {
            StartTimeTag = m_fat.FileStartTime,
            EndTimeTag = m_fat.FileEndTime
        };
        dataFiles.Add(activeFileInfo);
    }

    // Read data from all qualifying files.
    foreach (Info dataFile in dataFiles)
    {
        ArchiveFile file = null;
        try
        {
            if (string.Compare(dataFile.FileName, m_fileName, StringComparison.OrdinalIgnoreCase) == 0)
            {
                // Read data from current file.
                usingActiveFile = true;
                file = this;

                // Atomically increment total number of readers for active file
                Interlocked.Increment(ref m_activeFileReaders);

                // Handle race conditions between rollover and incrementing the active
                // readers: back out of the read claim, wait for the rollover to finish,
                // then re-claim and re-check.
                while (m_rolloverInProgress)
                {
                    Interlocked.Decrement(ref m_activeFileReaders);
                    m_rolloverWaitHandle.WaitOne();
                    Interlocked.Increment(ref m_activeFileReaders);
                }
            }
            else
            {
                // Read data from historic file.
                usingActiveFile = false;
                file = new ArchiveFile();
                file.FileName = dataFile.FileName;
                file.StateFile = m_stateFile;
                file.IntercomFile = m_intercomFile;
                file.MetadataFile = m_metadataFile;
                file.FileAccessMode = FileAccess.Read;
                file.Open();
            }

            // Create new data point scanner for the desired points in this file and given time range
            IArchiveFileScanner scanner;

            if (timeSorted)
                scanner = new TimeSortedArchiveFileScanner();
            else
                scanner = new ArchiveFileScanner();

            scanner.FileAllocationTable = file.Fat;
            scanner.HistorianIDs = historianIDs;
            scanner.StartTime = startTime;
            scanner.EndTime = endTime;
            scanner.ResumeFrom = resumeFrom;
            scanner.DataReadExceptionHandler = (sender, e) => OnDataReadException(e.Argument);

            // Reset resumeFrom to scan from beginning after picking up where left off from roll over
            resumeFrom = null;

            // Return data points
            foreach (IDataPoint dataPoint in scanner.Read())
            {
                yield return dataPoint;

                // If a rollover needs to happen, we need to relinquish read lock and close file
                if (m_rolloverInProgress)
                {
                    resumeFrom = dataPoint;
                    pendingRollover = true;
                    break;
                }
            }
        }
        finally
        {
            // Runs even when the caller abandons the iterator early.
            if (usingActiveFile)
            {
                // Atomically decrement active file reader count to signal in-process code that read is complete or yielded
                Interlocked.Decrement(ref m_activeFileReaders);
            }
            else if ((object)file != null)
            {
                file.Dispose();
            }
        }

        if (pendingRollover)
            break;
    }

    if (pendingRollover)
    {
        // Recurse into this function with the last read point so that the read can resume
        // right where it left off - the recursed call will wait until rollover is complete
        // (via the wait handle at the top) before continuing.
        foreach (IDataPoint dataPoint in ReadData(historianIDs, startTime, endTime, resumeFrom, timeSorted))
        {
            yield return dataPoint;
        }
    }
}
// Decodes a raw OSC packet (message or "#bundle") from data[offset..offset+length) and
// enqueues each decoded OSCMessage onto outQueue. Bundles recurse: each contained element
// is decoded with the bundle's time tag. All multi-byte integers are big-endian per OSC.
// NOTE(review): no bounds checking is performed against length while scanning strings,
// so a truncated/malformed packet can index past the intended region -- confirm callers
// only pass well-formed packets.
public static void DecodeOSCInto(ConcurrentQueue<OSCMessage> outQueue, byte[] data, int offset, int length, IPEndPoint senderIp = null, uint bundleId = 0, TimeTag bundleTimetag = new TimeTag(), uint bundleIdNested = 0)
{
    // Bundle detection: only at the very start of the packet, marked by "#bundle\0".
    if (offset == 0 && length > 20 && data[offset] == '#' && data[offset + 1] == 'b' && data[offset + 2] == 'u' && data[offset + 3] == 'n' && data[offset + 4] == 'd' && data[offset + 5] == 'l' && data[offset + 6] == 'e' && data[offset + 7] == 0)
    {
        // 64-bit NTP-style time tag: seconds, then fractional part.
        int secs = IPAddress.NetworkToHostOrder(BitConverter.ToInt32(data, offset + 8));
        int nanosecs = IPAddress.NetworkToHostOrder(BitConverter.ToInt32(data, offset + 12));
        bundleTimetag = new TimeTag { secs = secs, nsecs = nanosecs };
        offset += 16;
        // Each bundle element is a 4-byte big-endian size followed by the element bytes.
        while (offset < length)
        {
            int msglen = IPAddress.NetworkToHostOrder(BitConverter.ToInt32(data, offset));
            offset += 4;
            // NOTE(review): bundleId is passed for the nested-id parameter here
            // (not bundleIdNested) -- presumably intentional so children carry the
            // outer bundle's ID, but confirm against the consumers of msg.bundleId.
            DecodeOSCInto(outQueue, data, offset, msglen, senderIp, bundleId, bundleTimetag, bundleId);
            offset += msglen;
        }
        return;
    }
    OSCMessage msg = new OSCMessage();
    msg.time = bundleTimetag;
    msg.sender = senderIp;
    msg.bundleId = bundleIdNested;
    // Address pattern: NUL-terminated string, then pad offset up to a 4-byte boundary.
    int strlen = 0;
    while (data[offset + strlen] != 0)
    {
        strlen++;
    }
    msg.path = System.Text.Encoding.UTF8.GetString(data, offset, strlen);
    offset += strlen;
    offset = (offset + 4) & ~3;
    // Type tag string starts at ',' and runs to the next NUL; offset is then re-padded.
    while (data[offset] != ',')
    {
        offset++;
    }
    int typetags = offset;
    while (data[offset] != 0)
    {
        offset++;
    }
    msg.typeTag = System.Text.Encoding.ASCII.GetString(data, typetags, offset - typetags);
    offset = (offset + 4) & ~3;
    //msg.arguments = new object[msg.typeTag.Length];
    // Arguments are parsed into a stack of lists so '['...']' type tags build nested arrays;
    // index 0 is always the top-level argument list.
    List<object> topLevelArguments = new List<object>();
    List<List<object>> nested = new List<List<object>>();
    nested.Add(topLevelArguments);
    for (int i = 1; i < msg.typeTag.Length; i++) // start at 1 to skip the leading ','
    {
        // Debug.Log("doing type tag " + msg.typeTag[i] + " offset: " + offset);
        object obj;
        switch (msg.typeTag[i])
        {
            case '[':
                // Open a new nested array level.
                nested.Add(new List<object>());
                break;
            case ']':
                // Close the current level and append it (as an array) to its parent;
                // an unmatched ']' at the top level is silently ignored here and
                // reported by the count check below.
                if (nested.Count > 1)
                {
                    obj = nested[nested.Count - 1].ToArray();
                    nested.RemoveAt(nested.Count - 1);
                    nested[nested.Count - 1].Add(obj);
                }
                break;
            default:
                // Scalar argument; ParseType advances offset past the value it consumes.
                obj = ParseType(msg.typeTag[i], data, ref offset);
                nested[nested.Count - 1].Add(obj);
                break;
        }
    }
    if (nested.Count != 1)
    {
        CryWolf("Invalid nested count (mismatched start and end array in OSC message): " + msg.typeTag);
    }
    msg.arguments = topLevelArguments.ToArray();
    outQueue.Enqueue(msg);
}
/// <summary>
/// Returns all <see cref="ArchiveDataBlock"/>s in the <see cref="ArchiveFile"/> for the specified <paramref name="historianID"/> with <see cref="ArchiveDataPoint"/>s between the specified <paramref name="startTime"/> and <paramref name="endTime"/>.
/// </summary>
/// <param name="historianID">Historian identifier.</param>
/// <param name="startTime">Start <see cref="TimeTag"/>; null is treated as an open start (<see cref="TimeTag.MinValue"/>).</param>
/// <param name="endTime">End <see cref="TimeTag"/>; null is treated as an open end (<see cref="TimeTag.MaxValue"/>).</param>
/// <param name="preRead">true to pre-read data to locate write cursor.</param>
/// <returns>A collection of <see cref="ArchiveDataBlock"/>s.</returns>
public List<ArchiveDataBlock> FindDataBlocks(int historianID, TimeTag startTime, TimeTag endTime, bool preRead = true)
{
    // BUG FIX: a null start time previously defaulted to TimeTag.MaxValue, which made an
    // open-ended start match essentially nothing and also bypassed the MinValue border
    // check below. An unspecified start must be the earliest representable time.
    if ((object)startTime == null)
        startTime = TimeTag.MinValue;

    if ((object)endTime == null)
        endTime = TimeTag.MaxValue;

    List<ArchiveDataBlockPointer> blockPointers;

    lock (m_dataBlockPointers)
    {
        // Get all block pointers for given point ID over specified time range
        blockPointers = m_dataBlockPointers.FindAll(dataBlockPointer => dataBlockPointer.Matches(historianID, startTime, endTime));

        // Look for pointer to data block on the borders of the specified range which may contain data
        if (!(startTime == TimeTag.MinValue || endTime == TimeTag.MaxValue))
        {
            // There are 2 different search criteria for this:
            // 1) If matching data block pointers have been found, then find the data block pointer
            //    just before the first matching data block pointer.
            // or
            // 2) Find the last data block pointer in the time range of TimeTag.MinValue to searchEndTime.
            TimeTag searchEndTime = endTime;

            if (blockPointers.Count > 0)
                searchEndTime = new TimeTag(blockPointers.First().StartTime.Value - 1.0D);

            ArchiveDataBlockPointer borderMatch = m_dataBlockPointers.LastOrDefault(dataBlockPointer => dataBlockPointer.Matches(historianID, TimeTag.MinValue, searchEndTime));

            if ((object)borderMatch != null)
                blockPointers.Insert(0, borderMatch);
        }
    }

    // Return list of data blocks for given block pointers
    return blockPointers.Select(pointer => pointer.GetDataBlock(preRead)).ToList();
}
/// <summary>
/// Returns all <see cref="ArchiveDataBlock"/>s in the <see cref="ArchiveFile"/> for the specified <paramref name="historianID"/> with <see cref="ArchiveDataPoint"/> points later than the specified <paramref name="startTime"/>.
/// </summary>
/// <param name="historianID">Historian identifier.</param>
/// <param name="startTime">Start <see cref="TimeTag"/>.</param>
/// <param name="preRead">true to pre-read data to locate write cursor.</param>
/// <returns>A collection of <see cref="ArchiveDataBlock"/>s.</returns>
public List<ArchiveDataBlock> FindDataBlocks(int historianID, TimeTag startTime, bool preRead = true)
{
    // Open-ended search: delegate to the ranged overload with the maximum end time.
    return FindDataBlocks(historianID, startTime, TimeTag.MaxValue, preRead);
}
/// <summary>
/// Returns the event as a comma-separated record:
/// timestamp, sensor name, sensor state, occupant ID, activity label, comments.
/// </summary>
public override string ToString()
{
    // The timestamp format string already carries the trailing field separator.
    string timestamp = TimeTag.ToString("MM/dd/yyyy,HH:mm:ss,");
    return timestamp + SensorName + "," + SensorState + "," + OccupantId + "," + ActivityLabel + "," + Comments;
}
/// <summary>
/// Returns an <see cref="ArchiveDataBlock"/> for writing <see cref="ArchiveDataPoint"/>s for the specified <paramref name="historianID"/>.
/// </summary>
/// <param name="historianID">Historian identifier for which the <see cref="ArchiveDataBlock"/> is being requested.</param>
/// <param name="dataTime"><see cref="TimeTag"/> of the <see cref="ArchiveDataPoint"/> to be written to the <see cref="ArchiveDataBlock"/>.</param>
/// <param name="blockIndex"><see cref="ArchiveDataBlock.Index"/> of the <see cref="ArchiveDataBlock"/> last used for writing <see cref="ArchiveDataPoint"/>s for the <paramref name="historianID"/>; pass a negative value to force a search.</param>
/// <returns><see cref="ArchiveDataBlock"/> object if available; otherwise null if all <see cref="ArchiveDataBlock"/>s have been allocated.</returns>
internal ArchiveDataBlock RequestDataBlock(int historianID, TimeTag dataTime, int blockIndex)
{
    ArchiveDataBlock dataBlock = null;
    ArchiveDataBlockPointer dataBlockPointer = null;
    if (blockIndex >= 0 && blockIndex < m_dataBlockCount)
    {
        // Valid data block index is specified, so retrieve the corresponding data block.
        lock (m_dataBlockPointers)
        {
            dataBlockPointer = m_dataBlockPointers[blockIndex];
        }
        dataBlock = dataBlockPointer.DataBlock;
        if (!dataBlockPointer.IsAllocated && dataBlock.SlotsUsed > 0)
        {
            // Clear existing data from the data block since it is unallocated.
            dataBlock.Reset();
        }
        else if (dataBlockPointer.IsAllocated && (dataBlockPointer.HistorianID != historianID || (dataBlockPointer.HistorianID == historianID && dataBlock.SlotsAvailable == 0)))
        {
            // Suggested block is either owned by another point or already full --
            // fall through to the search path below.
            blockIndex = -1;
        }
    }
    if (blockIndex < 0)
    {
        // Negative data block index is specified indicating a search must be performed for a data block.
        // First preference: the block most recently used for this historian ID, if it still has room.
        dataBlock = FindLastDataBlock(historianID);
        if ((object)dataBlock != null && dataBlock.SlotsAvailable == 0)
        {
            // Previously used data block is full.
            dataBlock = null;
        }
        if ((object)dataBlock == null)
        {
            // Look for the first unallocated data block.
            dataBlock = FindDataBlock(-1);
            if ((object)dataBlock == null)
            {
                // Extend the file for historic writes only.
                if (m_parent.FileType == ArchiveFileType.Historic)
                {
                    Extend();
                    dataBlock = m_dataBlockPointers[m_dataBlockPointers.Count - 1].DataBlock;
                }
            }
            else
            {
                // Reset the unallocated data block if there is data in it.
                if (dataBlock.SlotsUsed > 0)
                {
                    dataBlock.Reset();
                }
            }
        }
        // Get the pointer to the data block so that its information can be updated if necessary.
        if ((object)dataBlock == null)
        {
            dataBlockPointer = null;
        }
        else
        {
            lock (m_dataBlockPointers)
            {
                dataBlockPointer = m_dataBlockPointers[dataBlock.Index];
            }
        }
    }
    if ((object)dataBlockPointer != null && !dataBlockPointer.IsAllocated)
    {
        // Mark the data block as allocated.
        dataBlockPointer.HistorianID = historianID;
        dataBlockPointer.StartTime = dataTime;
        // Set the file start time if not set.
        if (m_fileStartTime == TimeTag.MinValue)
        {
            m_fileStartTime = dataTime;
        }
        // Persist data block information to disk.
        lock (m_parent.FileData)
        {
            // We'll write information about the just allocated data block to the file
            // at its slot in the pointer array.
            m_parent.FileData.Seek(DataLength + ArrayDescriptorLength + (dataBlockPointer.Index * ArchiveDataBlockPointer.FixedLength), SeekOrigin.Begin);
            dataBlockPointer.CopyBinaryImageToStream(m_parent.FileData);
            // We'll also write the fixed part of the FAT data that resides at the end.
            m_parent.FileData.Seek(-m_fixedTableRegion.BinaryLength, SeekOrigin.End);
            // Copy generated binary image to stream
            m_fixedTableRegion.CopyBinaryImageToStream(m_parent.FileData);
            if (!m_parent.CacheWrites)
            {
                m_parent.FileData.Flush();
            }
        }
        // Re-fetch the data block with updated information after allocation.
        dataBlock = dataBlockPointer.DataBlock;
    }
    return (dataBlock);
}
/// <summary>
/// Encodes an OSC "#bundle" containing the given messages into <paramref name="data"/>
/// starting at <paramref name="offset"/>; advances <paramref name="offset"/> past the
/// written bytes. Layout: "#bundle\0", 8-byte big-endian time tag, then per message a
/// 4-byte big-endian size prefix followed by the encoded message.
/// </summary>
public static void EncodeOSCBundleInto(byte[] data, ref int offset, List<OSCMessage> packets, TimeTag tt)
{
    // "#bundle" marker, NUL-terminated (8 bytes).
    byte[] header = { (byte)'#', (byte)'b', (byte)'u', (byte)'n', (byte)'d', (byte)'l', (byte)'e', 0 };
    Array.Copy(header, 0, data, offset, 8);
    offset += 8;

    // Time tag: seconds then fractional part, each written big-endian.
    Array.Copy(BitConverter.GetBytes(IPAddress.HostToNetworkOrder((int)tt.secs)), 0, data, offset, 4);
    Array.Copy(BitConverter.GetBytes(IPAddress.HostToNetworkOrder((int)tt.nsecs)), 0, data, offset + 4, 4);
    offset += 8;

    foreach (OSCMessage message in packets)
    {
        // Reserve 4 bytes for the element size, encode the message, then back-fill
        // the size once the encoded length is known.
        int lengthSlot = offset;
        offset += 4;
        EncodeOSCInto(data, ref offset, message);
        int elementSize = offset - lengthSlot - 4;
        Array.Copy(BitConverter.GetBytes(IPAddress.HostToNetworkOrder(elementSize)), 0, data, lengthSlot, 4);
    }
}
/// <summary>
/// Predicate used to select historic archive files whose time span intersects the
/// requested [startTime, endTime] range.
/// </summary>
private bool FindHistoricArchiveFileForRead(Info fileInfo, TimeTag startTime, TimeTag endTime)
{
    // A missing file entry can never match.
    if ((object)fileInfo == null)
        return false;

    // Requested start falls inside the file's span.
    bool startInsideFile = startTime.CompareTo(fileInfo.StartTimeTag) >= 0 && startTime.CompareTo(fileInfo.EndTimeTag) <= 0;

    // Requested end falls inside the file's span.
    bool endInsideFile = endTime.CompareTo(fileInfo.StartTimeTag) >= 0 && endTime.CompareTo(fileInfo.EndTimeTag) <= 0;

    // Requested range fully covers the file's span.
    bool rangeCoversFile = startTime.CompareTo(fileInfo.StartTimeTag) < 0 && endTime.CompareTo(fileInfo.EndTimeTag) > 0;

    return startInsideFile || endInsideFile || rangeCoversFile;
}
/// <summary>
/// Returns all <see cref="ArchiveDataBlock"/>s in the <see cref="ArchiveFile"/> for the specified <paramref name="historianID"/> with <see cref="ArchiveDataPoint"/>s between the specified <paramref name="startTime"/> and <paramref name="endTime"/>.
/// </summary>
/// <param name="historianID">Historian identifier.</param>
/// <param name="startTime">Start <see cref="TimeTag"/>; null is treated as an open start (<see cref="TimeTag.MinValue"/>).</param>
/// <param name="endTime">End <see cref="TimeTag"/>; null is treated as an open end (<see cref="TimeTag.MaxValue"/>).</param>
/// <param name="preRead">true to pre-read data to locate write cursor.</param>
/// <returns>A collection of <see cref="ArchiveDataBlock"/>s.</returns>
public List<ArchiveDataBlock> FindDataBlocks(int historianID, TimeTag startTime, TimeTag endTime, bool preRead = true)
{
    // BUG FIX: a null start time previously defaulted to TimeTag.MaxValue, which made an
    // open-ended start match essentially nothing and also bypassed the MinValue border
    // check below. An unspecified start must be the earliest representable time.
    if ((object)startTime == null)
    {
        startTime = TimeTag.MinValue;
    }
    if ((object)endTime == null)
    {
        endTime = TimeTag.MaxValue;
    }
    List<ArchiveDataBlockPointer> blockPointers;
    lock (m_dataBlockPointers)
    {
        // Get all block pointers for given point ID over specified time range
        blockPointers = m_dataBlockPointers.FindAll(dataBlockPointer => dataBlockPointer.Matches(historianID, startTime, endTime));
        // Look for pointer to data block on the borders of the specified range which may contain data
        if (!(startTime == TimeTag.MinValue || endTime == TimeTag.MaxValue))
        {
            // There are 2 different search criteria for this:
            // 1) If matching data block pointers have been found, then find the data block pointer
            //    just before the first matching data block pointer.
            // or
            // 2) Find the last data block pointer in the time range of TimeTag.MinValue to searchEndTime.
            TimeTag searchEndTime = endTime;
            if (blockPointers.Count > 0)
            {
                searchEndTime = new TimeTag(blockPointers.First().StartTime.Value - 1.0D);
            }
            ArchiveDataBlockPointer borderMatch = m_dataBlockPointers.LastOrDefault(dataBlockPointer => dataBlockPointer.Matches(historianID, TimeTag.MinValue, searchEndTime));
            if ((object)borderMatch != null)
            {
                blockPointers.Insert(0, borderMatch);
            }
        }
    }
    // Return list of data blocks for given block pointers
    return (blockPointers.Select(pointer => pointer.GetDataBlock(preRead)).ToList());
}
/// <summary>
/// Returns the event as a comma-separated record:
/// timestamp, sensor name, sensor state, resident name, activity name, comments.
/// </summary>
public override string ToString()
{
    // The timestamp format string already carries the trailing field separator.
    string timestamp = TimeTag.ToString("MM/dd/yyyy,HH:mm:ss,");
    return timestamp + Sensor.Name + "," + SensorState + "," + Resident.Name + "," + Activity.Name + "," + Comments;
}
/// <summary>
/// Reads all <see cref="IDataPoint"/>s in time sorted order for the specified historian IDs.
/// </summary>
/// <remarks>
/// Performs a k-way merge over one enumerator per scanner: on each pass the minimum
/// current timestamp is found, then every enumerator whose current value is at or before
/// that time is drained up to it, so duplicated timestamps for a point ID stay adjacent.
/// </remarks>
/// <returns>Each <see cref="IDataPoint"/> for the specified historian IDs.</returns>
public IEnumerable<IDataPoint> Read()
{
    List<IEnumerator<IDataPoint>> enumerators = new List<IEnumerator<IDataPoint>>();
    // Setup enumerators for scanners that have data
    foreach (DataPointScanner scanner in GetScanners())
    {
        IEnumerator<IDataPoint> enumerator = scanner.Read().GetEnumerator();
        // Add enumerator to the list if it has at least one value
        if (enumerator.MoveNext())
        {
            enumerators.Add(enumerator);
        }
    }
    // Start publishing data points in time-sorted order
    if (enumerators.Count > 0)
    {
        // Indices of enumerators that ran dry during the current pass; they are removed
        // after the pass so positions stay stable while iterating.
        List<int> completed = new List<int>();
        IDataPoint dataPoint;
        do
        {
            TimeTag publishTime = TimeTag.MaxValue;
            // Find minimum publication time for current values
            foreach (IEnumerator<IDataPoint> enumerator in enumerators)
            {
                dataPoint = enumerator.Current;
                if (dataPoint.Time.CompareTo(publishTime) < 0)
                {
                    publishTime = dataPoint.Time;
                }
            }
            int index = 0;
            // Publish all values at the current time
            foreach (IEnumerator<IDataPoint> enumerator in enumerators)
            {
                bool enumerationComplete = false;
                dataPoint = enumerator.Current;
                if (dataPoint.Time.CompareTo(publishTime) <= 0)
                {
                    // Attempt to advance to next data point, tracking completed enumerators
                    if (!enumerator.MoveNext())
                    {
                        enumerationComplete = true;
                        completed.Add(index);
                    }
                    yield return (dataPoint);
                    // Make sure any point IDs with duplicated times directly follow
                    if (!enumerationComplete)
                    {
                        while (enumerator.Current.Time.CompareTo(publishTime) <= 0)
                        {
                            yield return (enumerator.Current);
                            if (!enumerator.MoveNext())
                            {
                                completed.Add(index);
                                break;
                            }
                        }
                    }
                }
                index++;
            }
            // Remove completed enumerators (descending order keeps earlier indices valid)
            if (completed.Count > 0)
            {
                completed.Sort();
                for (int i = completed.Count - 1; i >= 0; i--)
                {
                    enumerators.RemoveAt(completed[i]);
                }
                completed.Clear();
            }
        }
        while (enumerators.Count > 0);
    }
}
/// <summary>
/// Initializes <see cref="PacketType1"/> from the specified <paramref name="buffer"/>.
/// </summary>
/// <param name="buffer">Binary image to be used for initializing <see cref="PacketType1"/>.</param>
/// <param name="startIndex">0-based starting index of initialization data in the <paramref name="buffer"/>.</param>
/// <param name="length">Valid number of bytes in <paramref name="buffer"/> from <paramref name="startIndex"/>.</param>
/// <returns>Number of bytes used from the <paramref name="buffer"/> for initializing <see cref="PacketType1"/>; zero if the image has insufficient data.</returns>
/// <exception cref="ArgumentException">Thrown when the packet ID in the image does not match <see cref="TypeID"/>.</exception>
public override int ParseBinaryImage(byte[] buffer, int startIndex, int length)
{
    if (length >= FixedLength)
    {
        // Binary image has sufficient data.
        short packetID = LittleEndian.ToInt16(buffer, startIndex);
        if (packetID != TypeID)
            throw new ArgumentException(string.Format("Unexpected packet id '{0}' (expected '{1}')", packetID, TypeID));

        // We have a binary image with the correct packet id.
        // Layout after the 2-byte ID: 4-byte historian ID, 8-byte time (double),
        // 4-byte quality, 4-byte single-precision value.
        HistorianID = LittleEndian.ToInt32(buffer, startIndex + 2);
        Time = new TimeTag(LittleEndian.ToDouble(buffer, startIndex + 6));
        Quality = (Quality)(LittleEndian.ToInt32(buffer, startIndex + 14));
        Value = LittleEndian.ToSingle(buffer, startIndex + 18);

        // We'll send an "ACK" to the sender if this is the last packet in the transmission
        // (i.e., no bytes remain beyond this fixed-size packet).
        if (length == FixedLength)
            PreProcessHandler = PreProcess;

        return FixedLength;
    }
    else
    {
        // Binary image does not have sufficient data.
        return 0;
    }
}
// Kick start read process for historian: resolves the requested measurement keys,
// clamps the start/stop constraints to the representable TimeTag range, primes the
// data reader enumerator, and enables the publication timer only if data exists.
// The "state" parameter is unused; it exists so this can be queued via ThreadPool.
private void StartDataReader(object state)
{
    // In temporal mode the routing system supplies the keys; otherwise use configured outputs.
    MeasurementKey[] requestedKeys = SupportsTemporalProcessing ? RequestedOutputMeasurementKeys : OutputMeasurements.MeasurementKeys().ToArray();
    if (Enabled && (object)m_archiveReader != null && (object)requestedKeys != null && requestedKeys.Length > 0)
    {
        m_historianIDs = requestedKeys.Select(key => unchecked((int)key.ID)).ToArray();
        m_publicationTime = 0;
        // Start data read from historian; m_readTimer doubles as the gate object
        // used by the timer elapsed handler, so state changes happen under its lock.
        lock (m_readTimer)
        {
            // Clamp constraints into the valid TimeTag range before converting.
            m_startTime = base.StartTimeConstraint < TimeTag.MinValue ? TimeTag.MinValue : base.StartTimeConstraint > TimeTag.MaxValue ? TimeTag.MaxValue : new TimeTag(base.StartTimeConstraint);
            m_stopTime = base.StopTimeConstraint < TimeTag.MinValue ? TimeTag.MinValue : base.StopTimeConstraint > TimeTag.MaxValue ? TimeTag.MaxValue : new TimeTag(base.StopTimeConstraint);
            m_dataReader = m_archiveReader.ReadData(m_historianIDs, m_startTime, m_stopTime).GetEnumerator();
            // Timer runs only when the first MoveNext finds at least one point.
            m_readTimer.Enabled = m_dataReader.MoveNext();
            if (m_readTimer.Enabled)
            {
                OnStatusMessage(MessageLevel.Info, "Starting historical data read...");
            }
            else
            {
                OnStatusMessage(MessageLevel.Info, "No historical data was available to read for given timeframe.");
                OnProcessingComplete();
            }
        }
    }
    else
    {
        // Nothing to read: leave the timer disabled and report completion.
        m_readTimer.Enabled = false;
        OnStatusMessage(MessageLevel.Info, "No measurement keys have been requested for reading, historian reader is idle.");
        OnProcessingComplete();
    }
}
// Process next data read: called on each timer tick while holding m_readTimer as a gate
// so overlapping pooled-timer callbacks skip the read instead of interleaving.
private void m_readTimer_Elapsed(object sender, ElapsedEventArgs e)
{
    List<IMeasurement> measurements = new List<IMeasurement>();

    if (Monitor.TryEnter(m_readTimer))
    {
        try
        {
            IDataPoint currentPoint = m_dataReader.Current;
            long timestamp = currentPoint.Time.ToDateTime().Ticks;
            MeasurementKey key;

            if (m_publicationTime == 0)
                m_publicationTime = timestamp;

            // Set next reasonable publication time (advance in whole publication
            // intervals until it reaches or passes the current point's timestamp)
            while (m_publicationTime < timestamp)
                m_publicationTime += m_publicationInterval;

            do
            {
                // Lookup measurement key for this point
                key = MeasurementKey.LookUpOrCreate(m_instanceName, unchecked((uint)currentPoint.HistorianID));

                // Add current measurement to the collection for publication
                measurements.Add(new Measurement
                {
                    Metadata = key.Metadata,
                    Timestamp = m_simulateTimestamp ? DateTime.UtcNow.Ticks : timestamp,
                    Value = currentPoint.Value,
                    StateFlags = currentPoint.Quality.MeasurementQuality()
                });

                // Attempt to move to next record
                if (m_dataReader.MoveNext())
                {
                    // Read record value
                    currentPoint = m_dataReader.Current;
                    timestamp = currentPoint.Time.ToDateTime().Ticks;
                }
                else
                {
                    if (timestamp < m_stopTime.ToDateTime().Ticks && m_startTime.ToDateTime().Ticks < timestamp)
                    {
                        // Could be attempting read with a future end time - in these cases attempt to re-read current data
                        // from now to end time in case any new data has been archived in the mean-time
                        m_startTime = new TimeTag(timestamp + Ticks.PerMillisecond);
                        m_dataReader = m_archiveReader.ReadData(m_historianIDs, m_startTime, m_stopTime).GetEnumerator();

                        if (!m_dataReader.MoveNext())
                        {
                            // Finished reading all available data
                            m_readTimer.Enabled = false;

                            if (m_autoRepeat)
                                ThreadPool.QueueUserWorkItem(StartDataReader);
                            else
                                OnProcessingComplete();
                        }
                    }
                    else
                    {
                        // Finished reading all available data
                        m_readTimer.Enabled = false;

                        if (m_autoRepeat)
                            ThreadPool.QueueUserWorkItem(StartDataReader);
                        else
                            OnProcessingComplete();
                    }

                    break;
                }
            }
            while (timestamp <= m_publicationTime);
        }
        catch (InvalidOperationException)
        {
            // Pooled timer thread executed after last read, verify timer has stopped
            m_readTimer.Enabled = false;
        }
        finally
        {
            Monitor.Exit(m_readTimer);
        }
    }

    // Publish all measurements for this time interval
    if (measurements.Count > 0)
        OnNewMeasurements(measurements);
}
/// <summary>
/// Initializes <see cref="StateRecordData"/> from the specified <paramref name="binaryImage"/>.
/// </summary>
/// <param name="binaryImage">Binary image to be used for initializing <see cref="StateRecordData"/>.</param>
/// <param name="startIndex">0-based starting index of initialization data in the <paramref name="binaryImage"/>.</param>
/// <param name="length">Valid number of bytes in <paramref name="binaryImage"/> from <paramref name="startIndex"/>.</param>
/// <returns>Number of bytes used from the <paramref name="binaryImage"/> for initializing <see cref="StateRecordData"/>; zero if the image has insufficient data.</returns>
public override int Initialize(byte[] binaryImage, int startIndex, int length)
{
    // NOTE(review): <paramref name="length"/> is documented as already being counted
    // from startIndex, so subtracting startIndex again appears to double-count the
    // offset (same pattern as ArchiveDataBlockPointer.Initialize in this file).
    // Verify what callers actually pass before changing this comparison.
    if (length - startIndex >= ByteCount)
    {
        // Binary image has sufficient data.
        // Layout: 8-byte little-endian double time, 4-byte flags, 4-byte single value.
        Time = new TimeTag(EndianOrder.LittleEndian.ToDouble(binaryImage, startIndex));
        Flags = EndianOrder.LittleEndian.ToInt32(binaryImage, startIndex + 8);
        Value = EndianOrder.LittleEndian.ToSingle(binaryImage, startIndex + 12);
        return ByteCount;
    }
    else
    {
        // Binary image does not have sufficient data.
        return 0;
    }
}
/// <summary>
/// Reads <see cref="ArchiveData"/> points.
/// </summary>
/// <param name="historianID">Historian identifier for which <see cref="ArchiveData"/> points are to be retrieved.</param>
/// <param name="startTime">Start <see cref="TimeTag"/> (in GMT) for the <see cref="ArchiveData"/> points to be retrieved.</param>
/// <returns><see cref="IEnumerable{T}"/> object containing zero or more <see cref="ArchiveData"/> points.</returns>
public IEnumerable<IDataPoint> ReadData(int historianID, TimeTag startTime)
{
    // Open-ended read: delegate to the ranged overload with the maximum end time.
    return ReadData(historianID, startTime, TimeTag.MaxValue);
}
/// <summary> /// Initializes a new instance of the <see cref="StateRecordData"/> class. /// </summary> /// <param name="historianID">Historian identifier of <see cref="StateRecordData"/>.</param> /// <param name="time"><see cref="TimeTag"/> of <see cref="StateRecordData"/>.</param> /// <param name="value">Floating-point value of <see cref="StateRecordData"/>.</param> /// <param name="quality"><see cref="Quality"/> of <see cref="StateRecordData"/>.</param> public StateRecordData(int historianID, TimeTag time, float value, Quality quality) : base(historianID, time, value, quality) { }
/// <summary>
/// Writes <paramref name="items"/> to the appropriate historic archive file(s). Data points are
/// grouped by historian ID, time-sorted, and each point is routed to the historic file whose
/// time span covers it; files are opened lazily and closed when a point falls outside their span.
/// </summary>
/// <param name="items">Data points to be written to historic archive file(s).</param>
private void WriteToHistoricArchiveFile(ArchiveData[] items)
{
    if (m_buildHistoricFileListThread.IsAlive)
        // Wait until the historic file list has been built.
        m_buildHistoricFileListThread.Join();

    OnHistoricDataWriteStart();

    Dictionary<int, List<ArchiveData>> sortedPointData = new Dictionary<int, List<ArchiveData>>();

    // First we'll separate all point data by ID.
    for (int i = 0; i < items.Length; i++)
    {
        List<ArchiveData> bucket;

        // Single dictionary lookup via TryGetValue instead of ContainsKey + indexer.
        if (!sortedPointData.TryGetValue(items[i].HistorianID, out bucket))
        {
            bucket = new List<ArchiveData>();
            sortedPointData.Add(items[i].HistorianID, bucket);
        }

        bucket.Add(items[i]);
    }

    ProcessProgress<int> historicWriteProgress = new ProcessProgress<int>("HistoricWrite");
    historicWriteProgress.Total = items.Length;

    foreach (int pointID in sortedPointData.Keys)
    {
        // We'll sort the point data for the current point ID by time.
        List<ArchiveData> pointData = sortedPointData[pointID];
        pointData.Sort();

        ArchiveFile historicFile = null;
        ArchiveDataBlock historicFileBlock = null;

        try
        {
            for (int i = 0; i < pointData.Count; i++)
            {
                if (historicFile == null)
                {
                    // We'll try to find a historic file where the current point data belongs.
                    Info historicFileInfo;
                    m_writeSearchTimeTag = pointData[i].Time;

                    lock (m_historicArchiveFiles)
                    {
                        historicFileInfo = m_historicArchiveFiles.Find(FindHistoricArchiveFileForWrite);
                    }

                    if (historicFileInfo != null)
                    {
                        // Found a historic file where the data can be written.
                        historicFile = new ArchiveFile();
                        historicFile.FileName = historicFileInfo.FileName;
                        historicFile.StateFile = m_stateFile;
                        historicFile.IntercomFile = m_intercomFile;
                        historicFile.MetadataFile = m_metadataFile;
                        historicFile.Open();
                    }
                }

                if (historicFile != null)
                {
                    if (pointData[i].Time.CompareTo(historicFile.Fat.FileStartTime) >= 0 &&
                        pointData[i].Time.CompareTo(historicFile.Fat.FileEndTime) <= 0)
                    {
                        // The current point data belongs to the current historic archive file.
                        if (historicFileBlock == null || historicFileBlock.SlotsAvailable == 0)
                        {
                            // Request a new or previously used data block for point data.
                            historicFileBlock = historicFile.Fat.RequestDataBlock(pointID, pointData[i].Time, -1);
                        }

                        historicFileBlock.Write(pointData[i]);
                        historicFile.Fat.DataPointsReceived++;
                        historicFile.Fat.DataPointsArchived++;

                        // FIX: use the List<T>.Count property instead of the LINQ Count()
                        // extension method (same result, no enumerator allocation per point).
                        if (i == pointData.Count - 1)
                        {
                            // Last piece of data for the point, so we close the currently open file.
                            historicFile.Save();
                            historicFile.Dispose();
                            historicFile = null;
                            historicFileBlock = null;
                        }

                        historicWriteProgress.Complete++;
                    }
                    else
                    {
                        // The current point data doesn't belong to the current historic archive file, so we have
                        // to write all the point data we have so far for the current historic archive file to it.
                        // Decrement so this same point is retried against the next located historic file.
                        i--;
                        historicFile.Dispose();
                        historicFile = null;
                        historicFileBlock = null;
                    }
                }
            }

            // Notify of progress per point.
            historicWriteProgress.ProgressMessage = string.Format("Wrote historic data for point id {0} ({1} of {2}).", pointID, "{0}", "{1}");
            OnHistoricDataWriteProgress(historicWriteProgress);
        }
        catch (Exception ex)
        {
            // Free-up used memory.
            if (historicFile != null)
            {
                try
                {
                    historicFile.Dispose();
                    historicFile = null;
                }
                catch
                {
                    // Best-effort cleanup; the original exception reported below is what matters.
                }
            }

            // Notify of the exception.
            OnHistoricDataWriteException(ex);
        }
    }

    OnHistoricDataWriteComplete();
}
/// <summary> /// Initializes a new instance of the <see cref="ArchiveFileAllocationTable"/> class. /// </summary> /// <param name="parent">An <see cref="ArchiveFile"/> object.</param> internal ArchiveFileAllocationTable(ArchiveFile parent) { m_parent = parent; m_dataBlockPointers = new List<ArchiveDataBlockPointer>(); m_fixedTableRegion = new FixedTableRegion(this); m_variableTableRegion = new VariableTableRegion(this); if (m_parent.FileData.Length == 0) { // File is brand new. m_fileStartTime = TimeTag.MinValue; m_fileEndTime = TimeTag.MinValue; m_dataBlockSize = m_parent.DataBlockSize; m_dataBlockCount = ArchiveFile.MaximumDataBlocks(m_parent.FileSize, m_parent.DataBlockSize); for (int i = 0; i < m_dataBlockCount; i++) { m_dataBlockPointers.Add(new ArchiveDataBlockPointer(m_parent, i)); } } else { // Existing file, read table regions: // Seek to beginning of fixed table region m_parent.FileData.Seek(-m_fixedTableRegion.BinaryLength, SeekOrigin.End); // Parse fixed table region m_fixedTableRegion.ParseBinaryImageFromStream(m_parent.FileData); // Seek to beginning of variable table region (above fixed from bottom of file) m_parent.FileData.Seek(-(m_fixedTableRegion.BinaryLength + m_variableTableRegion.BinaryLength), SeekOrigin.End); // Parse variable table region m_variableTableRegion.ParseBinaryImageFromStream(m_parent.FileData); } }
private void update() { timeTB.Text = "今天是:" + TimeTag.GetTime2(); }
/// <summary> /// Reads <see cref="ArchiveDataPoint"/>s from the <see cref="ArchiveFile"/>. /// </summary> /// <param name="historianIDs">Historian identifiers for which <see cref="ArchiveDataPoint"/>s are to be retrieved.</param> /// <param name="startTime">Start <see cref="TimeTag"/> (in UTC) for the <see cref="ArchiveDataPoint"/>s to be retrieved.</param> /// <param name="endTime">End <see cref="TimeTag"/> (in UTC) for the <see cref="ArchiveDataPoint"/>s to be retrieved.</param> /// <param name="timeSorted">Indicates whether the data retrieved from the archive should be time sorted.</param> /// <returns><see cref="IEnumerable{T}"/> object containing zero or more <see cref="ArchiveDataPoint"/>s.</returns> public IEnumerable<IDataPoint> ReadData(IEnumerable<int> historianIDs, TimeTag startTime, TimeTag endTime, bool timeSorted = true) { return ReadData(historianIDs, startTime, endTime, null, timeSorted); }