//private bool IsOldHistoricArchiveFile(Info fileInfo)
//{
//    return ((object)fileInfo != null &&
//            !string.IsNullOrEmpty(m_archiveOffloadLocation) &&
//            string.Compare(FilePath.GetDirectoryName(m_archiveOffloadLocation), FilePath.GetDirectoryName(fileInfo.FileName), true) == 0);
//}

#endregion

#region [ Queue Delegates ]

/// <summary>
/// Processes a batch of queued <see cref="IDataPoint"/>s for archival to the current archive file:
/// groups points by historian ID, applies quality checks and quality-based alarming, performs
/// swinging-door style compression, routes future/orphan/out-of-sequence/historic data to the
/// appropriate handlers, and writes surviving points into archive data blocks (triggering rollover
/// preparation and rollover when the file fills up).
/// </summary>
/// <param name="items">Data points dequeued for archival.</param>
private void WriteToCurrentArchiveFile(IDataPoint[] items)
{
    Dictionary<int, List<IDataPoint>> sortedDataPoints = new Dictionary<int, List<IDataPoint>>();

    // First we'll separate all point data by historian ID.
    for (int i = 0; i < items.Length; i++)
    {
        if (!sortedDataPoints.ContainsKey(items[i].HistorianID))
        {
            sortedDataPoints.Add(items[i].HistorianID, new List<IDataPoint>());
        }

        sortedDataPoints[items[i].HistorianID].Add(items[i]);
    }

    // Record 1 of the intercom file carries shared run-time state (latest data, block usage).
    IntercomRecord system = m_intercomFile.Read(1);
    foreach (int pointID in sortedDataPoints.Keys)
    {
        // Initialize local variables - per-point runtime state and static metadata.
        StateRecord state = m_stateFile.Read(pointID);
        MetadataRecord metadata = m_metadataFile.Read(pointID);

        IDataPoint dataPoint;
        for (int i = 0; i < sortedDataPoints[pointID].Count; i++)
        {
            dataPoint = sortedDataPoints[pointID][i];

            // Ensure that the received data is to be archived; unknown or disabled points are orphans.
            if (state == null || metadata == null || !metadata.GeneralFlags.Enabled)
            {
                OnOrphanDataReceived(dataPoint);
                continue;
            }

            // Ensure that data is not far out in to the future (beyond the lead-time tolerance).
            if (dataPoint.Time > DateTime.UtcNow.AddMinutes(m_leadTimeTolerance))
            {
                OnFutureDataReceived(dataPoint);
                continue;
            }

            // Perform quality check if data quality is not set.
            if ((int)dataPoint.Quality == 31)
            {
                // Note: Here we're checking if the Quality is 31 instead of -1 because the quality value is stored
                // in the first 5 bits (QualityMask = 31) of Flags in the point data. Initially when the Quality is
                // set to -1, all the bits Flags (a 32-bit integer) are set to 1. And therefore, when we get the
                // Quality, which is a masked value of Flags, we get 31 and not -1.
                switch (metadata.GeneralFlags.DataType)
                {
                    case DataType.Analog:
                        // Derive quality from the configured range/alarm/warning limits (checked outermost-in).
                        if (dataPoint.Value >= metadata.AnalogFields.HighRange)
                            dataPoint.Quality = Quality.UnreasonableHigh;
                        else if (dataPoint.Value >= metadata.AnalogFields.HighAlarm)
                            dataPoint.Quality = Quality.ValueAboveHiHiAlarm;
                        else if (dataPoint.Value >= metadata.AnalogFields.HighWarning)
                            dataPoint.Quality = Quality.ValueAboveHiAlarm;
                        else if (dataPoint.Value <= metadata.AnalogFields.LowRange)
                            dataPoint.Quality = Quality.UnreasonableLow;
                        else if (dataPoint.Value <= metadata.AnalogFields.LowAlarm)
                            dataPoint.Quality = Quality.ValueBelowLoLoAlarm;
                        else if (dataPoint.Value <= metadata.AnalogFields.LowWarning)
                            dataPoint.Quality = Quality.ValueBelowLoAlarm;
                        else
                            dataPoint.Quality = Quality.Good;
                        break;
                    case DataType.Digital:
                        if ((int)dataPoint.Value == metadata.DigitalFields.AlarmState)
                            dataPoint.Quality = Quality.LogicalAlarm;
                        else
                            dataPoint.Quality = Quality.Good;
                        break;
                }
            }

            // Update information about the latest data point received.
            if (dataPoint.Time.CompareTo(system.LatestDataTime) > 0)
            {
                system.LatestDataID = dataPoint.HistorianID;
                system.LatestDataTime = dataPoint.Time;
                m_intercomFile.Write(1, system);
            }

            // Check for data that is out-of-sequence based on its time.
            if (dataPoint.Time.CompareTo(state.PreviousData.Time) <= 0)
            {
                if (dataPoint.Time == state.PreviousData.Time)
                {
                    // Discard data that is an exact duplicate of data in line for archival.
                    // NOTE(review): this 'return' abandons ALL remaining queued points in this batch
                    // (including other point IDs), not just the duplicate — confirm 'continue' was
                    // not intended here.
                    if (dataPoint.Value == state.PreviousData.Value && dataPoint.Quality == state.PreviousData.Quality)
                        return;
                }
                else
                {
                    // Queue out-of-sequence data for processing if it is not to be discarded.
                    if (!m_discardOutOfSequenceData)
                        m_outOfSequenceDataQueue.Add(dataPoint);

                    OnOutOfSequenceDataReceived(dataPoint);
                    // NOTE(review): as above, this 'return' stops processing of the whole batch — verify.
                    return;
                }
            }

            // [BEGIN]   Data compression
            bool archiveData = false;
            bool calculateSlopes = false;
            float compressionLimit = metadata.AnalogFields.CompressionLimit;

            // Set the compression limit to a very low number for digital points.
            if (metadata.GeneralFlags.DataType == DataType.Digital)
                compressionLimit = 0.000000001f;

            state.CurrentData = new StateRecordDataPoint(dataPoint);
            if (state.ArchivedData.IsEmpty)
            {
                // This is the first time data is received - archive it immediately.
                // NOTE(review): CurrentData is reset to an empty point so that PreviousData remains
                // empty for the next receipt (driving the "second time" branch below) — presumably
                // intentional; confirm.
                state.CurrentData = new StateRecordDataPoint(-1);
                archiveData = true;
            }
            else if (state.PreviousData.IsEmpty)
            {
                // This is the second time data is received - slopes can now be established.
                calculateSlopes = true;
            }
            else
            {
                // Process quality-based alarming if enabled.
                if (metadata.GeneralFlags.AlarmEnabled)
                {
                    if (metadata.AlarmFlags.Value.CheckBits(BitExtensions.BitVal((int)state.CurrentData.Quality)))
                    {
                        // Current data quality warrants alarming based on the alarming settings.
                        float delay = 0;
                        switch (metadata.GeneralFlags.DataType)
                        {
                            case DataType.Analog:
                                delay = metadata.AnalogFields.AlarmDelay;
                                break;
                            case DataType.Digital:
                                delay = metadata.DigitalFields.AlarmDelay;
                                break;
                        }

                        // Dispatch the alarm immediately or after a given time based on settings.
                        if (delay > 0)
                        {
                            // Wait before dispatching alarm.
                            double first;
                            if (m_delayedAlarmProcessing.TryGetValue(dataPoint.HistorianID, out first))
                            {
                                if (state.CurrentData.Time.Value - first > delay)
                                {
                                    // Wait is now over, dispatch the alarm.
                                    m_delayedAlarmProcessing.Remove(dataPoint.HistorianID);
                                    OnProcessAlarmNotification(state);
                                }
                            }
                            else
                            {
                                // First occurrence of the alarm-worthy quality - start the wait clock.
                                m_delayedAlarmProcessing.Add(state.HistorianID, state.CurrentData.Time.Value);
                            }
                        }
                        else
                        {
                            // Dispatch the alarm immediately.
                            OnProcessAlarmNotification(state);
                        }
                    }
                    else
                    {
                        // Quality no longer alarm-worthy - cancel any pending delayed alarm.
                        m_delayedAlarmProcessing.Remove(dataPoint.HistorianID);
                    }
                }

                if (m_compressData)
                {
                    // Data is to be compressed.
                    if (metadata.CompressionMinTime > 0 && state.CurrentData.Time.Value - state.ArchivedData.Time.Value < metadata.CompressionMinTime)
                    {
                        // CompressionMinTime is in effect - too soon after last archived point.
                        archiveData = false;
                        calculateSlopes = false;
                    }
                    else if (state.CurrentData.Quality != state.ArchivedData.Quality || state.CurrentData.Quality != state.PreviousData.Quality || (metadata.CompressionMaxTime > 0 && state.PreviousData.Time.Value - state.ArchivedData.Time.Value > metadata.CompressionMaxTime))
                    {
                        // Quality changed or CompressionMaxTime is exceeded - archive the held previous point.
                        dataPoint = new ArchiveDataPoint(state.PreviousData);
                        archiveData = true;
                        calculateSlopes = true;
                    }
                    else
                    {
                        // Perform a compression test (swinging-door slope corridor).
                        double slope1;
                        double slope2;
                        double currentSlope;

                        slope1 = (state.CurrentData.Value - (state.ArchivedData.Value + compressionLimit)) / (state.CurrentData.Time.Value - state.ArchivedData.Time.Value);
                        slope2 = (state.CurrentData.Value - (state.ArchivedData.Value - compressionLimit)) / (state.CurrentData.Time.Value - state.ArchivedData.Time.Value);
                        currentSlope = (state.CurrentData.Value - state.ArchivedData.Value) / (state.CurrentData.Time.Value - state.ArchivedData.Time.Value);

                        // Narrow the corridor as new points arrive.
                        if (slope1 >= state.Slope1)
                            state.Slope1 = slope1;

                        if (slope2 <= state.Slope2)
                            state.Slope2 = slope2;

                        // Current slope escaped the corridor - the previous point must be archived.
                        if (currentSlope <= state.Slope1 || currentSlope >= state.Slope2)
                        {
                            dataPoint = new ArchiveDataPoint(state.PreviousData);
                            archiveData = true;
                            calculateSlopes = true;
                        }
                    }
                }
                else
                {
                    // Data is not to be compressed - always archive the previous point.
                    dataPoint = new ArchiveDataPoint(state.PreviousData);
                    archiveData = true;
                }
            }
            // [END]     Data compression

            // [BEGIN]   Data archival
            m_fat.DataPointsReceived++;
            if (archiveData)
            {
                if (dataPoint.Time.CompareTo(m_fat.FileStartTime) >= 0)
                {
                    // Data belongs to this file.
                    ArchiveDataBlock dataBlock;
                    lock (m_dataBlocks)
                    {
                        // Cached blocks are indexed by (historian ID - 1).
                        dataBlock = m_dataBlocks[dataPoint.HistorianID - 1];
                    }

                    if (dataBlock == null || dataBlock.SlotsAvailable == 0)
                    {
                        // Need to find a data block for writing the data.
                        if (dataBlock != null)
                        {
                            dataBlock = null;
                            state.ActiveDataBlockIndex = -1;
                        }

                        if (state.ActiveDataBlockIndex >= 0)
                        {
                            // Retrieve previously used data block.
                            dataBlock = m_fat.RequestDataBlock(dataPoint.HistorianID, dataPoint.Time, state.ActiveDataBlockIndex);
                        }
                        else
                        {
                            // Time to request a brand new data block.
                            dataBlock = m_fat.RequestDataBlock(dataPoint.HistorianID, dataPoint.Time, system.DataBlocksUsed);
                        }

                        if (dataBlock != null)
                        {
                            // Update the total number of data blocks used.
                            if (dataBlock.SlotsUsed == 0 && system.DataBlocksUsed == dataBlock.Index)
                            {
                                system.DataBlocksUsed++;
                                m_intercomFile.Write(1, system);
                            }

                            // Update the active data block index information.
                            state.ActiveDataBlockIndex = dataBlock.Index;
                        }

                        // Keep in-memory reference to the data block for consecutive writes.
                        lock (m_dataBlocks)
                        {
                            m_dataBlocks[dataPoint.HistorianID - 1] = dataBlock;
                        }

                        // Kick-off the rollover preparation when its threshold is reached.
                        if (Statistics.FileUsage >= m_rolloverPreparationThreshold && !File.Exists(StandbyArchiveFileName) && !m_rolloverPreparationThread.IsAlive)
                        {
                            m_rolloverPreparationThread = new Thread(PrepareForRollover);
                            m_rolloverPreparationThread.Priority = ThreadPriority.Lowest;
                            m_rolloverPreparationThread.Start();
                        }
                    }

                    if (dataBlock != null)
                    {
                        // Write data to the data block.
                        dataBlock.Write(dataPoint);
                        m_fat.DataPointsArchived++;
                    }
                    else
                    {
                        OnFileFull();   // Current file is full.

                        m_fat.DataPointsReceived--;
                        while (true)
                        {
                            Rollover(); // Rollover current file.
                            if (m_rolloverWaitHandle.WaitOne(1, false))
                                break;  // Rollover is successful.
                        }

                        i--;                                // Process current data point again.
                        system = m_intercomFile.Read(1);    // Re-read modified intercom record.
                        continue;
                    }
                }
                else
                {
                    // Data is historic (predates this file) - route to the historic write queue.
                    m_fat.DataPointsReceived--;
                    m_historicDataQueue.Add(dataPoint);
                    OnHistoricDataReceived(dataPoint);
                }

                state.ArchivedData = new StateRecordDataPoint(dataPoint);
            }

            if (calculateSlopes)
            {
                if (state.CurrentData.Time.Value != state.ArchivedData.Time.Value)
                {
                    // Re-seed the compression corridor from the newly archived point.
                    state.Slope1 = (state.CurrentData.Value - (state.ArchivedData.Value + compressionLimit)) / (state.CurrentData.Time.Value - state.ArchivedData.Time.Value);
                    state.Slope2 = (state.CurrentData.Value - (state.ArchivedData.Value - compressionLimit)) / (state.CurrentData.Time.Value - state.ArchivedData.Time.Value);
                }
                else
                {
                    // Zero time delta - slopes are undefined, reset them.
                    state.Slope1 = 0;
                    state.Slope2 = 0;
                }
            }

            state.PreviousData = state.CurrentData;

            // Write state information to the file.
            m_stateFile.Write(state.HistorianID, state);
            // [END]     Data archival
        }
    }
}
/// <summary>
/// Reads existing <see cref="ArchiveDataPoint"/>s from the <see cref="ArchiveDataBlock"/>.
/// </summary>
/// <returns>Returns <see cref="ArchiveDataPoint"/>s from the <see cref="ArchiveDataBlock"/>.</returns>
/// <remarks>
/// NOTE(review): the lock is held across <c>yield return</c>, i.e. for as long as the consumer
/// is enumerating — presumably intentional to keep the shared file position stable; confirm.
/// </remarks>
public IEnumerable<IDataPoint> Read()
{
    ArchiveDataPoint dataPoint;

    lock (m_parent.FileData)
    {
        // We'll start reading from where the data block begins.
        m_parent.FileData.Seek(Location, SeekOrigin.Begin);

        for (int i = 0; i < Capacity; i++)
        {
            // Read the data in the block.
            m_lastActivityTime = DateTime.Now;  // NOTE(review): local time; UtcNow may be preferable — confirm comparisons elsewhere.

            // Stream.Read may return fewer bytes than requested, so loop until the buffer is
            // full or end-of-stream is reached; the original code ignored the return value and
            // could parse stale buffer contents on a short read.
            int totalRead = 0;

            while (totalRead < m_readBuffer.Length)
            {
                int bytesRead = m_parent.FileData.Read(m_readBuffer, totalRead, m_readBuffer.Length - totalRead);

                if (bytesRead == 0)
                    break;  // End of stream.

                totalRead += bytesRead;
            }

            if (totalRead < m_readBuffer.Length)
                yield break;    // Truncated block - no complete data point remains.

            // Attempt to parse archive data point
            try
            {
                dataPoint = new ArchiveDataPoint(m_historianID, m_readBuffer, 0, m_readBuffer.Length);
            }
            catch (Exception ex)
            {
                dataPoint = null;
                OnDataReadException(ex);
            }

            if (dataPoint != null && !dataPoint.IsEmpty)
            {
                // There is data - use it.
                m_writeCursor = m_parent.FileData.Position;
                yield return dataPoint;
            }
            else
            {
                // Data is empty - stop reading.
                yield break;
            }
        }
    }
}
/// <summary>
/// Validates the user-entered plain-text output format string by test-formatting it against a
/// sample <see cref="ArchiveDataPoint"/>; shows an explanatory message box when invalid.
/// </summary>
/// <returns><c>true</c> if the format string is usable; otherwise <c>false</c>.</returns>
private bool ValidateOutputFormat()
{
    bool valid = false;

    try
    {
        // Determine the highest placeholder index referenced by the format string.
        // DefaultIfEmpty(-1) handles a format with no placeholders at all, which previously
        // caused Max() to throw on an empty sequence and wrongly reported a valid literal
        // format as invalid. (Indices are parsed from the text following each '{'.)
        int maxIndex = OutputPlainTextDataFormat.Text
            .Split('{')
            .Where(value => !string.IsNullOrWhiteSpace(value))
            .Select(value => int.Parse(value.Split(':')[0]))
            .DefaultIfEmpty(-1)
            .Max();

        // Supply the same sample point for every placeholder slot.
        object[] args = new object[maxIndex + 1];
        ArchiveDataPoint sample = new ArchiveDataPoint(1);

        for (int i = 0; i < args.Length; i++)
        {
            args[i] = sample;
        }

        // A malformed format string throws here; the (previously unused) ASCII encoding step
        // added no additional validation and has been removed.
        string.Format(OutputPlainTextDataFormat.Text, args);
        valid = true;
    }
    catch (Exception ex)
    {
        StringBuilder sb = new StringBuilder();
        sb.AppendLine("Invalid Data Output Format: ");
        sb.AppendLine(ex.Message);
        sb.AppendLine();
        sb.AppendLine("A valid format, for example, is:");
        sb.AppendLine("{0:Source}:{1:ID},{2:Name},{3:Synonym1},{4:Time},{5:UnixTime},{6:Value},{7:Quality},{8:Description}");
        MessageBox.Show(sb.ToString(), "Invalid Output Data Format", MessageBoxButtons.OK);
        OutputPlainTextDataFormat.Focus();
    }

    return valid;
}