// Static Methods

/// <summary>
/// Attempts to synthesize a single missing current channel (IA, IB, IC, or IR)
/// in the data group using the relation IR = IA + IB + IC.
/// </summary>
/// <param name="meterInfo">Data context used to look up series metadata for the synthesized channel.</param>
/// <param name="meter">The meter that produced the data group.</param>
/// <param name="dataGroup">The data group to scan and, on success, extend with the synthesized series.</param>
/// <returns>The synthesized series, or null when no channel was missing or more than one was missing.</returns>
public static DataSeries AddMissingCurrentSeries(MeterInfoDataContext meterInfo, Meter meter, DataGroup dataGroup)
{
    DataSeries missingSeries = null;

    // Get all necessary voltage and current channels in the proper order.
    // NOTE(review): Enumerable.Range(start, count) takes a count, so "IRIndex + 1"
    // only spans VA..IR when VAIndex == 0 -- confirm the index constants.
    List <DataSeries> viSeriesList = Enumerable.Range(VAIndex, IRIndex + 1)
        .GroupJoin(dataGroup.DataSeries.Where(IsInstantaneous), i => i, GetIndex, (i, series) => series.FirstOrDefault())
        .ToList();

    // Validate that no more than one current channel is missing
    // (note: this count also includes any missing voltage channels)
    if (viSeriesList.Count(series => (object)series == null) > 1)
    {
        return(null);
    }

    // Attempt to fill in missing current channels
    // based on the relation IR = IA + IB + IC.
    // Add/Negate return new series, so the source channels are not mutated.
    if ((object)viSeriesList[IAIndex] == null)
    {
        // IA = IR - IB - IC
        missingSeries = viSeriesList[IRIndex];
        missingSeries = missingSeries.Add(viSeriesList[IBIndex].Negate());
        missingSeries = missingSeries.Add(viSeriesList[ICIndex].Negate());
        missingSeries.SeriesInfo = GetSeriesInfo(meterInfo, meter, dataGroup, IAIndex);
        missingSeries.SeriesInfo.Channel.Line = viSeriesList[IBIndex].SeriesInfo.Channel.Line;
        viSeriesList[IAIndex] = missingSeries;
    }
    else if ((object)viSeriesList[IBIndex] == null)
    {
        // IB = IR - IA - IC
        missingSeries = viSeriesList[IRIndex];
        missingSeries = missingSeries.Add(viSeriesList[IAIndex].Negate());
        missingSeries = missingSeries.Add(viSeriesList[ICIndex].Negate());
        missingSeries.SeriesInfo = GetSeriesInfo(meterInfo, meter, dataGroup, IBIndex);
        missingSeries.SeriesInfo.Channel.Line = viSeriesList[IAIndex].SeriesInfo.Channel.Line;
        viSeriesList[IBIndex] = missingSeries;
    }
    else if ((object)viSeriesList[ICIndex] == null)
    {
        // IC = IR - IA - IB
        missingSeries = viSeriesList[IRIndex];
        missingSeries = missingSeries.Add(viSeriesList[IAIndex].Negate());
        missingSeries = missingSeries.Add(viSeriesList[IBIndex].Negate());
        missingSeries.SeriesInfo = GetSeriesInfo(meterInfo, meter, dataGroup, ICIndex);
        missingSeries.SeriesInfo.Channel.Line = viSeriesList[IAIndex].SeriesInfo.Channel.Line;
        viSeriesList[ICIndex] = missingSeries;
    }
    else if ((object)viSeriesList[IRIndex] == null)
    {
        // IR = IA + IB + IC
        missingSeries = viSeriesList[IAIndex];
        missingSeries = missingSeries.Add(viSeriesList[IBIndex]);
        missingSeries = missingSeries.Add(viSeriesList[ICIndex]);
        missingSeries.SeriesInfo = GetSeriesInfo(meterInfo, meter, dataGroup, IRIndex);
        missingSeries.SeriesInfo.Channel.Line = viSeriesList[IAIndex].SeriesInfo.Channel.Line;
        viSeriesList[IRIndex] = missingSeries;
    }

    // Register the synthesized series with the data group
    if ((object)missingSeries != null)
    {
        dataGroup.Add(missingSeries);
    }

    return(missingSeries);
}
/// <summary>
/// Scans every point in the series and returns the most severe value,
/// as judged by the m_isMoreSevere comparison delegate.
/// Returns NaN for a series with no data points.
/// </summary>
private double GetMagnitude(DataSeries dataSeries)
{
    double worst = double.NaN;

    foreach (DataPoint point in dataSeries.DataPoints)
    {
        // The first value always replaces the initial NaN seed
        bool noValueYet = double.IsNaN(worst);

        if (noValueYet || m_isMoreSevere(point.Value, worst))
            worst = point.Value;
    }

    return worst;
}
/// <summary>
/// Normalizes an RMS series to per-unit values. The base is the channel's
/// configured per-unit value when present, otherwise the computed line voltage.
/// Returns null when the base is zero or the input is null.
/// </summary>
private DataSeries ToPerUnit(DataSeries rms)
{
    // Prefer the channel's configured per-unit base; fall back to line voltage
    double baseValue = rms?.SeriesInfo.Channel.PerUnitValue ?? GetLineVoltage(rms);

    // A zero base cannot be normalized against
    if (baseValue == 0.0D)
        return null;

    double scale = 1.0D / baseValue;
    return rms?.Multiply(scale);
}
/// <summary>
/// Creates a new curve backed by an empty data series.
/// </summary>
public Curve() { m_series = new DataSeries(); }
/// <summary>
/// Scans an RMS series for contiguous runs of data points measured under
/// disturbed conditions (per the m_isDisturbed predicate) and returns the
/// inclusive index range of each run that lasted longer than half a cycle.
/// </summary>
/// <param name="rms">The RMS series to scan; may be null.</param>
/// <returns>List of inclusive index ranges, one per qualifying disturbance.</returns>
private List<Range<int>> DetectDisturbanceRanges(DataSeries rms)
{
    List<Range<int>> ranges = new List<Range<int>>();

    if ((object)rms == null)
        return ranges;

    // The sample rate is needed to exclude disturbances
    // shorter than half a cycle; a rate of zero is invalid
    int samplesPerCycle = Transform.CalculateSamplesPerCycle(rms, m_systemFrequency);

    if (samplesPerCycle == 0)
        return ranges;

    int index = 0;

    while (index < rms.DataPoints.Count)
    {
        if (m_isDisturbed(rms[index]))
        {
            // Walk forward to the first point past the end of the disturbance
            int endIndex = index + 1;

            while (endIndex < rms.DataPoints.Count && m_isDisturbed(rms[endIndex]))
                endIndex++;

            // Keep only disturbances that lasted longer than half a cycle
            if ((endIndex - index) > (samplesPerCycle / 2))
                ranges.Add(new Range<int>(index, endIndex - 1));

            // Resume scanning past the disturbance
            index = endIndex;
        }

        index++;
    }

    return ranges;
}
/// <summary>
/// Returns a new series whose points are this series' points scaled by the
/// given factor. The result carries no series info, matching the other
/// arithmetic operations on this type.
/// </summary>
/// <param name="value">The scale factor applied to every point.</param>
public DataSeries Multiply(double value)
{
    DataSeries scaled = new DataSeries();
    List<DataPoint> scaledPoints = new List<DataPoint>();

    foreach (DataPoint point in m_dataPoints)
        scaledPoints.Add(point.Multiply(value));

    scaled.DataPoints = scaledPoints;
    return scaled;
}
/// <summary>
/// Returns a new series representing this series minus the given operand,
/// computed point-by-point by adding the operand's negation.
/// </summary>
/// <param name="operand">The series to subtract from this one.</param>
public DataSeries Subtract(DataSeries operand) { return Add(operand.Negate()); }
/// <summary>
/// Resolves a series' channel name from the original schema using the series'
/// source index (1-based; sign is ignored). Falls back to the name on the
/// series info when no schema is available or the index cannot be parsed.
/// </summary>
private static string GetOriginalChannelName(Schema originalSchema, DataSeries series)
{
    int sourceIndex;

    // Without a schema there is nothing to look up
    if ((object)originalSchema == null)
        return series.SeriesInfo.Channel.Name;

    // An unparsable source index also falls back to the series info name
    if (!int.TryParse(series.SeriesInfo.SourceIndexes, out sourceIndex))
        return series.SeriesInfo.Channel.Name;

    return originalSchema.AnalogChannels[Math.Abs(sourceIndex) - 1].Name;
}
/// <summary>
/// Adds a channel to the group of data.
/// </summary>
/// <param name="dataSeries">The channel to be added to the group.</param>
/// <returns>
/// True if the channel was successfully added. False if the channel was excluded
/// because the channel does not match the other channels already in the data group.
/// </returns>
public bool Add(DataSeries dataSeries)
{
    Line line;
    DateTime startTime;
    DateTime endTime;
    int samples;

    // Unable to add null data series
    if ((object)dataSeries == null)
    {
        return(false);
    }

    // Data series without data is irrelevant to data grouping
    if (!dataSeries.DataPoints.Any())
    {
        return(false);
    }

    // Do not add the same data series twice
    if (m_dataSeries.Contains(dataSeries))
    {
        return(false);
    }

    // Get information about the line this data is associated with
    // (series info may legitimately be absent)
    if ((object)dataSeries.SeriesInfo != null)
    {
        line = dataSeries.SeriesInfo.Channel.Line;
    }
    else
    {
        line = null;
    }

    // Get the start time, end time, and number of samples
    // for the data series passed into this function
    startTime = dataSeries.DataPoints[0].Time;
    endTime = dataSeries.DataPoints[dataSeries.DataPoints.Count - 1].Time;
    samples = dataSeries.DataPoints.Count;

    // If there are any disturbances in this data group that do not overlap
    // with the data series, do not include the data series in the data group
    if (m_disturbances.Select(disturbance => disturbance.ToRange()).Any(range => range.Start > endTime || range.End < startTime))
    {
        return(false);
    }

    // If there are any disturbances associated with the data in this group and the data
    // to be added is trending data (sample rate at or below TrendThreshold samples per
    // minute), do not include the trending data in the data group
    if (m_disturbances.Any() && CalculateSamplesPerMinute(startTime, endTime, samples) <= TrendThreshold)
    {
        return(false);
    }

    // At this point, if there is no existing data in the data
    // group, add the data as the first series in the data group;
    // this first series establishes the group's line/time/sample parameters
    if (m_dataSeries.Count == 0)
    {
        m_line = line;
        m_startTime = startTime;
        m_endTime = endTime;
        m_samples = samples;
        m_dataSeries.Add(dataSeries);
        m_classification = DataClassification.Unknown;
        return(true);
    }

    // If the data being added matches the parameters for this data group, add the data to the data group
    if (line == m_line && startTime == m_startTime && endTime == m_endTime && samples == m_samples)
    {
        m_dataSeries.Add(dataSeries);
        return(true);
    }

    return(false);
}
/// <summary>
/// Scans the given RMS series for contiguous runs of data points measured
/// under disturbed conditions (per the m_isDisturbed predicate) and returns
/// the inclusive index range of each run longer than half a cycle.
/// </summary>
/// <param name="rms">The RMS series to scan; may be null.</param>
/// <returns>List of inclusive index ranges, one per qualifying disturbance.</returns>
private List <Range <int> > DetectDisturbanceRanges(DataSeries rms)
{
    int samplesPerCycle;
    List <Range <int> > disturbanceRanges;
    int start;
    int end;

    if ((object)rms == null)
    {
        return(new List <Range <int> >());
    }

    // Determine the sample rate so that we can exclude
    // disturbances that are shorter than half a cycle
    samplesPerCycle = Transform.CalculateSamplesPerCycle(rms, m_systemFrequency);

    // Sample rate of zero is invalid
    if (samplesPerCycle == 0)
    {
        return(new List <Range <int> >());
    }

    // Initialize disturbanceRanges and the start index
    disturbanceRanges = new List <Range <int> >();
    start = 0;

    // Iterate over all data points in the RMS series
    while (start < rms.DataPoints.Count)
    {
        // If the data point at the start index was measured during
        // disturbed conditions, attempt to locate the end of the disturbance
        if (m_isDisturbed(rms[start]))
        {
            // Initialize the end index to the data
            // point just past the start of the disturbance
            end = start + 1;

            // Don't iterate beyond the end of the data series
            while (end < rms.DataPoints.Count)
            {
                // If the data point at the end index was not measured during
                // the disturbance, the end of the disturbance has been found
                if (!m_isDisturbed(rms[end]))
                {
                    break;
                }

                // Increment the end index
                end++;
            }

            // If the disturbance lasted for at least half a cycle,
            // store it in the list of disturbance ranges
            if ((end - start) > (samplesPerCycle / 2))
            {
                disturbanceRanges.Add(new Range <int>(start, end - 1));
            }

            // Move the start index up
            // to the end of the disturbance
            start = end;
        }

        // Increment the start index
        start++;
    }

    return(disturbanceRanges);
}
/// <summary>
/// Computes this series minus the given series, point by point,
/// by adding the operand's negation.
/// </summary>
/// <param name="operand">The series to subtract from this one.</param>
public DataSeries Subtract(DataSeries operand)
{
    DataSeries negatedOperand = operand.Negate();
    return Add(negatedOperand);
}
/// <summary>
/// Computes per-cycle data (RMS, phase, peak amplitude, and sine-fit error)
/// from the given instantaneous series using a sliding one-cycle window
/// advanced one sample at a time.
/// </summary>
/// <param name="dataSeries">The source series; may be null.</param>
/// <param name="frequency">The system frequency used to size the cycle window.</param>
/// <returns>A cycle data group containing the four computed series (empty when the source is null).</returns>
public static CycleDataGroup ToCycleDataGroup(DataSeries dataSeries, double frequency)
{
    DataGroup dataGroup = new DataGroup();

    DataSeries rmsSeries = new DataSeries();
    DataSeries phaseSeries = new DataSeries();
    DataSeries peakSeries = new DataSeries();
    DataSeries errorSeries = new DataSeries();

    int samplesPerCycle;
    double[] yValues;
    double[] tValues;
    double sum;
    DateTime cycleTime;
    SineWave sineFit;

    if ((object)dataSeries == null)
    {
        return(new CycleDataGroup(dataGroup));
    }

    // Set series info to the source series info
    rmsSeries.SeriesInfo = dataSeries.SeriesInfo;
    phaseSeries.SeriesInfo = dataSeries.SeriesInfo;
    peakSeries.SeriesInfo = dataSeries.SeriesInfo;
    errorSeries.SeriesInfo = dataSeries.SeriesInfo;

    // Get samples per cycle of the data series based on the given frequency
    samplesPerCycle = CalculateSamplesPerCycle(dataSeries, frequency);

    // Initialize arrays of y-values and t-values for calculating cycle data
    yValues = new double[samplesPerCycle];
    tValues = new double[samplesPerCycle];

    // Slide a one-cycle window across the series, one sample at a time
    for (int i = 0; i <= dataSeries.DataPoints.Count - samplesPerCycle; i++)
    {
        // Use the time of the first data point in the cycle as the time of the cycle
        cycleTime = dataSeries.DataPoints[i].Time;
        sum = 0.0D;

        // Copy values from the original data series into the y-value and t-value
        // arrays, accumulating the sum of squares for the RMS computation
        for (int j = 0; j < samplesPerCycle; j++)
        {
            yValues[j] = dataSeries.DataPoints[i + j].Value;
            tValues[j] = (dataSeries.DataPoints[i + j].Time - cycleTime).TotalSeconds;
            sum += yValues[j] * yValues[j];
        }

        // Use a curve fitting algorithm to estimate the sine wave over this cycle
        sineFit = WaveFit.SineFit(yValues, tValues, frequency);

        // Add data points to each of the cycle data series
        rmsSeries.DataPoints.Add(new DataPoint()
        {
            Time = cycleTime,
            Value = Math.Sqrt(sum / samplesPerCycle)
        });

        phaseSeries.DataPoints.Add(new DataPoint()
        {
            Time = cycleTime,
            Value = sineFit.Phase
        });

        peakSeries.DataPoints.Add(new DataPoint()
        {
            Time = cycleTime,
            Value = sineFit.Amplitude
        });

        // Error is the sum of absolute differences between the fit and the samples
        errorSeries.DataPoints.Add(new DataPoint()
        {
            Time = cycleTime,
            Value = tValues
                .Select(sineFit.CalculateY)
                .Zip(yValues, (estimate, value) => Math.Abs(estimate - value))
                .Sum()
        });
    }

    // Add a series to the data group for each series of cycle data
    dataGroup.Add(rmsSeries);
    dataGroup.Add(phaseSeries);
    dataGroup.Add(peakSeries);
    dataGroup.Add(errorSeries);

    return(new CycleDataGroup(dataGroup));
}
/// <summary>
/// Computes the number of samples in one cycle by dividing the series'
/// sample rate by the given frequency, rounded to the nearest integer.
/// </summary>
private static int CalculateSamplesPerCycle(DataSeries dataSeries, double frequency)
{
    double samplesPerCycle = dataSeries.SampleRate / frequency;
    return (int)Math.Round(samplesPerCycle);
}
/// <summary>
/// Extracts the value of every data point in the series into a new list,
/// in the same order as the points appear in the series.
/// </summary>
public static List <double> ToValues(DataSeries series)
{
    List<double> values = new List<double>();

    foreach (DataPoint dataPoint in series.DataPoints)
        values.Add(dataPoint.Value);

    return values;
}
/// <summary>
/// Computes per-cycle data (RMS, phase, peak amplitude, and sine-fit error)
/// from the given instantaneous series using a sliding one-cycle window.
/// Cycles that span a gap in the time data (a quarter cycle or more between
/// adjacent samples) are skipped. Returns null when the source is null.
/// </summary>
/// <param name="dataSeries">The source series; may be null.</param>
/// <param name="frequency">The system frequency used to size the cycle window.</param>
public static CycleDataGroup ToCycleDataGroup(DataSeries dataSeries, double frequency)
{
    DataGroup dataGroup = new DataGroup();

    DataSeries rmsSeries = new DataSeries();
    DataSeries phaseSeries = new DataSeries();
    DataSeries peakSeries = new DataSeries();
    DataSeries errorSeries = new DataSeries();

    int samplesPerCycle;
    double[] yValues;
    double[] tValues;
    double sum;
    DateTime cycleTime;
    SineWave sineFit;

    if ((object)dataSeries == null)
        return null;

    // Set series info to the source series info
    rmsSeries.SeriesInfo = dataSeries.SeriesInfo;
    phaseSeries.SeriesInfo = dataSeries.SeriesInfo;
    peakSeries.SeriesInfo = dataSeries.SeriesInfo;
    errorSeries.SeriesInfo = dataSeries.SeriesInfo;

    // Get samples per cycle of the data series based on the given frequency
    samplesPerCycle = CalculateSamplesPerCycle(dataSeries, frequency);

    // Initialize arrays of y-values and t-values for calculating cycle data
    yValues = new double[samplesPerCycle];
    tValues = new double[samplesPerCycle];

    // Obtain a list of time gaps in the data series: indexes where the next
    // sample is at least a quarter cycle later than the current one
    List<int> gapIndexes = Enumerable.Range(0, dataSeries.DataPoints.Count - 1)
        .Where(index =>
        {
            DataPoint p1 = dataSeries[index];
            DataPoint p2 = dataSeries[index + 1];
            double cycleDiff = (p2.Time - p1.Time).TotalSeconds * frequency;
            return (cycleDiff >= 0.25);
        })
        .ToList();

    // Slide a one-cycle window across the series, one sample at a time
    for (int i = 0; i <= dataSeries.DataPoints.Count - samplesPerCycle; i++)
    {
        // If the cycle following i contains a data gap, do not calculate cycle data
        if (gapIndexes.Any(index => i <= index && (i + samplesPerCycle - 1) > index))
            continue;

        // Use the time of the first data point in the cycle as the time of the cycle
        cycleTime = dataSeries.DataPoints[i].Time;
        sum = 0.0D;

        // Copy values from the original data series into the y-value and t-value
        // arrays, accumulating the sum of squares for the RMS computation
        for (int j = 0; j < samplesPerCycle; j++)
        {
            yValues[j] = dataSeries.DataPoints[i + j].Value;
            tValues[j] = (dataSeries.DataPoints[i + j].Time - cycleTime).TotalSeconds;
            sum += yValues[j] * yValues[j];
        }

        // Use a curve fitting algorithm to estimate the sine wave over this cycle
        sineFit = WaveFit.SineFit(yValues, tValues, frequency);

        // Add data points to each of the cycle data series
        rmsSeries.DataPoints.Add(new DataPoint()
        {
            Time = cycleTime,
            Value = Math.Sqrt(sum / samplesPerCycle)
        });

        phaseSeries.DataPoints.Add(new DataPoint()
        {
            Time = cycleTime,
            Value = sineFit.Phase
        });

        peakSeries.DataPoints.Add(new DataPoint()
        {
            Time = cycleTime,
            Value = sineFit.Amplitude
        });

        // Error is the sum of absolute differences between the fit and the samples
        errorSeries.DataPoints.Add(new DataPoint()
        {
            Time = cycleTime,
            Value = tValues
                .Select(sineFit.CalculateY)
                .Zip(yValues, (estimate, value) => Math.Abs(estimate - value))
                .Sum()
        });
    }

    // Add a series to the data group for each series of cycle data
    dataGroup.Add(rmsSeries);
    dataGroup.Add(phaseSeries);
    dataGroup.Add(peakSeries);
    dataGroup.Add(errorSeries);

    return new CycleDataGroup(dataGroup);
}
/// <summary>
/// Deserializes a compressed blob of channel data into this collection.
/// Blobs still carrying the GZip magic header (0x1F 0x8B) are handled by the
/// legacy algorithm; newer blobs have the header bytes overwritten as a format
/// marker and must have them restored before decompression.
/// </summary>
/// <param name="meter">The meter used to look up series info by ID; may be null.</param>
/// <param name="data">The compressed data blob. NOTE: the first two bytes are modified in place.</param>
public void FromData(Meter meter, byte[] data)
{
    // If the blob contains the GZip header,
    // use the legacy deserialization algorithm
    if (data[0] == 0x1F && data[1] == 0x8B)
    {
        FromData_Legacy(meter, data);
        return;
    }

    // Restore the GZip header before uncompressing
    data[0] = 0x1F;
    data[1] = 0x8B;

    byte[] uncompressedData = GZipStream.UncompressBuffer(data);
    int offset = 0;

    // Total number of samples per series
    m_samples = LittleEndian.ToInt32(uncompressedData, offset);
    offset += sizeof(int);

    List <DateTime> times = new List <DateTime>();

    // Timestamps are stored in runs: each run begins with a count and an
    // absolute tick value, followed by (count - 1) 16-bit tick deltas
    while (times.Count < m_samples)
    {
        int timeValues = LittleEndian.ToInt32(uncompressedData, offset);
        offset += sizeof(int);

        long currentValue = LittleEndian.ToInt64(uncompressedData, offset);
        offset += sizeof(long);
        times.Add(new DateTime(currentValue));

        for (int i = 1; i < timeValues; i++)
        {
            currentValue += LittleEndian.ToUInt16(uncompressedData, offset);
            offset += sizeof(ushort);
            times.Add(new DateTime(currentValue));
        }
    }

    // Each series is a series ID, a linear decompression offset/scale pair,
    // and m_samples 16-bit quantized values
    while (offset < uncompressedData.Length)
    {
        DataSeries dataSeries = new DataSeries();
        int seriesID = LittleEndian.ToInt32(uncompressedData, offset);
        offset += sizeof(int);

        // A non-positive ID has no corresponding series info
        if (seriesID > 0 && (object)meter != null)
        {
            dataSeries.SeriesInfo = GetSeriesInfo(meter, seriesID);
        }

        // The maximum quantized value is reserved as the NaN sentinel
        const ushort NaNValue = ushort.MaxValue;
        double decompressionOffset = LittleEndian.ToDouble(uncompressedData, offset);
        double decompressionScale = LittleEndian.ToDouble(uncompressedData, offset + sizeof(double));
        offset += 2 * sizeof(double);

        for (int i = 0; i < m_samples; i++)
        {
            ushort compressedValue = LittleEndian.ToUInt16(uncompressedData, offset);
            offset += sizeof(ushort);

            // Reverse the linear quantization: value = scale * quantized + offset
            double decompressedValue = decompressionScale * compressedValue + decompressionOffset;

            if (compressedValue == NaNValue)
            {
                decompressedValue = double.NaN;
            }

            dataSeries.DataPoints.Add(new DataPoint() { Time = times[i], Value = decompressedValue });
        }

        Add(dataSeries);
    }
}
/// <summary>
/// Returns the point-by-point sum of this series and the given operand.
/// Both series must contain the same number of data points.
/// </summary>
/// <param name="operand">The series to add to this one.</param>
/// <exception cref="InvalidOperationException">The operand's point count differs from this series'.</exception>
public DataSeries Add(DataSeries operand)
{
    if (m_dataPoints.Count != operand.DataPoints.Count)
        throw new InvalidOperationException("Cannot take the sum of series with mismatched time values");

    DataSeries sum = new DataSeries();
    List<DataPoint> summedPoints = new List<DataPoint>();

    // Pair points positionally and combine them with the point-level Add helper
    for (int i = 0; i < m_dataPoints.Count; i++)
        summedPoints.Add(Add(m_dataPoints[i], operand.DataPoints[i]));

    sum.DataPoints = summedPoints;
    return sum;
}
/// <summary>
/// Gets the name of the measurement type of the channel backing the given series.
/// </summary>
private string GetMeasurementType(DataSeries dataSeries) =>
    dataSeries.SeriesInfo.Channel.MeasurementType.Name;
/// <summary>
/// Returns a new series containing the negation of every point in this series.
/// The result carries no series info, matching the other arithmetic operations.
/// </summary>
public DataSeries Negate()
{
    DataSeries negated = new DataSeries();
    List<DataPoint> negatedPoints = new List<DataPoint>();

    foreach (DataPoint point in m_dataPoints)
        negatedPoints.Add(point.Negate());

    negated.DataPoints = negatedPoints;
    return negated;
}
/// <summary>
/// Gets the name of the phase of the channel backing the given series.
/// </summary>
private string GetPhase(DataSeries dataSeries) =>
    dataSeries.SeriesInfo.Channel.Phase.Name;
/// <summary>
/// Extracts the points from startIndex to endIndex (inclusive) into a new
/// series carrying the same series info. Indexes are clamped to the valid
/// range; when the clamped range is empty, a series with no points is returned.
/// </summary>
/// <param name="startIndex">Inclusive start index; clamped to 0 if negative.</param>
/// <param name="endIndex">Inclusive end index; clamped to the last point.</param>
public DataSeries ToSubSeries(int startIndex, int endIndex)
{
    DataSeries subSeries = new DataSeries();
    subSeries.SeriesInfo = m_seriesInfo;

    // Clamp the requested range to the bounds of this series
    int first = Math.Max(startIndex, 0);
    int last = Math.Min(endIndex, m_dataPoints.Count - 1);
    int count = last - first + 1;

    if (count > 0)
        subSeries.DataPoints = m_dataPoints.Skip(first).Take(count).ToList();

    return subSeries;
}
/// <summary>
/// Parses the control file into the meter data set: builds the meter from the
/// identity string, creates one channel and data series per analog channel,
/// then reads every record's corrected values into the series.
/// </summary>
/// <param name="filePath">The path to the file to be parsed.</param>
public void Parse(string filePath)
{
    ControlFile controlFile;
    string identityString;
    string deviceName;
    Channel channel;
    DataSeries series;
    List<ANLG_CHNL_NEW> analogChannels;

    controlFile = m_parser.ControlFile;
    identityString = controlFile.IdentityString.value;

    // The device name is the first line of the identity string,
    // regardless of which line-ending convention it uses
    deviceName = identityString.Substring(0, IndexOf(identityString, "\r\n", "\n", "\r"));

    m_meterDataSet.Meter = new Meter();
    m_meterDataSet.Meter.AssetKey = deviceName;
    m_meterDataSet.Meter.Name = deviceName;
    // Short name is capped at 50 characters
    m_meterDataSet.Meter.ShortName = deviceName.Substring(0, Math.Min(deviceName.Length, 50));

    // Order the analog channels by their key in the settings dictionary
    analogChannels = controlFile.AnalogChannelSettings
        .OrderBy(kvp => kvp.Key)
        .Select(kvp => kvp.Value)
        .ToList();

    // Add an empty data series for 1-based indexing
    m_meterDataSet.DataSeries.Add(new DataSeries());

    foreach (ANLG_CHNL_NEW analogChannel in analogChannels)
    {
        channel = ParseSeries(analogChannel);

        // Record the 1-based position of the series as its source index
        channel.Series.Single().SourceIndexes = m_meterDataSet.DataSeries.Count.ToString();

        series = new DataSeries();
        series.SeriesInfo = channel.Series[0];

        m_meterDataSet.Meter.Channels.Add(channel);
        m_meterDataSet.DataSeries.Add(series);
    }

    // Read every record, appending one data point per analog channel;
    // series index is offset by 1 because of the empty placeholder series
    while (m_parser.ReadNext())
    {
        for (int i = 0; i < analogChannels.Count; i++)
            m_meterDataSet.DataSeries[i + 1].DataPoints.Add(new DataPoint() { Time = m_parser.CalculatedTimestamp, Value = m_parser.CorrectedValues[i] });
    }
}
/// <summary>
/// Parses the schema-based file (COMTRADE-style) into the meter data set:
/// builds the meter and its location from the schema, creates one series per
/// analog and digital channel, then reads every record's values into them.
/// </summary>
/// <param name="filePath">The path to the file to be parsed.</param>
public void Parse(string filePath)
{
    Schema schema;
    Channel channel;
    DataSeries series;

    schema = m_parser.Schema;

    m_meterDataSet.Meter = new Meter();
    m_meterDataSet.Meter.AssetKey = schema.DeviceID;
    m_meterDataSet.Meter.Name = schema.DeviceID;
    // Short names are capped at 50 characters
    m_meterDataSet.Meter.ShortName = schema.DeviceID.Substring(0, Math.Min(schema.DeviceID.Length, 50));

    m_meterDataSet.Meter.MeterLocation = new MeterLocation();
    m_meterDataSet.Meter.MeterLocation.AssetKey = schema.StationName;
    m_meterDataSet.Meter.MeterLocation.Name = schema.StationName;
    m_meterDataSet.Meter.MeterLocation.ShortName = schema.StationName.Substring(0, Math.Min(schema.StationName.Length, 50));
    m_meterDataSet.Meter.MeterLocation.Description = schema.StationName;

    foreach (AnalogChannel analogChannel in schema.AnalogChannels)
    {
        channel = ParseSeries(analogChannel);

        series = new DataSeries();
        series.SeriesInfo = channel.Series[0];

        m_meterDataSet.Meter.Channels.Add(channel);

        // Channel indexes may be sparse, so pad the list with
        // empty series until the target slot exists
        while (m_meterDataSet.DataSeries.Count <= analogChannel.Index)
            m_meterDataSet.DataSeries.Add(new DataSeries());

        m_meterDataSet.DataSeries[analogChannel.Index] = series;
    }

    foreach (DigitalChannel digitalChannel in schema.DigitalChannels)
    {
        channel = ParseSeries(digitalChannel);

        series = new DataSeries();
        series.SeriesInfo = channel.Series[0];

        m_meterDataSet.Meter.Channels.Add(channel);

        // Same sparse-index padding as for analog channels
        while (m_meterDataSet.Digitals.Count <= digitalChannel.Index)
            m_meterDataSet.Digitals.Add(new DataSeries());

        m_meterDataSet.Digitals[digitalChannel.Index] = series;
    }

    try
    {
        while (m_parser.ReadNext())
        {
            for (int i = 0; i < schema.AnalogChannels.Length; i++)
            {
                int seriesIndex = schema.AnalogChannels[i].Index;
                string units = schema.AnalogChannels[i].Units.ToUpper();

                // Channels recorded in kiloamps/kilovolts are converted to base units
                double multiplier = (units.Contains("KA") || units.Contains("KV")) ? 1000.0D : 1.0D;

                m_meterDataSet.DataSeries[seriesIndex].DataPoints.Add(new DataPoint() { Time = m_parser.Timestamp, Value = multiplier * m_parser.PrimaryValues[i] });
            }

            for (int i = 0; i < schema.DigitalChannels.Length; i++)
            {
                // Digital values follow the analog values in the parser's value array
                int valuesIndex = schema.TotalAnalogChannels + i;
                int seriesIndex = schema.DigitalChannels[i].Index;

                m_meterDataSet.Digitals[seriesIndex].DataPoints.Add(new DataPoint() { Time = m_parser.Timestamp, Value = m_parser.Values[valuesIndex] });
            }
        }
    }
    catch (InvalidOperationException ex)
    {
        // A truncated record set ends parsing with a warning;
        // data read so far is kept
        Log.Warn(ex.Message, ex);
    }
}
/// <summary>
/// Sanitizes a fault distance curve in place: NaN values become zero, and all
/// other values are clamped to the plausible distance range derived from the
/// line length and the min/max fault distance multipliers.
/// </summary>
/// <param name="faultCurve">The fault distance curve to sanitize; its points are modified in place.</param>
/// <param name="line">The line whose length bounds the valid fault distances.</param>
private void FixFaultCurve(DataSeries faultCurve, Line line)
{
    double upperBound = MaxFaultDistanceMultiplier * line.Length;
    double lowerBound = MinFaultDistanceMultiplier * line.Length;

    foreach (DataPoint dataPoint in faultCurve.DataPoints)
    {
        double distance = dataPoint.Value;

        if (double.IsNaN(distance))
            dataPoint.Value = 0.0D;
        else if (distance > upperBound)
            dataPoint.Value = upperBound;
        else if (distance < lowerBound)
            dataPoint.Value = lowerBound;
    }
}
/// <summary>
/// Gets the nominal voltage, in volts, for the channel behind the given RMS
/// series. Line-to-neutral phases (AN, BN, CN) are scaled down by sqrt(3)
/// from the line voltage. Returns 0.0 when the series is null.
/// </summary>
private double GetLineVoltage(DataSeries rms)
{
    double kilovolts = rms?.SeriesInfo.Channel.Line.VoltageKV ?? 0.0D;
    string phaseName = rms?.SeriesInfo.Channel.Phase.Name;

    // Line-to-neutral channels use phase voltage rather than line voltage
    bool lineToNeutral = phaseName == "AN" || phaseName == "BN" || phaseName == "CN";

    if (lineToNeutral)
        kilovolts /= Math.Sqrt(3.0D);

    // Convert kV to volts
    return kilovolts * 1000.0D;
}
/// <summary>
/// Extracts the value of every data point in the series into a list,
/// preserving point order.
/// </summary>
public static List<double> ToValues(DataSeries series)
{
    return series.DataPoints
        .Select(dataPoint => dataPoint.Value)
        .ToList();
}
/// <summary>
/// Builds a disturbance record for the given index range of the RMS series,
/// using this detector's event type, the supplied phase, and the most severe
/// magnitude found within the range.
/// </summary>
/// <param name="rms">The RMS series the range indexes into.</param>
/// <param name="range">Inclusive start/end indexes of the disturbance.</param>
/// <param name="phase">The phase the disturbance was detected on.</param>
private Disturbance ToDisturbance(DataSeries rms, Range<int> range, Phase phase)
{
    Disturbance disturbance = new Disturbance();

    disturbance.EventType = m_eventType;
    disturbance.Phase = phase;
    disturbance.StartIndex = range.Start;
    disturbance.EndIndex = range.End;
    disturbance.StartTime = rms[range.Start].Time;
    disturbance.EndTime = rms[range.End].Time;
    disturbance.Magnitude = GetMagnitude(rms.ToSubSeries(range.Start, range.End));

    // NOTE(review): GetValueOrDefault() yields 0 when PerUnitValue is null,
    // making this division produce Infinity/NaN -- confirm that is intended
    disturbance.PerUnitMagnitude = disturbance.Magnitude / rms.SeriesInfo.Channel.PerUnitValue.GetValueOrDefault();

    return disturbance;
}
/// <summary>
/// Calculates the number of samples per cycle for the given data series
/// at the given frequency, based on the series' sample rate.
/// </summary>
/// <param name="dataSeries">The series whose sample rate is used.</param>
/// <param name="frequency">The system frequency, in Hz.</param>
public static int CalculateSamplesPerCycle(DataSeries dataSeries, double frequency) { return CalculateSamplesPerCycle(dataSeries.SampleRate, frequency); }
/// <summary>
/// Parses the PQDIF file into the meter data set: builds the meter from the
/// file's single data source, creates a data series for every time/value
/// series instance, and registers channels for series definitions that were
/// never instanced.
/// </summary>
/// <param name="filePath">The path to the file to be parsed.</param>
/// <remarks>
/// Results are stored in m_meterDataSet; this method does not return a value.
/// </remarks>
public void Parse(string filePath)
{
    List<DataSourceRecord> dataSources;
    List<ObservationRecord> observationRecords;
    List<ChannelInstance> channelInstances;
    List<SeriesInstance> seriesInstances;
    List<SeriesDefinition> seriesDefinitions;
    Meter meter;
    Channel channel;
    DataSeries dataSeries;
    DateTime[] timeData;

    // Build the list of observation records in the PQDIF file
    observationRecords = new List<ObservationRecord>();

    while (m_parser.HasNextObservationRecord())
        observationRecords.Add(m_parser.NextObservationRecord());

    // Build the list of all data source records in the PQDIF file
    dataSources = observationRecords
        .Select(observation => observation.DataSource)
        .Distinct()
        .ToList();

    // If there are no data sources, there is no
    // need to go any further because we won't be
    // able to interpret any of the channel data
    if (!dataSources.Any())
        return;

    // Validate data sources to make sure there is only one data source defined in the file
    if (!dataSources.Zip(dataSources.Skip(1), (ds1, ds2) => AreEquivalent(ds1, ds2)).All(b => b))
        throw new InvalidDataException($"PQDIF file \"{filePath}\" defines too many data sources.");

    // Create a meter from the parsed data source
    meter = ParseDataSource(dataSources.First());
    m_meterDataSet.Meter = meter;

    // Build the list of all channel instances in the PQDIF file whose first
    // series is a time series (required to pair values with timestamps)
    channelInstances = observationRecords
        .SelectMany(observation => observation.ChannelInstances)
        .Where(channelInstance => QuantityType.IsQuantityTypeID(channelInstance.Definition.QuantityTypeID))
        .Where(channelInstance => channelInstance.SeriesInstances.Any())
        .Where(channelInstance => channelInstance.SeriesInstances[0].Definition.ValueTypeID == SeriesValueType.Time)
        .ToList();

    // Create the list of series instances so we can
    // build it as we process each channel instance
    seriesInstances = new List<SeriesInstance>();

    foreach (ChannelInstance channelInstance in channelInstances)
    {
        // Only quantity types that define time/value data are handled below
        bool timeValueChannel =
            channelInstance.Definition.QuantityTypeID == QuantityType.WaveForm ||
            channelInstance.Definition.QuantityTypeID == QuantityType.ValueLog ||
            channelInstance.Definition.QuantityTypeID == QuantityType.Phasor ||
            channelInstance.Definition.QuantityTypeID == QuantityType.Flash ||
            channelInstance.Definition.QuantityTypeID == QuantityType.MagDurTime ||
            channelInstance.Definition.QuantityTypeID == QuantityType.MagDurCount;

        // TODO: Create representation for quantity types that do not define time/value data
        if (!timeValueChannel)
            continue;

        // Parse time data from the channel instance
        timeData = ParseTimeData(channelInstance);

        // The first series instance is the time series, so skip it
        foreach (SeriesInstance seriesInstance in channelInstance.SeriesInstances.Skip(1))
        {
            // Create a channel from the parsed series instance
            seriesInstances.Add(seriesInstance);
            channel = ParseSeries(seriesInstance);

            // Parse the values and zip them with time data to create data points
            dataSeries = new DataSeries();
            dataSeries.DataPoints = timeData.Zip(ParseValueData(seriesInstance), (time, d) => new DataPoint() { Time = time, Value = d }).ToList();
            dataSeries.SeriesInfo = channel.Series[0];

            // Add the new channel to the meter's channel list
            meter.Channels.Add(channel);
            m_meterDataSet.DataSeries.Add(dataSeries);
        }
    }

    // Build a list of series definitions that were not instanced by this PQDIF file
    seriesDefinitions = dataSources
        .SelectMany(dataSource => dataSource.ChannelDefinitions)
        .SelectMany(channelDefinition => channelDefinition.SeriesDefinitions)
        .Distinct()
        .Except(seriesInstances.Select(seriesInstance => seriesInstance.Definition))
        .ToList();

    // Add each of the series definitions which were not instanced to the meter's list of channels
    foreach (SeriesDefinition seriesDefinition in seriesDefinitions)
        meter.Channels.Add(ParseSeries(seriesDefinition));
}
/// <summary>
/// Parses the relay event file into the meter data set: builds the meter and
/// its location from the report header, then converts every analog and digital
/// channel of each event report (fixed-width and comma-separated) into data
/// series. Line impedance settings (Z1/Z0, LL) are copied into the
/// configuration when present.
/// </summary>
/// <param name="filePath">The path to the file to be parsed.</param>
/// <remarks>
/// Results are stored in m_meterDataSet; this method does not return a value.
/// </remarks>
public void Parse(string filePath)
{
    Header header;
    Channel channel;
    DataSeries series;
    List<DateTime> timeSamples;
    List<double> valueSamples;

    // Parse the file only once; subsequent calls reuse the cached event file
    if ((object)m_eventFile == null)
        m_eventFile = EventFile.Parse(filePath);

    // Nothing to do when the file contains no reports of either kind
    if (!m_eventFile.EventReports.Any() && !m_eventFile.CommaSeparatedEventReports.Any())
        return;

    // Any report's header identifies the relay and station
    header = m_eventFile.EventReports.FirstOrDefault()?.Header ?? m_eventFile.CommaSeparatedEventReports[0].Header;

    m_meterDataSet.Meter = new Meter();
    m_meterDataSet.Meter.AssetKey = header.RelayID;
    m_meterDataSet.Meter.Name = header.RelayID;
    // Short names are capped at 50 characters
    m_meterDataSet.Meter.ShortName = new string(header.RelayID.ToNonNullString().Take(50).ToArray());

    m_meterDataSet.Meter.MeterLocation = new MeterLocation();
    m_meterDataSet.Meter.MeterLocation.AssetKey = header.StationID;
    m_meterDataSet.Meter.MeterLocation.Name = header.StationID;
    m_meterDataSet.Meter.MeterLocation.ShortName = new string(header.StationID.ToNonNullString().Take(50).ToArray());
    m_meterDataSet.Meter.MeterLocation.Description = header.StationID;

    foreach (EventReport report in m_eventFile.EventReports)
    {
        for (int i = 0; i < report.AnalogSection.AnalogChannels.Count; i++)
        {
            channel = MakeParsedAnalog(report, i);
            series = new DataSeries();

            timeSamples = report.AnalogSection.TimeChannel.Samples;
            valueSamples = report.AnalogSection.AnalogChannels[i].Samples;

            series.DataPoints = timeSamples
                .Zip(valueSamples, (time, value) => new DataPoint() { Time = time, Value = value })
                .ToList();

            // Voltage channels are recorded in kV; convert to volts
            if (new string[] { "VA", "VB", "VC", "VS" }.Contains(report.AnalogSection.AnalogChannels[i].Name))
                series = series.Multiply(1000.0D);

            series.SeriesInfo = channel.Series[0];
            m_meterDataSet.DataSeries.Add(series);
        }

        for (int i = 0; i < report.AnalogSection.DigitalChannels.Count; i++)
        {
            channel = MakeParsedDigital(report, i);
            series = new DataSeries();

            // "*" marks a placeholder digital channel; skip it
            if (channel.Name == "*")
                continue;

            timeSamples = report.AnalogSection.TimeChannel.Samples;
            valueSamples = report.AnalogSection.DigitalChannels[i].Samples.Select(Convert.ToDouble).ToList();

            series.SeriesInfo = channel.Series[0];

            series.DataPoints = timeSamples
                .Zip(valueSamples, (time, value) => new DataPoint() { Time = time, Value = value })
                .ToList();

            m_meterDataSet.Digitals.Add(series);
        }

        // Read line impedance settings (magnitude/angle form) from the report
        ComplexNumber z1 = new ComplexNumber(0.0D, 0.0D);
        ComplexNumber z0 = new ComplexNumber(0.0D, 0.0D);
        double groupSetting;

        if (double.TryParse(report.GetGroupSettings("Z1MAG"), out groupSetting))
            z1.Magnitude = groupSetting;

        if (double.TryParse(report.GetGroupSettings("Z1ANG"), out groupSetting))
            z1.Angle = groupSetting;

        if (double.TryParse(report.GetGroupSettings("Z0MAG"), out groupSetting))
            z0.Magnitude = groupSetting;

        if (double.TryParse(report.GetGroupSettings("Z0ANG"), out groupSetting))
            z0.Angle = groupSetting;

        // Equal positive- and zero-sequence impedances are treated as
        // missing/default settings and are not copied to the configuration
        if (z1 != z0)
        {
            m_meterDataSet.Configuration.R1 = z1.Real;
            m_meterDataSet.Configuration.X1 = z1.Imaginary;
            m_meterDataSet.Configuration.R0 = z0.Real;
            m_meterDataSet.Configuration.X0 = z0.Imaginary;

            if (double.TryParse(report.GetGroupSettings("LL"), out groupSetting))
                m_meterDataSet.Configuration.LineLength = groupSetting;
        }
    }

    foreach (CommaSeparatedEventReport report in m_eventFile.CommaSeparatedEventReports)
    {
        for (int i = 0; i < report.AnalogSection.AnalogChannels.Count; i++)
        {
            channel = MakeParsedAnalog(report, i);
            series = new DataSeries();

            timeSamples = report.AnalogSection.TimeChannel.Samples;
            valueSamples = report.AnalogSection.AnalogChannels[i].Samples;

            series.DataPoints = timeSamples
                .Zip(valueSamples, (time, value) => new DataPoint() { Time = time, Value = value })
                .ToList();

            series.SeriesInfo = channel.Series[0];
            m_meterDataSet.DataSeries.Add(series);
        }

        for (int i = 0; i < report.AnalogSection.DigitalChannels.Count; i++)
        {
            channel = MakeParsedDigital(report, i);
            series = new DataSeries();

            // "*" marks a placeholder digital channel; skip it
            if (channel.Name == "*")
                continue;

            timeSamples = report.AnalogSection.TimeChannel.Samples;
            valueSamples = report.AnalogSection.DigitalChannels[i].Samples.Select(Convert.ToDouble).ToList();

            series.SeriesInfo = channel.Series[0];

            series.DataPoints = timeSamples
                .Zip(valueSamples, (time, value) => new DataPoint() { Time = time, Value = value })
                .ToList();

            m_meterDataSet.Digitals.Add(series);
        }
    }
}