// Builds one meter data set per meter that has events in the given file
// group, flattening the time-domain data of each event into the set.
private List <MeterDataSet> LoadMeterDataSets(DbAdapterContainer dbAdapterContainer, FileGroup fileGroup)
{
    MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter <MeterInfoDataContext>();
    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter <EventTableAdapter>();
    EventDataTableAdapter eventDataAdapter = dbAdapterContainer.GetAdapter <EventDataTableAdapter>();

    MeterData.EventDataTable eventTable = eventAdapter.GetDataByFileGroup(fileGroup.ID);
    List <MeterDataSet> meterDataSets = new List <MeterDataSet>();

    foreach (IGrouping <int, MeterData.EventRow> eventGroup in eventTable.GroupBy(evt => evt.MeterID))
    {
        MeterDataSet meterDataSet = new MeterDataSet();
        meterDataSet.Meter = meterInfo.Meters.SingleOrDefault(meter => meter.ID == eventGroup.Key);

        // Pull the time-domain data for each of this meter's events
        // and accumulate every data series into the meter data set
        foreach (MeterData.EventRow evt in eventGroup)
        {
            DataGroup eventDataGroup = new DataGroup();
            eventDataGroup.FromData(meterDataSet.Meter, eventDataAdapter.GetTimeDomainData(evt.EventDataID));

            foreach (DataSeries dataSeries in eventDataGroup.DataSeries)
            {
                meterDataSet.DataSeries.Add(dataSeries);
            }
        }

        meterDataSets.Add(meterDataSet);
    }

    return meterDataSets;
}
// Builds a lookup from breaker operation type enumeration values to their
// database IDs, merging any missing operation types into the database first.
private Dictionary <BreakerOperationType, int> GetBreakerOperationTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    MeterData.BreakerOperationTypeDataTable breakerOperationTypeTable = new MeterData.BreakerOperationTypeDataTable();

    foreach (BreakerOperationType operationType in Enum.GetValues(typeof(BreakerOperationType)))
    {
        breakerOperationTypeTable.AddBreakerOperationTypeRow(operationType.ToString(), operationType.ToString());
    }

    // Merge any operation types missing from the database
    BulkLoader bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

    bulkLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                  "USING {1} AS Source " +
                                  "ON Source.Name = Target.Name " +
                                  "WHEN NOT MATCHED THEN " +
                                  " INSERT (Name, Description) " +
                                  " VALUES (Source.Name, Source.Description);";

    bulkLoader.Load(breakerOperationTypeTable);

    // Reload the table to pick up the database-assigned IDs
    dbAdapterContainer.GetAdapter <BreakerOperationTypeTableAdapter>().Fill(breakerOperationTypeTable);

    // Build the lookup with an explicit loop rather than relying on the side
    // effect of Enum.TryParse inside a Where clause feeding a captured variable
    // used by the Select projection -- that pattern depends on streaming
    // evaluation order, and the original ToDictionary would also throw if
    // multiple rows parsed to the same enumeration value
    Dictionary <BreakerOperationType, int> lookup = new Dictionary <BreakerOperationType, int>();
    BreakerOperationType breakerOperationType;

    foreach (MeterData.BreakerOperationTypeRow row in breakerOperationTypeTable)
    {
        if (Enum.TryParse(row.Name, out breakerOperationType) && !lookup.ContainsKey(breakerOperationType))
            lookup.Add(breakerOperationType, row.ID);
    }

    return lookup;
}
// Gathers event-classified data groups from the meter data set, computes any
// missing current channels, and precalculates cycle data for each event.
public override void Initialize(MeterDataSet meterDataSet)
{
    MeterInfoDataContext meterInfo = m_dbAdapterContainer.GetAdapter <MeterInfoDataContext>();
    DataGroupsResource dataGroupsResource = meterDataSet.GetResource <DataGroupsResource>();
    Stopwatch stopwatch = new Stopwatch();

    // Keep only event-classified data groups sampled at no fewer than
    // roughly four samples per cycle (3.999 tolerates rounding error)
    m_dataGroups = dataGroupsResource.DataGroups
        .Where(dataGroup => dataGroup.Classification == DataClassification.Event)
        .Where(dataGroup => dataGroup.SamplesPerSecond / m_systemFrequency >= 3.999D)
        .ToList();

    Log.Info(string.Format("Found data for {0} events.", m_dataGroups.Count));

    // Wrap each data group in a VIDataGroup; any current channel the
    // VIDataGroup can calculate is added back into the source data group
    m_viDataGroups = m_dataGroups
        .Select(dataGroup =>
        {
            VIDataGroup viDataGroup = new VIDataGroup(dataGroup);
            dataGroup.Add(viDataGroup.CalculateMissingCurrentChannel(meterInfo));
            return (viDataGroup);
        })
        .ToList();

    Log.Info(string.Format("Calculating cycle data for all {0} events.", m_dataGroups.Count));

    stopwatch.Start();

    // Precompute cycle data for every event so downstream operations don't have to
    m_viCycleDataGroups = m_viDataGroups
        .Select(viDataGroup => Transform.ToVICycleDataGroup(viDataGroup, m_systemFrequency))
        .ToList();

    Log.Debug(string.Format("Cycle data calculated in {0}.", stopwatch.Elapsed));
}
// Associates breaker operation records with their events in the
// database and bulk loads the breaker operation table.
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventDataTable eventTable;
    Dictionary <EventKey, MeterData.EventRow> eventLookup;
    MeterData.EventRow eventRow;
    BulkLoader bulkLoader;

    eventTable = dbAdapterContainer.GetAdapter <EventTableAdapter>().GetDataByFileGroup(m_meterDataSet.FileGroup.ID);

    // Group by event key before building the lookup -- ToDictionary over the
    // raw rows would throw if the database contained duplicate events for this
    // meter; duplicates are resolved by taking the first row in each group
    eventLookup = eventTable
        .Where(evt => evt.MeterID == m_meterDataSet.Meter.ID)
        .GroupBy(CreateEventKey)
        .ToDictionary(grouping => grouping.Key, grouping => grouping.First());

    // Assign event IDs to breaker operations whose event was found in the database
    foreach (Tuple <EventKey, MeterData.BreakerOperationRow> breakerOperation in m_breakerOperations)
    {
        if (eventLookup.TryGetValue(breakerOperation.Item1, out eventRow))
        {
            breakerOperation.Item2.EventID = eventRow.ID;
            m_breakerOperationTable.AddBreakerOperationRow(breakerOperation.Item2);
        }
    }

    bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
    bulkLoader.Load(m_breakerOperationTable);
}
/// <summary>
/// Runs each meter data set through processing and then stamps the file
/// group of every set with the processing end time in the XDA time zone.
/// </summary>
public void ProcessMeterDataSets(List <MeterDataSet> meterDataSets, SystemSettings systemSettings, DbAdapterContainer dbAdapterContainer)
{
    try
    {
        foreach (MeterDataSet meterDataSet in meterDataSets)
        {
            ProcessMeterData(meterDataSet, dbAdapterContainer);
        }

        // Record a single processing end time, converted to the XDA time zone,
        // and apply it to every file group that was just processed
        TimeZoneInfo xdaTimeZone = systemSettings.XDATimeZoneInfo;
        DateTime processingEndTime = TimeZoneInfo.ConvertTimeFromUtc(DateTime.UtcNow, xdaTimeZone);

        foreach (MeterDataSet meterDataSet in meterDataSets)
        {
            meterDataSet.FileGroup.ProcessingEndTime = processingEndTime;
        }

        dbAdapterContainer.GetAdapter <FileInfoDataContext>().SubmitChanges();
    }
    catch (Exception ex)
    {
        OnHandleException(ex);
    }
}
// Builds a lookup from event classification values to their database
// IDs, merging any missing event types into the database first.
private Dictionary <EventClassification, int> GetEventTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventTypeDataTable eventTypeTable = new MeterData.EventTypeDataTable();

    foreach (EventClassification classification in Enum.GetValues(typeof(EventClassification)))
    {
        eventTypeTable.AddEventTypeRow(classification.ToString(), classification.ToString());
    }

    // Merge any event types missing from the database
    BulkLoader bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

    bulkLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                  "USING {1} AS Source " +
                                  "ON Source.Name = Target.Name " +
                                  "WHEN NOT MATCHED THEN " +
                                  " INSERT (Name, Description) " +
                                  " VALUES (Source.Name, Source.Description);";

    bulkLoader.Load(eventTypeTable);

    // Reload the table to pick up the database-assigned IDs
    dbAdapterContainer.GetAdapter <EventTypeTableAdapter>().Fill(eventTypeTable);

    // Build the lookup with an explicit loop rather than relying on the side
    // effect of Enum.TryParse inside a Where clause feeding a captured variable
    // used by the Select projection -- that pattern depends on streaming
    // evaluation order, and the original ToDictionary would also throw if
    // multiple rows parsed to the same classification value
    Dictionary <EventClassification, int> lookup = new Dictionary <EventClassification, int>();
    EventClassification eventClassification;

    foreach (MeterData.EventTypeRow row in eventTypeTable)
    {
        if (Enum.TryParse(row.Name, out eventClassification) && !lookup.ContainsKey(eventClassification))
            lookup.Add(eventClassification, row.ID);
    }

    return lookup;
}
// Loads the event identified by m_eventID along with its time-domain
// data and wraps it in a VIDataGroup for the meter that recorded it.
private VIDataGroup GetVIDataGroup()
{
    MeterInfoDataContext meterInfo = m_dbAdapterContainer.GetAdapter <MeterInfoDataContext>();
    EventTableAdapter eventAdapter = m_dbAdapterContainer.GetAdapter <EventTableAdapter>();
    EventDataTableAdapter eventDataAdapter = m_dbAdapterContainer.GetAdapter <EventDataTableAdapter>();

    // Both queries are keyed on the event ID; the first row is the event record
    MeterData.EventRow eventRow = eventAdapter.GetDataByID(m_eventID)[0];
    MeterData.EventDataRow eventDataRow = eventDataAdapter.GetDataBy(m_eventID)[0];

    Meter eventMeter = meterInfo.Meters.Single(m => m.ID == eventRow.MeterID);

    DataGroup eventDataGroup = new DataGroup();
    eventDataGroup.FromData(eventMeter, eventDataRow.TimeDomainData);

    return new VIDataGroup(eventDataGroup);
}
/// <summary>
/// Prepares the alarm tables used by this writer and caches the database adapter container.
/// </summary>
public override void Prepare(DbAdapterContainer dbAdapterContainer)
{
    m_dbAdapterContainer = dbAdapterContainer;
    m_alarmTypeTable = new AlarmData.AlarmTypeDataTable();
    m_alarmLogTable = new AlarmData.AlarmLogDataTable();

    // Pull the full set of alarm types up front so later lookups stay in memory
    dbAdapterContainer.GetAdapter <AlarmTypeTableAdapter>().Fill(m_alarmTypeTable);
}
/// <summary>
/// Looks up the database event record for each fault result and fills the
/// fault group, segment, curve, and summary tables with the matched records.
/// </summary>
public void FillTables(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventDataTable eventTable;
    Dictionary <EventKey, MeterData.EventRow> eventLookup;
    MeterData.EventRow eventRow;

    eventTable = dbAdapterContainer.GetAdapter <EventTableAdapter>().GetDataByFileGroup(MeterDataSet.FileGroup.ID);

    // Build a lookup from event key to event row; duplicate events are
    // logged and resolved by taking the first row in each group
    eventLookup = eventTable
        .Where(evt => evt.MeterID == MeterDataSet.Meter.ID)
        .GroupBy(CreateEventKey)
        .ToDictionary(grouping => grouping.Key, grouping =>
        {
            if (grouping.Count() > 1)
            {
                Log.Warn($"Duplicate event found for meter {MeterDataSet.Meter.AssetKey}: {string.Join(", ", grouping.Select(evt => evt.ID))}");
            }

            return (grouping.First());
        });

    // Attach fault groups to their events; results with no matching event are skipped
    foreach (Tuple <EventKey, FaultLocationData.FaultGroupRow> faultGroup in m_faultGroupList)
    {
        if (eventLookup.TryGetValue(faultGroup.Item1, out eventRow))
        {
            faultGroup.Item2.EventID = eventRow.ID;
            FaultGroupTable.AddFaultGroupRow(faultGroup.Item2);
        }
    }

    // Attach fault segments to their events
    foreach (Tuple <EventKey, FaultLocationData.FaultSegmentRow> faultSegment in m_faultSegmentList)
    {
        if (eventLookup.TryGetValue(faultSegment.Item1, out eventRow))
        {
            faultSegment.Item2.EventID = eventRow.ID;
            FaultSegmentTable.AddFaultSegmentRow(faultSegment.Item2);
        }
    }

    // Attach fault curves to their events
    foreach (Tuple <EventKey, FaultLocationData.FaultCurveRow> faultCurve in m_faultCurveList)
    {
        if (eventLookup.TryGetValue(faultCurve.Item1, out eventRow))
        {
            faultCurve.Item2.EventID = eventRow.ID;
            FaultCurveTable.AddFaultCurveRow(faultCurve.Item2);
        }
    }

    // Attach fault summaries to their events
    foreach (Tuple <EventKey, FaultLocationData.FaultSummaryRow> faultSummary in m_faultSummaryList)
    {
        if (eventLookup.TryGetValue(faultSummary.Item1, out eventRow))
        {
            faultSummary.Item2.EventID = eventRow.ID;
            FaultSummaryTable.AddFaultSummaryRow(faultSummary.Item2);
        }
    }
}
/// <summary>
/// Computes a severity code for each disturbance in the file group against
/// every configured voltage envelope and records the results.
/// </summary>
public override void Execute(MeterDataSet meterDataSet)
{
    // NOTE(review): the sag/swell/interruption resources are fetched but not read
    // below -- presumably GetResource triggers computation side effects; confirm
    CycleDataResource cycleDataResource = CycleDataResource.GetResource(meterDataSet, m_dbAdapterContainer);
    SagDataResource sagDataResource = SagDataResource.GetResource(meterDataSet, m_dbAdapterContainer);
    SwellDataResource swellDataResource = SwellDataResource.GetResource(meterDataSet, m_dbAdapterContainer);
    InterruptionDataResource interruptionDataResource = InterruptionDataResource.GetResource(meterDataSet, m_dbAdapterContainer);
    DisturbanceTableAdapter disturbanceAdapter = m_dbAdapterContainer.GetAdapter <DisturbanceTableAdapter>();
    DisturbanceDataTable disturbanceTable = disturbanceAdapter.GetDataByFileGroup(meterDataSet.FileGroup.ID);
    SystemInfoDataContext systemInfo = m_dbAdapterContainer.GetAdapter <SystemInfoDataContext>();

    m_disturbanceSeverityTable = new DisturbanceSeverityDataTable();

    foreach (VoltageEnvelope envelope in systemInfo.VoltageEnvelopes)
    {
        foreach (DisturbanceRow disturbance in disturbanceTable)
        {
            // For each curve in the envelope, select the last point (by load
            // order) whose duration does not exceed the disturbance duration;
            // LastOrDefault yields null when no point qualifies
            IEnumerable <VoltageCurvePoint> points = envelope.VoltageCurves.Select(curve => curve.VoltageCurvePoints
                .Where(p => p.DurationSeconds <= disturbance.DurationSeconds)
                .OrderBy(p => p.LoadOrder)
                .LastOrDefault());

            // Severity is the ratio of the disturbance's per-unit voltage
            // deviation to the curve point's, truncated to an integer;
            // curves with no applicable point contribute severity 0
            IEnumerable <int> severityCodes = points.Select(point => ((object)point != null) ? (int)((1.0D - disturbance.PerUnitMagnitude) / (1.0D - point.PerUnitMagnitude)) : 0);

            int maxSeverityCode = severityCodes
                .DefaultIfEmpty(0)
                .Max();

            // Clamp the severity code to the range [0, 5]
            if (maxSeverityCode < 0)
            {
                maxSeverityCode = 0;
            }
            else if (maxSeverityCode > 5)
            {
                maxSeverityCode = 5;
            }

            m_disturbanceSeverityTable.AddDisturbanceSeverityRow(envelope.ID, disturbance.ID, maxSeverityCode);
        }
    }
}
/// <summary>
/// Prepares the lookups and result containers used when processing breaker operations.
/// </summary>
public override void Prepare(DbAdapterContainer dbAdapterContainer)
{
    m_dbAdapterContainer = dbAdapterContainer;

    // Lookup used to find or create phase records by name
    m_phaseLookup = new DataContextLookup <string, Phase>(dbAdapterContainer.GetAdapter <MeterInfoDataContext>(), phase => phase.Name);

    m_breakerOperationTable = new MeterData.BreakerOperationDataTable();
    m_breakerOperations = new List <Tuple <EventKey, MeterData.BreakerOperationRow> >();

    LoadBreakerOperationTypes(dbAdapterContainer);
}
// Gets the database ID of the phase with the given name,
// creating the phase record if it does not already exist.
private int GetPhaseID(GSF.PQDIF.Logical.Phase phase)
{
    // Lazily construct the phase lookup on first use
    if ((object)m_phaseLookup == null)
        m_phaseLookup = new DataContextLookup <string, Phase>(m_dbAdapterContainer.GetAdapter <MeterInfoDataContext>(), ph => ph.Name);

    string phaseName = phase.ToString();
    Phase phaseRecord = m_phaseLookup.GetOrAdd(phaseName, name => new Phase() { Name = name, Description = name });

    return phaseRecord.ID;
}
/// <summary>
/// Queues email generation for each event in the file group
/// that has not already had an email generated for it.
/// </summary>
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    Initialize(this);

    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter <EventTableAdapter>();

    foreach (EventRow evt in eventAdapter.GetDataByFileGroup(meterDataSet.FileGroup.ID))
    {
        // Skip events that already have an email on record
        if (GetEmailCount(dbAdapterContainer, evt.ID) == 0)
        {
            QueueEventID(evt.ID);
        }
    }
}
// Checks trending data summaries against the hour-of-week limits
// configured for each channel and logs an alarm for each excursion.
private void LoadHourOfWeekLimits(MeterDataSet meterDataSet)
{
    Dictionary <Channel, List <TrendingDataSummaryResource.TrendingDataSummary> > trendingDataSummaries;
    HourOfWeekLimitTableAdapter hourlyLimitAdapter;
    AlarmData.HourOfWeekLimitDataTable hourlyLimitTable;
    TrendingDataSummaryResource.TrendingDataSummary summary;
    AlarmData.HourOfWeekLimitRow hourlyLimit;
    Channel channel;
    int channelID;

    trendingDataSummaries = meterDataSet.GetResource <TrendingDataSummaryResource>().TrendingDataSummaries;
    hourlyLimitAdapter = m_dbAdapterContainer.GetAdapter <HourOfWeekLimitTableAdapter>();
    hourlyLimitTable = new AlarmData.HourOfWeekLimitDataTable();

    foreach (KeyValuePair <Channel, List <TrendingDataSummaryResource.TrendingDataSummary> > summaries in trendingDataSummaries)
    {
        channel = summaries.Key;
        channelID = channel.ID;

        // Load the hour-of-week limits for this channel
        // NOTE(review): the table is not cleared between channels -- presumably
        // FillBy clears before filling (ClearBeforeFill default); confirm
        hourlyLimitAdapter.FillBy(hourlyLimitTable, channelID);

        // Update alarm log for each excursion: join each summary to the
        // limit row for the hour of the week the summary falls in
        foreach (Tuple <TrendingDataSummaryResource.TrendingDataSummary, AlarmData.HourOfWeekLimitRow> tuple in summaries.Value.Join(hourlyLimitTable, sum => GetHourOfWeek(sum.Time), row => row.HourOfWeek, Tuple.Create))
        {
            summary = tuple.Item1;
            hourlyLimit = tuple.Item2;

            // Alarm when the average falls outside the configured [Low, High] band
            if (summary.Average < hourlyLimit.Low || summary.Average > hourlyLimit.High)
            {
                m_alarmLogTable.AddAlarmLogRow(channelID, hourlyLimit.AlarmTypeID, summary.Time, hourlyLimit.Severity, hourlyLimit.High, hourlyLimit.Low, summary.Average);
            }
        }
    }
}
// Removes the given event ID from the queue of pending fault emails and
// generates an email for it, unless a related event on the same line is
// still queued or an email was already generated for this event.
private static void DequeueEventID(int eventID)
{
    ProcessQueue.Add(() =>
    {
        MeterData.EventRow eventRow;
        MeterData.EventDataTable systemEvent;

        QueuedEventIDs.Remove(eventID);

        eventRow = s_dbAdapterContainer.GetAdapter <EventTableAdapter>().GetDataByID(eventID)[0];

        // Find all events overlapping this event's time window (within tolerance)
        systemEvent = s_dbAdapterContainer.GetAdapter <EventTableAdapter>().GetSystemEvent(eventRow.StartTime, eventRow.EndTime, s_timeTolerance);

        // Defer email generation if another event on the same line within
        // the same system event window is still waiting to be processed
        if (systemEvent.Any(evt => evt.LineID == eventRow.LineID && QueuedEventIDs.Contains(evt.ID)))
        {
            return;
        }

        // Only generate an email if one has not already been sent for this event
        if (s_dbAdapterContainer.GetAdapter <EventFaultEmailTableAdapter>().GetFaultEmailCount(eventID) == 0)
        {
            GenerateEmail(eventID);
        }
    });
}
/// <summary>
/// Gets flot series metadata for the waveform channels, cycle data,
/// and fault curves associated with the given event.
/// </summary>
public static List <FlotSeries> GetFlotInfo(int eventID)
{
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        // NOTE: the FaultLocationInfoDataContext adapter previously fetched
        // here was never used, so the dead fetch has been removed
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter <EventTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter <MeterInfoDataContext>();

        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();

        // Unknown event -- return an empty list rather than null
        if ((object)eventRow == null)
            return new List <FlotSeries>();

        return GetWaveformInfo(meterInfo.Series, eventRow.MeterID, eventRow.LineID)
            .Select(ToFlotSeries)
            .Concat(CycleDataInfo)
            .Concat(GetFaultCurveInfo(connection, eventID).Select(ToFlotSeries))
            .ToList();
    }
}
/// <summary>
/// Writes a results file for each data group in the meter
/// data set that has an associated fault group.
/// </summary>
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    CycleDataResource cycleDataResource;
    FaultDataResource faultDataResource;
    DataGroup dataGroup;
    FaultGroup faultGroup;
    List <int> seriesIDs;
    EventDataSet eventDataSet;
    string rootFileName;
    string fileName;

    cycleDataResource = meterDataSet.GetResource(() => CycleDataResource.GetResource(meterDataSet, dbAdapterContainer));
    faultDataResource = meterDataSet.GetResource(() => new FaultDataResource(dbAdapterContainer));

    // Make sure the results directory exists before writing to it
    if (!Directory.Exists(m_resultsPath))
    {
        Directory.CreateDirectory(m_resultsPath);
    }

    for (int i = 0; i < cycleDataResource.DataGroups.Count; i++)
    {
        dataGroup = cycleDataResource.DataGroups[i];

        // Only write results for data groups that produced fault data
        if (faultDataResource.FaultLookup.TryGetValue(dataGroup, out faultGroup))
        {
            // Results file is named after the source file, the
            // data group's index, and the line's asset key
            rootFileName = FilePath.GetFileNameWithoutExtension(meterDataSet.FilePath);
            fileName = string.Format("{0},{1:000},Line{2}.dat", rootFileName, i, dataGroup.Line.AssetKey);

            seriesIDs = dataGroup.DataSeries
                .Select(series => series.SeriesInfo.ID)
                .ToList();

            // Assemble everything the writer needs for this event
            eventDataSet = new EventDataSet()
            {
                ResultsPath = Path.Combine(m_resultsPath, fileName),
                MeterDataSet = meterDataSet,
                TimeZoneOffset = GetTimeZoneOffset(meterDataSet.Meter.TimeZone, dataGroup.StartTime),
                DataGroup = dataGroup,
                VICycleDataGroup = cycleDataResource.VICycleDataGroups[i],
                Faults = faultGroup.Faults,
                // Only output channels whose series appears in this data group
                OutputChannels = dbAdapterContainer.GetAdapter <FaultLocationInfoDataContext>().OutputChannels.Where(channel => seriesIDs.Contains(channel.SeriesID)).ToList()
            };

            WriteResults(eventDataSet);
        }
    }
}
/// <summary>
/// Queues fault email generation for events in the file group whose fault
/// detection logic (or, when enabled, default fault validation logic) succeeded.
/// </summary>
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    Initialize(this);

    foreach (var faultGroup in dbAdapterContainer.GetAdapter <FaultGroupTableAdapter>().GetDataByFileGroup(meterDataSet.FileGroup.ID))
    {
        // Detection result is null when the fault detection logic result was not recorded
        bool? faultDetectionResult = faultGroup.IsFaultDetectionLogicResultNull()
            ? (bool?)null
            : Convert.ToBoolean(faultGroup.FaultDetectionLogicResult);

        bool faultValidationResult = Convert.ToBoolean(faultGroup.FaultValidationLogicResult);

        bool qualifies =
            faultDetectionResult == true ||
            (m_faultLocationSettings.UseDefaultFaultDetectionLogic && faultValidationResult);

        // Only queue events that qualify and have no email on record yet
        if (qualifies && dbAdapterContainer.GetAdapter <EventFaultEmailTableAdapter>().GetFaultEmailCount(faultGroup.EventID) == 0)
        {
            QueueEventID(faultGroup.EventID);
        }
    }
}
/// <summary>
/// Gets flot series metadata for the waveform channels, the cycle data series
/// that match them, and the fault curves associated with the given event.
/// </summary>
public static List <FlotSeries> GetFlotInfo(int eventID)
{
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        // NOTE: the FaultLocationInfoDataContext adapter previously fetched
        // here was never used, so the dead fetch has been removed
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter <EventTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter <MeterInfoDataContext>();

        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();

        // Unknown event -- return an empty list rather than null
        if ((object)eventRow == null)
            return new List <FlotSeries>();

        List <Series> waveformInfo = GetWaveformInfo(meterInfo.Series, eventRow.MeterID, eventRow.LineID);

        // Determine which measurement type/phase combinations have instantaneous
        // waveform data so that only the matching cycle data series are included
        var lookup = waveformInfo
            .Where(info => info.Channel.MeasurementCharacteristic.Name == "Instantaneous")
            .Where(info => new string[] { "Instantaneous", "Values" }.Contains(info.SeriesType.Name))
            .Select(info => new { MeasurementType = info.Channel.MeasurementType.Name, Phase = info.Channel.Phase.Name })
            .Distinct()
            .ToDictionary(info => info);

        IEnumerable <FlotSeries> cycleDataInfo = CycleDataInfo
            .Where(info => lookup.ContainsKey(new { info.MeasurementType, info.Phase }))
            .Select(info => info.Clone());

        // Reuse the waveform info retrieved above rather than querying
        // the database for exactly the same data a second time
        return waveformInfo
            .Select(ToFlotSeries)
            .Concat(cycleDataInfo)
            .Concat(GetFaultCurveInfo(connection, eventID).Select(ToFlotSeries))
            .ToList();
    }
}
// Computes per-channel "normal" statistics (mean and mean square of hourly
// averages) from complete hourly trending summaries and stores one channel
// normal row per channel that has qualifying data.
private void ProcessChannelNormals(MeterDataSet meterDataSet)
{
    Dictionary <Channel, List <DataGroup> > trendingGroups;
    HourlyTrendingSummaryTableAdapter hourlySummaryAdapter;
    MeterData.HourlyTrendingSummaryDataTable hourlySummaryTable;
    Channel channel;
    int channelID;
    double average;
    double meanSquare;

    trendingGroups = meterDataSet.GetResource <TrendingGroupsResource>().TrendingGroups;
    hourlySummaryAdapter = m_dbAdapterContainer.GetAdapter <HourlyTrendingSummaryTableAdapter>();
    hourlySummaryTable = new MeterData.HourlyTrendingSummaryDataTable();

    // Accumulate rows across the multiple FillBy calls per channel;
    // the table is cleared manually at the start of each channel instead
    hourlySummaryAdapter.ClearBeforeFill = false;

    foreach (KeyValuePair <Channel, List <DataGroup> > channelGroups in trendingGroups)
    {
        channel = channelGroups.Key;
        channelID = channel.ID;
        hourlySummaryTable.Clear();

        // Load hourly summaries overlapping each of this channel's data groups;
        // the window starts an hour early to include the partial leading hour
        foreach (DataGroup dataGroup in channelGroups.Value)
        {
            hourlySummaryAdapter.FillBy(hourlySummaryTable, channelID, dataGroup.StartTime.AddHours(-1.0D), dataGroup.EndTime);
        }

        // Discard hours with incomplete data (iterate backwards so removal is safe)
        for (int i = hourlySummaryTable.Count - 1; i >= 0; i--)
        {
            if (hourlySummaryTable[i].ValidCount + hourlySummaryTable[i].InvalidCount < channel.SamplesPerHour)
            {
                hourlySummaryTable.Rows.RemoveAt(i);
            }
        }

        RemoveDuplicateRows(hourlySummaryTable);

        // Compute the channel normal from the remaining hourly averages
        if (hourlySummaryTable.Count > 0)
        {
            average = hourlySummaryTable.Average(row => row.Average);
            meanSquare = hourlySummaryTable.Average(row => row.Average * row.Average);
            m_channelNormalTable.AddChannelNormalRow(channelID, average, meanSquare, 0.0D, hourlySummaryTable.Count);
        }
    }
}
/// <summary>
/// Looks up the database event record for each fault result and fills the
/// fault group, segment, curve, and summary tables with the matched records.
/// </summary>
public void FillTables(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventDataTable eventTable;
    Dictionary <EventKey, MeterData.EventRow> eventLookup;
    MeterData.EventRow eventRow;

    eventTable = dbAdapterContainer.GetAdapter <EventTableAdapter>().GetDataByFileGroup(MeterDataSet.FileGroup.ID);

    // Group by event key before building the lookup -- ToDictionary over the
    // raw rows would throw if the database contained duplicate events for this
    // meter; duplicates are resolved by taking the first row in each group
    eventLookup = eventTable
        .Where(evt => evt.MeterID == MeterDataSet.Meter.ID)
        .GroupBy(CreateEventKey)
        .ToDictionary(grouping => grouping.Key, grouping => grouping.First());

    // Attach fault groups to their events; results with no matching event are skipped
    foreach (Tuple <EventKey, FaultLocationData.FaultGroupRow> faultGroup in m_faultGroupList)
    {
        if (eventLookup.TryGetValue(faultGroup.Item1, out eventRow))
        {
            faultGroup.Item2.EventID = eventRow.ID;
            FaultGroupTable.AddFaultGroupRow(faultGroup.Item2);
        }
    }

    // Attach fault segments to their events
    foreach (Tuple <EventKey, FaultLocationData.FaultSegmentRow> faultSegment in m_faultSegmentList)
    {
        if (eventLookup.TryGetValue(faultSegment.Item1, out eventRow))
        {
            faultSegment.Item2.EventID = eventRow.ID;
            FaultSegmentTable.AddFaultSegmentRow(faultSegment.Item2);
        }
    }

    // Attach fault curves to their events
    foreach (Tuple <EventKey, FaultLocationData.FaultCurveRow> faultCurve in m_faultCurveList)
    {
        if (eventLookup.TryGetValue(faultCurve.Item1, out eventRow))
        {
            faultCurve.Item2.EventID = eventRow.ID;
            FaultCurveTable.AddFaultCurveRow(faultCurve.Item2);
        }
    }

    // Attach fault summaries to their events
    foreach (Tuple <EventKey, FaultLocationData.FaultSummaryRow> faultSummary in m_faultSummaryList)
    {
        if (eventLookup.TryGetValue(faultSummary.Item1, out eventRow))
        {
            faultSummary.Item2.EventID = eventRow.ID;
            FaultSummaryTable.AddFaultSummaryRow(faultSummary.Item2);
        }
    }
}
// Builds the static lookup of fault segment types by name, creating any
// segment types that do not already exist in the fault location database.
private void InitializeSegmentTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    Func <string, SegmentType> segmentTypeFactory;
    FaultLocationInfoDataContext faultLocationInfo;

    faultLocationInfo = dbAdapterContainer.GetAdapter <FaultLocationInfoDataContext>();

    // Factory creates a new segment type and queues it
    // for insertion when SubmitChanges is called below
    segmentTypeFactory = name =>
    {
        SegmentType segmentType = new SegmentType() { Name = name, Description = name };
        faultLocationInfo.SegmentTypes.InsertOnSubmit(segmentType);
        return (segmentType);
    };

    // Duplicate names are logged and resolved by taking the first record
    s_segmentTypeLookup = faultLocationInfo.SegmentTypes
        .GroupBy(segmentType => segmentType.Name)
        .ToDictionary(grouping => grouping.Key, grouping =>
        {
            if (grouping.Count() > 1)
            {
                Log.Warn($"Duplicate segment type found: {grouping.Key}");
            }

            return (grouping.First());
        });

    // Ensure all segment types used by fault classification exist
    s_segmentTypeLookup.GetOrAdd("Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("AN Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("BN Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("CN Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("AB Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("BC Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("CA Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("ABG Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("BCG Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("CAG Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("3-Phase Fault", segmentTypeFactory);

    // Persist any newly created segment types
    faultLocationInfo.SubmitChanges();
}
// Initializes the fault's cycle data and curve indexes by locating the
// "Fault" segment for this event in the fault location database.
public void Initialize(DbAdapterContainer dbAdapterContainer, VICycleDataGroup viCycleDataGroup, double systemFrequency)
{
    int samplesPerCycle = Transform.CalculateSamplesPerCycle(viCycleDataGroup.VA.RMS, systemFrequency);

    // Find the overall fault segment whose start time matches the fault inception
    FaultSegment faultSegment = dbAdapterContainer.GetAdapter <FaultLocationInfoDataContext>().FaultSegments
        .Where(segment => segment.EventID == Fault.EventID)
        .Where(segment => segment.StartTime == Fault.Inception)
        .FirstOrDefault(segment => segment.SegmentType.Name == "Fault");

    // If no matching segment exists, leave the fault uninitialized
    if ((object)faultSegment != null)
    {
        StartSample = faultSegment.StartSample;

        // NOTE(review): end sample is pulled back by one cycle (minus one sample),
        // presumably so the cycle data subset covers only whole cycles -- confirm
        EndSample = faultSegment.EndSample - samplesPerCycle + 1;

        CycleDataGroup = Rotate(viCycleDataGroup.ToSubSet(StartSample, EndSample));
        DistanceCurve.StartIndex = StartSample;
        AngleCurve.StartIndex = StartSample;
    }
}
/// <summary>
/// Aggregates trending data summaries into per-channel daily
/// summary records and upserts them into the database.
/// </summary>
public override void Execute(MeterDataSet meterDataSet)
{
    Dictionary <Channel, List <TrendingDataSummaryResource.TrendingDataSummary> > trendingDataSummaries = meterDataSet.GetResource <TrendingDataSummaryResource>().TrendingDataSummaries;
    List <TrendingDataSummaryResource.TrendingDataSummary> validSummaries;
    IEnumerable <IGrouping <DateTime, TrendingDataSummaryResource.TrendingDataSummary> > dailySummaryGroups;
    DailyTrendingSummaryTableAdapter dailySummaryAdapter;
    MeterData.DailyTrendingSummaryDataTable dailySummaryTable;
    MeterData.DailyTrendingSummaryRow row;

    Log.Info("Executing operation to load daily summary data into the database...");

    dailySummaryAdapter = m_dbAdapterContainer.GetAdapter <DailyTrendingSummaryTableAdapter>();
    dailySummaryTable = new MeterData.DailyTrendingSummaryDataTable();

    foreach (KeyValuePair <Channel, List <TrendingDataSummaryResource.TrendingDataSummary> > channelSummaries in trendingDataSummaries)
    {
        // Group this channel's non-duplicate summaries by calendar day
        dailySummaryGroups = channelSummaries.Value
            .Where(summary => !summary.IsDuplicate)
            .GroupBy(summary => GetDate(summary.Time));

        foreach (IGrouping <DateTime, TrendingDataSummaryResource.TrendingDataSummary> dailySummaryGroup in dailySummaryGroups)
        {
            validSummaries = dailySummaryGroup.Where(summary => summary.IsValid).ToList();

            row = dailySummaryTable.NewDailyTrendingSummaryRow();

            row.BeginEdit();
            row.ChannelID = channelSummaries.Key.ID;
            row.Date = dailySummaryGroup.Key;

            // BUG FIX: Maximum and Average previously aggregated summary.Minimum,
            // so the daily maximum and average were computed from minimum values
            row.Minimum = validSummaries.Select(summary => summary.Minimum).DefaultIfEmpty(0.0D).Min();
            row.Maximum = validSummaries.Select(summary => summary.Maximum).DefaultIfEmpty(0.0D).Max();
            row.Average = validSummaries.Select(summary => summary.Average).DefaultIfEmpty(0.0D).Average();

            row.ValidCount = validSummaries.Count;
            row.InvalidCount = dailySummaryGroup.Count() - validSummaries.Count;
            row.EndEdit();

            dailySummaryAdapter.Upsert(row);
        }
    }
}
/// <summary>
/// Processes data not yet processed
/// by this SandBox instance.
/// </summary>
private void ProcessLatestDataOperation()
{
    string latestDataFile = FilePath.GetAbsolutePath(@"LatestData.bin");
    int latestFileGroupID;

    FileInfoDataContext fileInfo;
    List<int> newFileGroups;

    // Make sure system settings are available before proceeding
    if ((object)m_systemSettings == null)
        ReloadSystemSettings();

    // The file-backed dictionary persists the ID of the last
    // processed file group across restarts of this instance
    using (FileBackedDictionary<string, int> dictionary = new FileBackedDictionary<string, int>(latestDataFile))
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(m_systemSettings.DbConnectionString, m_systemSettings.DbTimeout))
    {
        fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();

        do
        {
            dictionary.Compact();

            // Default to zero so all file groups are processed on first run
            if (!dictionary.TryGetValue("latestFileGroupID", out latestFileGroupID))
                latestFileGroupID = 0;

            // Fetch the next batch of up to 100 unprocessed file group IDs
            // NOTE(review): Take(100) is applied before OrderBy, which relies on
            // the database returning rows in ID order for correctness -- confirm
            newFileGroups = fileInfo.FileGroups
                .Select(fileGroup => fileGroup.ID)
                .Where(id => id > latestFileGroupID)
                .Take(100)
                .OrderBy(id => id)
                .ToList();

            foreach (int fileGroupID in newFileGroups)
            {
                MeterDataProcessor processor = new MeterDataProcessor(LoadSystemSettings());
                processor.ProcessFileGroup(fileGroupID);

                // Persist progress after each file group so a restart resumes here
                dictionary["latestFileGroupID"] = fileGroupID;
            }
        }
        while (newFileGroups.Count > 0);
    }
}
// Builds a lookup from event classification values to their database
// IDs, merging any missing event types into the database first.
private Dictionary <EventClassification, int> GetEventTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventTypeDataTable eventTypeTable = new MeterData.EventTypeDataTable();

    // NOTE: this variable is assigned as a side effect of the Enum.TryParse call
    // in the Where clause below; the streaming evaluation of Where followed by
    // Select means each projection sees the value parsed from its own row
    EventClassification eventClassification = default(EventClassification);

    foreach (EventClassification classification in Enum.GetValues(typeof(EventClassification)))
    {
        eventTypeTable.AddEventTypeRow(classification.ToString(), classification.ToString());
    }

    // Merge any event types missing from the database
    BulkLoader bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

    bulkLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                  "USING {1} AS Source " +
                                  "ON Source.Name = Target.Name " +
                                  "WHEN NOT MATCHED THEN " +
                                  " INSERT (Name, Description) " +
                                  " VALUES (Source.Name, Source.Description);";

    bulkLoader.Load(eventTypeTable);

    // Reload the table to pick up the database-assigned IDs
    dbAdapterContainer.GetAdapter <EventTypeTableAdapter>().Fill(eventTypeTable);

    // Duplicate classifications are logged and resolved by taking the first row
    return (eventTypeTable
        .Where(row => Enum.TryParse(row.Name, out eventClassification))
        .Select(row => new { EventClassification = eventClassification, row.ID })
        .ToList()
        .GroupBy(obj => obj.EventClassification)
        .ToDictionary(grouping => grouping.Key, grouping =>
        {
            if (grouping.Count() > 1)
            {
                Log.Warn($"Found duplicate event type: {grouping.Key}");
            }

            return grouping.First().ID;
        }));
}
// Builds a lookup from breaker operation type enumeration values to their
// database IDs, merging any missing operation types into the database first.
private Dictionary <BreakerOperationType, int> GetBreakerOperationTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    BreakerOperationTypeDataTable breakerOperationTypeTable = new BreakerOperationTypeDataTable();

    // NOTE: this variable is assigned as a side effect of the Enum.TryParse call
    // in the Where clause below; the streaming evaluation of Where followed by
    // Select means each projection sees the value parsed from its own row
    BreakerOperationType breakerOperationType = default(BreakerOperationType);

    foreach (BreakerOperationType operationType in Enum.GetValues(typeof(BreakerOperationType)))
    {
        breakerOperationTypeTable.AddBreakerOperationTypeRow(operationType.ToString(), operationType.ToString());
    }

    // Merge any operation types missing from the database
    BulkLoader bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

    bulkLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                  "USING {1} AS Source " +
                                  "ON Source.Name = Target.Name " +
                                  "WHEN NOT MATCHED THEN " +
                                  " INSERT (Name, Description) " +
                                  " VALUES (Source.Name, Source.Description);";

    bulkLoader.Load(breakerOperationTypeTable);

    // Reload the table to pick up the database-assigned IDs
    dbAdapterContainer.GetAdapter <BreakerOperationTypeTableAdapter>().Fill(breakerOperationTypeTable);

    // Warn about duplicate names before deduplicating below
    foreach (IGrouping <string, BreakerOperationTypeRow> grouping in breakerOperationTypeTable.GroupBy(row => row.Name))
    {
        if (grouping.Count() > 1)
        {
            Log.Warn($"Found duplicate breaker operation type: {grouping.Key}");
        }
    }

    // Duplicates are resolved by keeping the first row per operation type
    return (breakerOperationTypeTable
        .Where(row => Enum.TryParse(row.Name, out breakerOperationType))
        .Select(row => new { BreakerOperationType = breakerOperationType, row.ID })
        .ToList()
        .DistinctBy(obj => obj.BreakerOperationType)
        .ToDictionary(obj => obj.BreakerOperationType, obj => obj.ID));
}
// Executes the data operations and data writers for the given meter data
// set, flagging the file group as errored if processing fails.
private void ProcessMeterData(MeterDataSet meterDataSet, DbAdapterContainer dbAdapterContainer)
{
    try
    {
        meterDataSet.ConnectionString = m_connectionString;
        ExecuteDataOperation(meterDataSet, dbAdapterContainer);
        ExecuteDataWriters(meterDataSet, dbAdapterContainer);
    }
    catch (Exception ex)
    {
        try
        {
            // Report the failure and mark the file group as errored in the database
            OnHandleException(ex);
            meterDataSet.FileGroup.Error = 1;
            dbAdapterContainer.GetAdapter <FileInfoDataContext>().SubmitChanges();
        }
        catch
        {
            // Ignore errors here as they are most likely
            // related to the error we originally caught
        }
    }
}
// Builds the static lookup of fault segment types by name, creating any
// segment types that do not already exist in the fault location database.
private void InitializeSegmentTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    Func <string, SegmentType> segmentTypeFactory;
    FaultLocationInfoDataContext faultLocationInfo;

    faultLocationInfo = dbAdapterContainer.GetAdapter <FaultLocationInfoDataContext>();

    // Factory creates a new segment type and queues it
    // for insertion when SubmitChanges is called below
    segmentTypeFactory = name =>
    {
        SegmentType segmentType = new SegmentType() { Name = name, Description = name };
        faultLocationInfo.SegmentTypes.InsertOnSubmit(segmentType);
        return (segmentType);
    };

    // Group by name before building the lookup -- ToDictionary over the raw
    // records would throw if the database contained duplicate segment type
    // names; duplicates are resolved by taking the first record in each group
    s_segmentTypeLookup = faultLocationInfo.SegmentTypes
        .GroupBy(segmentType => segmentType.Name)
        .ToDictionary(grouping => grouping.Key, grouping => grouping.First());

    // Ensure all segment types used by fault classification exist
    s_segmentTypeLookup.GetOrAdd("Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("AN Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("BN Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("CN Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("AB Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("BC Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("CA Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("ABG Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("BCG Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("CAG Fault", segmentTypeFactory);
    s_segmentTypeLookup.GetOrAdd("3-Phase Fault", segmentTypeFactory);

    // Persist any newly created segment types
    faultLocationInfo.SubmitChanges();
}
/// <summary>
/// Associates breaker operation records with their events in the
/// database and bulk loads the breaker operation table.
/// </summary>
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    EventDataTable eventTable = dbAdapterContainer.GetAdapter <EventTableAdapter>().GetDataByFileGroup(m_meterDataSet.FileGroup.ID);

    // Build the event lookup, warning about duplicate events and
    // resolving each duplicate by keeping the first row in the group
    Dictionary <EventKey, EventRow> eventLookup = new Dictionary <EventKey, EventRow>();

    foreach (IGrouping <EventKey, EventRow> grouping in eventTable.Where(evt => evt.MeterID == m_meterDataSet.Meter.ID).GroupBy(CreateEventKey))
    {
        if (grouping.Count() > 1)
        {
            Log.Warn($"Found duplicate events for meter {m_meterDataSet.Meter.AssetKey}: {string.Join(", ", grouping.Select(evt => evt.ID))}");
        }

        eventLookup.Add(grouping.Key, grouping.First());
    }

    // Assign event IDs to breaker operations whose event was found in the database
    EventRow eventRow;

    foreach (Tuple <EventKey, BreakerOperationRow> breakerOperation in m_breakerOperations)
    {
        if (eventLookup.TryGetValue(breakerOperation.Item1, out eventRow))
        {
            breakerOperation.Item2.EventID = eventRow.ID;
            m_breakerOperationTable.AddBreakerOperationRow(breakerOperation.Item2);
        }
    }

    BulkLoader bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
    bulkLoader.Load(m_breakerOperationTable);
}
/// <summary>
/// Reloads system configuration from configuration sources.
/// </summary>
public void ReloadConfiguration()
{
    // Attempt to reload system settings if they have not been loaded yet
    if ((object)m_systemSettings == null)
        ReloadSystemSettings();

    // If system settings still could not be loaded, give up
    if ((object)m_systemSettings == null)
        return;

    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(m_systemSettings.DbConnectionString))
    {
        SystemInfoDataContext systemInfo = dbAdapterContainer.GetAdapter<SystemInfoDataContext>();

        // Resolve all configuration loader types that are default-constructible
        // implementations of IConfigurationLoader, in configured load order
        List<Type> configurationLoaderTypes = systemInfo.ConfigurationLoaders
            .OrderBy(configLoader => configLoader.LoadOrder)
            .AsEnumerable()
            .Select(configLoader => LoadType(configLoader.AssemblyName, configLoader.TypeName))
            .Where(type => (object)type != null)
            .Where(type => typeof(IConfigurationLoader).IsAssignableFrom(type))
            .Where(type => (object)type.GetConstructor(Type.EmptyTypes) != null)
            .ToList();

        string connectionString = LoadSystemSettings(systemInfo);

        foreach (Type configurationLoaderType in configurationLoaderTypes)
        {
            try
            {
                OnStatusMessage("[{0}] Loading configuration...", configurationLoaderType.Name);

                // Instantiate the loader, apply system settings from the
                // connection string, and let it update the configuration
                IConfigurationLoader configurationLoader = (IConfigurationLoader)Activator.CreateInstance(configurationLoaderType);
                ConnectionStringParser.ParseConnectionString(connectionString, configurationLoader);
                configurationLoader.UpdateConfiguration(dbAdapterContainer);

                OnStatusMessage("[{0}] Done loading configuration.", configurationLoaderType.Name);
            }
            catch (Exception ex)
            {
                // A failing loader does not prevent the remaining loaders from running
                string message = string.Format("[{0}] Unable to update configuration due to exception: {1}", configurationLoaderType.Name, ex.Message);
                OnProcessException(new InvalidOperationException(message, ex));
            }
        }
    }
}
// Updates the Filter property of the FileProcessor with the
// latest collection of filters from the DataReader table.
private void UpdateFileProcessorFilter(SystemSettings systemSettings)
{
    // Nothing to update if the file processor has not been created
    if ((object)m_fileProcessor == null)
        return;

    List<string> filterPatterns;

    // Query the list of file patterns to be processed by openXDA
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
    {
        SystemInfoDataContext systemInfo = dbAdapterContainer.GetAdapter<SystemInfoDataContext>();

        filterPatterns = systemInfo.DataReaders
            .Select(reader => reader.FilePattern)
            .ToList();
    }

    // Combine the patterns into the file processor's filter string
    m_fileProcessor.Filter = string.Join(Path.PathSeparator.ToString(), filterPatterns);
}
// Instantiates and executes data operations and data writers to process the meter data set.
// Data operations run in three phases (prepare, execute, load) so that expensive database
// work happens only after all in-memory transformations succeed; data writers then export
// the results to external destinations.
private void ProcessMeterDataSet(MeterDataSet meterDataSet, SystemSettings systemSettings, DbAdapterContainer dbAdapterContainer)
{
    SystemInfoDataContext systemInfo;
    List<DataOperationWrapper> dataOperations;
    List<DataWriterWrapper> dataWriters;

    // Get the SystemInfoDataContext from the dbAdapterContainer
    systemInfo = dbAdapterContainer.GetAdapter<SystemInfoDataContext>();

    // Load the data operations from the database,
    // in descending order so we can remove records while we iterate
    dataOperations = systemInfo.DataOperations
        .OrderByDescending(dataOperation => dataOperation.LoadOrder)
        .Select(Wrap)
        .Where(wrapper => (object)wrapper != null)
        .ToList();

    for (int i = dataOperations.Count - 1; i >= 0; i--)
    {
        try
        {
            Log.Debug($"Preparing data operation '{dataOperations[i].DataObject.GetType().Name}' for execution...");

            // Load configuration parameters from the connection string into the data operation
            ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, dataOperations[i].DataObject);

            // Call the prepare method to allow the data operation to prepare any data it needs from the database
            dataOperations[i].DataObject.Prepare(dbAdapterContainer);

            Log.Debug($"Finished preparing data operation '{dataOperations[i].DataObject.GetType().Name}' for execution.");
        }
        catch (Exception ex)
        {
            // Log the error and remove the data operation from the list
            // so it is skipped by the execute/load phases below
            string message = $"An error occurred while preparing data from meter '{meterDataSet.Meter.AssetKey}' for data operation of type '{dataOperations[i].DataObject.GetType().FullName}': {ex.Message}";
            OnProcessException(new Exception(message, ex));
            dataOperations[i].Dispose();
            dataOperations.RemoveAt(i);
        }
    }

    for (int i = dataOperations.Count - 1; i >= 0; i--)
    {
        try
        {
            Log.Debug($"Executing data operation '{dataOperations[i].DataObject.GetType().Name}'...");

            // Call the execute method on the data operation to perform in-memory data transformations
            dataOperations[i].DataObject.Execute(meterDataSet);

            // FIX: log message previously contained the typo "execurting"
            Log.Debug($"Finished executing data operation '{dataOperations[i].DataObject.GetType().Name}'.");
        }
        catch (Exception ex)
        {
            // Log the error and skip to the next data operation;
            // the continue is required so the load phase below does
            // not run for an operation whose execute phase failed
            string message = $"An error occurred while executing data operation of type '{dataOperations[i].DataObject.GetType().FullName}' on data from meter '{meterDataSet.Meter.AssetKey}': {ex.Message}";
            OnProcessException(new Exception(message, ex));
            continue;
        }

        try
        {
            Log.Debug($"Loading data from data operation '{dataOperations[i].DataObject.GetType().Name}' into database...");

            // Call the load method inside a transaction to load data from the data operation into the database
            using (TransactionScope transaction = new TransactionScope(TransactionScopeOption.Required, GetTransactionOptions()))
            {
                dataOperations[i].DataObject.Load(dbAdapterContainer);
                transaction.Complete();
            }

            Log.Debug($"Finished loading data from data operation '{dataOperations[i].DataObject.GetType().Name}' into database.");
        }
        catch (Exception ex)
        {
            // Log the error and move on to the next data operation
            string message = $"An error occurred while loading data from data operation of type '{dataOperations[i].DataObject.GetType().FullName}' for data from meter '{meterDataSet.Meter.AssetKey}': {ex.Message}";
            OnProcessException(new Exception(message, ex));
        }
    }

    // All data operations are complete, but we still need to clean up
    for (int i = dataOperations.Count - 1; i >= 0; i--)
        dataOperations[i].Dispose();

    // Load the data writers from the database
    dataWriters = systemInfo.DataWriters
        .OrderBy(dataWriter => dataWriter.LoadOrder)
        .Select(Wrap)
        .Where(wrapper => (object)wrapper != null)
        .ToList();

    foreach (DataWriterWrapper dataWriter in dataWriters)
    {
        try
        {
            Log.Debug($"Writing results to external location with data writer '{dataWriter.DataObject.GetType().Name}'...");

            // Load configuration parameters from the connection string into the data writer
            ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, dataWriter.DataObject);

            // Write the results to the data writer's destination by calling the WriteResults method
            dataWriter.DataObject.WriteResults(dbAdapterContainer, meterDataSet);

            Log.Debug($"Finished writing results with data writer '{dataWriter.DataObject.GetType().Name}'.");
        }
        catch (Exception ex)
        {
            // Log the error and move on to the next data writer
            string message = $"An error occurred while writing data from meter '{meterDataSet.Meter.AssetKey}' using data writer of type '{dataWriter.DataObject.GetType().FullName}': {ex.Message}";
            OnProcessException(new Exception(message, ex));
        }
    }

    // All data writers are complete, but we still need to clean up
    foreach (DataWriterWrapper dataWriter in dataWriters)
        dataWriter.Dispose();
}
// Processes the file to determine if it can be parsed and kicks off the meter's processing thread.
// Throws FileSkippedException when no data reader matches the file; blocks the caller
// when the number of queued files reaches the configured maximum (backpressure).
private void ProcessFile(FileProcessorEventArgs fileProcessorArgs, string connectionString, SystemSettings systemSettings, DbAdapterContainer dbAdapterContainer)
{
    string filePath;
    string meterKey;
    FileInfoDataContext fileInfo;
    SystemInfoDataContext systemInfo;
    DataReader dataReader;
    DataReaderWrapper dataReaderWrapper;
    FileWrapper fileWrapper;
    int queuedFileCount;

    filePath = fileProcessorArgs.FullPath;
    fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();

    // Determine whether the file has already been
    // processed or needs to be processed again
    if (fileProcessorArgs.AlreadyProcessed)
    {
        // Latest DataFile record matching this path (hash filter narrows the query
        // before the exact path comparison)
        DataFile dataFile = fileInfo.DataFiles
            .Where(file => file.FilePathHash == filePath.GetHashCode())
            .Where(file => file.FilePath == filePath)
            .MaxBy(file => file.ID);

        // This will tell us whether the service was stopped in the middle
        // of processing the last time it attempted to process the file;
        // a nonzero ProcessingEndTime means processing completed
        if ((object)dataFile != null && dataFile.FileGroup.ProcessingEndTime > DateTime.MinValue)
        {
            Log.Debug($"Skipped file \"{filePath}\" because it has already been processed.");
            return;
        }
    }

    // Get the data reader that will be used to parse the file:
    // the first reader (by load order) whose file pattern matches
    systemInfo = dbAdapterContainer.GetAdapter<SystemInfoDataContext>();

    dataReader = systemInfo.DataReaders
        .OrderBy(reader => reader.LoadOrder)
        .AsEnumerable()
        .FirstOrDefault(reader => FilePath.IsFilePatternMatch(reader.FilePattern, filePath, true));

    if ((object)dataReader == null)
    {
        // Because the file processor is filtering files based on the DataReader file patterns,
        // this should only ever occur if the configuration changes during runtime
        UpdateFileProcessorFilter(systemSettings);
        throw new FileSkippedException($"Skipped file \"{filePath}\" because no data reader could be found to process the file.");
    }

    dataReaderWrapper = Wrap(dataReader);

    try
    {
        meterKey = null;

        // Determine whether the database contains configuration information
        // for the meter that produced this file
        // NOTE(review): the condition checks whether the reader exposes a meter
        // data set; presumably readers without one need no meter key -- confirm
        // against the DataReader contract
        if ((object)dataReaderWrapper.DataObject.MeterDataSet != null)
            meterKey = GetMeterKey(filePath, systemSettings.FilePattern);

        // Apply connection string settings to the data reader
        ConnectionStringParser.ParseConnectionString(connectionString, dataReaderWrapper.DataObject);

        // Get the file wrapper from the lookup table
        fileWrapper = m_fileWrapperLookup.GetOrAdd(filePath, path => new FileWrapper(path));

        // Determine whether the dataReader can parse the file;
        // if not, requeue the file to try again later
        if (!dataReaderWrapper.DataObject.CanParse(filePath, fileWrapper.GetMaxFileCreationTime()))
        {
            fileProcessorArgs.Requeue = true;
            dataReaderWrapper.Dispose();
            return;
        }

        // Get the thread used to process this data; ownership of
        // dataReaderWrapper passes to the queued ParseFile call
        GetThread(meterKey).Push(() => ParseFile(connectionString, systemSettings, filePath, meterKey, dataReaderWrapper, fileWrapper));

        // Keep track of the number of operations in thread queues and
        // block until the queue drains below the configured maximum
        queuedFileCount = Interlocked.Increment(ref m_queuedFileCount);

        while (!m_stopped && !m_disposed && m_queuedFileCount >= systemSettings.MaxQueuedFileCount)
            Thread.Sleep(1000);
    }
    catch
    {
        // If an error occurs here, dispose of the data reader;
        // otherwise, the meter data thread will handle it
        dataReaderWrapper.Dispose();
        throw;
    }
}
// Queues an email notification for each event in the file group
// that has not already had an email sent for it.
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    Initialize(this);

    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();

    foreach (EventRow evt in eventAdapter.GetDataByFileGroup(meterDataSet.FileGroup.ID))
    {
        // Skip events that already have emails associated with them
        if (GetEmailCount(dbAdapterContainer, evt.ID) != 0)
            continue;

        QueueEventID(evt.ID);
    }
}
// Retrieves the plottable data points for the requested series indexes of an event.
// Series data is resolved against waveform, cycle, or fault-curve sources depending
// on the FlotType of each series. Returns an empty list when the event ID is unknown.
public List<FlotSeries> GetFlotData(int eventID, List<int> seriesIndexes)
{
    List<FlotSeries> flotSeriesList = new List<FlotSeries>();

    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
        EventDataTableAdapter eventDataAdapter = dbAdapterContainer.GetAdapter<EventDataTableAdapter>();
        FaultCurveTableAdapter faultCurveAdapter = dbAdapterContainer.GetAdapter<FaultCurveTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
        FaultLocationInfoDataContext faultLocationInfo = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>();

        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();

        // FIX: guard against unknown event IDs -- previously a null eventRow caused
        // a NullReferenceException below (GetFlotInfo already guards this way)
        if ((object)eventRow == null)
            return flotSeriesList;

        Meter meter = meterInfo.Meters.First(m => m.ID == eventRow.MeterID);

        List<FlotSeries> flotInfo = GetFlotInfo(eventID);
        DateTime epoch = new DateTime(1970, 1, 1);

        // Each data source is loaded lazily so a request that touches
        // only one source does not pay for the others
        Lazy<Dictionary<int, DataSeries>> waveformData = new Lazy<Dictionary<int, DataSeries>>(() =>
        {
            return ToDataGroup(meter, eventDataAdapter.GetTimeDomainData(eventRow.EventDataID)).DataSeries
                .ToDictionary(dataSeries => dataSeries.SeriesInfo.ID);
        });

        Lazy<DataGroup> cycleData = new Lazy<DataGroup>(() => ToDataGroup(meter, eventDataAdapter.GetFrequencyDomainData(eventRow.EventDataID)));

        Lazy<Dictionary<string, DataSeries>> faultCurveData = new Lazy<Dictionary<string, DataSeries>>(() =>
        {
            return faultCurveAdapter
                .GetDataBy(eventRow.ID)
                .Select(faultCurve => new { Algorithm = faultCurve.Algorithm, DataGroup = ToDataGroup(meter, faultCurve.Data) })
                .Where(obj => obj.DataGroup.DataSeries.Count > 0)
                .ToDictionary(obj => obj.Algorithm, obj => obj.DataGroup[0]);
        });

        foreach (int index in seriesIndexes)
        {
            DataSeries dataSeries = null;
            FlotSeries flotSeries;

            // Ignore out-of-range series indexes
            if (index >= flotInfo.Count)
                continue;

            flotSeries = flotInfo[index];

            if (flotSeries.FlotType == FlotSeriesType.Waveform)
            {
                if (!waveformData.Value.TryGetValue(flotSeries.SeriesID, out dataSeries))
                    continue;
            }
            else if (flotSeries.FlotType == FlotSeriesType.Cycle)
            {
                // NOTE(review): for cycle data, SeriesID appears to act as an index
                // into the series matching this measurement type and phase -- confirm
                dataSeries = cycleData.Value.DataSeries
                    .Where(series => series.SeriesInfo.Channel.MeasurementType.Name == flotSeries.MeasurementType)
                    .Where(series => series.SeriesInfo.Channel.Phase.Name == flotSeries.Phase)
                    .Skip(flotSeries.SeriesID)
                    .FirstOrDefault();

                if ((object)dataSeries == null)
                    continue;
            }
            else if (flotSeries.FlotType == FlotSeriesType.Fault)
            {
                string algorithm = flotSeries.ChannelName;

                if (!faultCurveData.Value.TryGetValue(algorithm, out dataSeries))
                    continue;
            }
            else
            {
                continue;
            }

            // Convert data points to [milliseconds-since-epoch, value] pairs, skipping NaNs
            foreach (DataPoint dataPoint in dataSeries.DataPoints)
            {
                if (!double.IsNaN(dataPoint.Value))
                    flotSeries.DataPoints.Add(new double[] { dataPoint.Time.Subtract(epoch).TotalMilliseconds, dataPoint.Value });
            }

            flotSeriesList.Add(flotSeries);
        }
    }

    return flotSeriesList;
}
// Page load handler: reads query-string parameters, looks up the requested
// event and its related metadata, and stores the results in the posted*
// fields for consumption by the page's client-side script.
protected void Page_Load(object sender, EventArgs e)
{
    if (!IsPostBack)
    {
        if (Request["eventId"] != null)
        {
            // Optional display flags passed through from the query string
            if (Request["faultcurves"] != null)
                postedShowFaultCurves = Request["faultcurves"];

            if (Request["breakerdigitals"] != null)
                postedShowBreakerDigitals = Request["breakerdigitals"];

            // Rebuild the query string minus the eventId parameter
            // (URL-encoded, each pair prefixed with '&')
            postedURLQueryString = string.Concat(Request.QueryString.AllKeys
                .Where(key => !key.Equals("eventId", StringComparison.OrdinalIgnoreCase))
                .Select(key => "&" + HttpUtility.UrlEncode(key) + "=" + HttpUtility.UrlEncode(Request.QueryString[key])));

            postedEventId = Request["eventId"];

            using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(connectionString))
            {
                try
                {
                    EventTypeTableAdapter eventTypeAdapter = dbAdapterContainer.GetAdapter<EventTypeTableAdapter>();
                    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
                    MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
                    FaultSummaryTableAdapter summaryInfo = dbAdapterContainer.GetAdapter<FaultSummaryTableAdapter>();
                    DisturbanceTableAdapter disturbanceAdapter = dbAdapterContainer.GetAdapter<DisturbanceTableAdapter>();

                    // First() throws if the event does not exist; the catch below
                    // surfaces the message through postedErrorMessage
                    MeterData.EventRow theevent = eventAdapter.GetDataByID(Convert.ToInt32(postedEventId)).First();
                    JavaScriptSerializer serializer = new JavaScriptSerializer();

                    // Serialize the list of plottable series for the client-side chart
                    postedSeriesList = serializer.Serialize(SignalCode.GetFlotInfo(theevent.ID));
                    postedMeterId = theevent.MeterID.ToString();
                    postedDate = theevent.StartTime.ToShortDateString();
                    postedEventId = theevent.ID.ToString();
                    postedEventDate = theevent.StartTime.ToString("yyyy-MM-dd HH:mm:ss.fffffff");

                    // Milliseconds since the Unix epoch (1970-01-01)
                    postedEventMilliseconds = theevent.StartTime.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds.ToString();
                    postedMeterName = meterInfo.Meters.Single(m => m.ID == theevent.MeterID).Name;

                    MeterData.EventTypeDataTable eventTypes = eventTypeAdapter.GetData();

                    postedAdjacentEventIds = GetPreviousAndNextEventIds(theevent.ID, dbAdapterContainer.Connection);

                    // Line name as seen by this meter; empty string when not configured
                    postedLineName = meterInfo.MeterLines.Where(row => row.LineID == theevent.LineID)
                        .Where(row => row.MeterID == theevent.MeterID)
                        .Select(row => row.LineName)
                        .FirstOrDefault() ?? "";

                    postedLineLength = meterInfo.Lines
                        .Where(row => row.ID == theevent.LineID)
                        .Select(row => row.Length)
                        .AsEnumerable()
                        .Select(length => length.ToString())
                        .FirstOrDefault() ?? "";

                    postedEventName = eventTypes
                        .Where(row => row.ID == theevent.EventTypeID)
                        .Select(row => row.Name)
                        .DefaultIfEmpty("")
                        .Single();

                    if (postedEventName.Equals("Fault"))
                    {
                        // For faults, surface the selected algorithm's summary values,
                        // preferring non-suppressed summaries with the earliest inception
                        FaultLocationData.FaultSummaryDataTable thesummarydatatable = summaryInfo.GetDataBy(Convert.ToInt32(postedEventId));

                        FaultLocationData.FaultSummaryRow thesummary = thesummarydatatable
                            .Where(row => row.IsSelectedAlgorithm == 1)
                            .OrderBy(row => row.IsSuppressed)
                            .ThenBy(row => row.Inception)
                            .FirstOrDefault();

                        if ((object)thesummary != null)
                        {
                            postedStartTime = thesummary.Inception.TimeOfDay.ToString();
                            postedDurationPeriod = thesummary.DurationCycles.ToString("##.##", CultureInfo.InvariantCulture) + " cycles";
                            postedMagnitude = thesummary.CurrentMagnitude.ToString("####.#", CultureInfo.InvariantCulture) + " Amps (RMS)";
                        }
                    }
                    else if (new[] { "Sag", "Swell" }.Contains(postedEventName))
                    {
                        // For sags and swells, surface the earliest disturbance
                        // of the same event type
                        MeterData.DisturbanceDataTable disturbanceTable = disturbanceAdapter.GetDataBy(theevent.ID);

                        MeterData.DisturbanceRow disturbance = disturbanceTable
                            .Where(row => row.EventTypeID == theevent.EventTypeID)
                            .OrderBy(row => row.StartTime)
                            .FirstOrDefault();

                        if ((object)disturbance != null)
                        {
                            postedStartTime = disturbance.StartTime.TimeOfDay.ToString();
                            postedDurationPeriod = disturbance.DurationCycles.ToString("##.##", CultureInfo.InvariantCulture) + " cycles";

                            // -1.0e308 presumably marks "magnitude unavailable" -- TODO confirm
                            if (disturbance.PerUnitMagnitude != -1.0e308)
                                postedMagnitude = disturbance.PerUnitMagnitude.ToString("N3", CultureInfo.InvariantCulture) + " pu (RMS)";
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Surface the error message to the page rather than failing the request
                    postedErrorMessage = ex.Message;
                }
            }
        }
    }
}
/// <summary>
/// Processes the file at the given path.
/// </summary>
/// <param name="fileGroupID">The identifier for the file group to be processed.</param>
/// <returns>False if the file was not able to be processed and needs to be processed again later.</returns>
public bool ProcessFileGroup(int fileGroupID)
{
    DataFile dataFile = null;

    try
    {
        SystemSettings systemSettings = new SystemSettings(m_connectionString);

        using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
        {
            FileInfoDataContext fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();

            // Look up the file group and its first data file;
            // nothing to process when either is missing
            FileGroup fileGroup = LoadFileGroup(fileInfo, fileGroupID);

            if ((object)fileGroup == null)
                return true;

            dataFile = fileGroup.DataFiles.FirstOrDefault();

            if ((object)dataFile == null)
                return true;

            // Parse the file
            List<MeterDataSet> meterDataSets = LoadMeterDataSets(dbAdapterContainer, fileGroup);

            // Set properties on each of the meter data sets
            foreach (MeterDataSet meterDataSet in meterDataSets)
            {
                meterDataSet.ConnectionString = m_connectionString;
                meterDataSet.FilePath = dataFile.FilePath;
                meterDataSet.FileGroup = fileGroup;
            }

            // Process meter data sets
            OnStatusMessage("Processing meter data from file \"{0}\"...", dataFile.FilePath);
            ProcessMeterDataSets(meterDataSets, systemSettings, dbAdapterContainer);
            OnStatusMessage("Finished processing data from file \"{0}\".", dataFile.FilePath);
        }
    }
    catch (Exception ex)
    {
        // Prefer the file path in the error message when it is known
        string message = ((object)dataFile != null)
            ? string.Format("Failed to process file \"{0}\" due to exception: {1}", dataFile.FilePath, ex.Message)
            : string.Format("Failed to process file group \"{0}\" due to exception: {1}", fileGroupID, ex.Message);

        OnHandleException(new InvalidOperationException(message, ex));
    }

    return true;
}
// Runs data operations and data writers against the meter data set,
// flagging the file group with an error when processing fails.
private void ProcessMeterData(MeterDataSet meterDataSet, DbAdapterContainer dbAdapterContainer)
{
    try
    {
        meterDataSet.ConnectionString = m_connectionString;
        ExecuteDataOperation(meterDataSet, dbAdapterContainer);
        ExecuteDataWriters(meterDataSet, dbAdapterContainer);
    }
    catch (Exception ex)
    {
        try
        {
            OnHandleException(ex);

            // Mark the file group so the failure is visible in the database
            meterDataSet.FileGroup.Error = 1;
            dbAdapterContainer.GetAdapter<FileInfoDataContext>().SubmitChanges();
        }
        catch
        {
            // Ignore errors here as they are most likely
            // related to the error we originally caught
        }
    }
}
// Builds one meter data set per meter represented in the file group,
// populating each with the time-domain data for that meter's events.
private List<MeterDataSet> LoadMeterDataSets(DbAdapterContainer dbAdapterContainer, FileGroup fileGroup)
{
    List<MeterDataSet> meterDataSets = new List<MeterDataSet>();

    MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
    EventDataTableAdapter eventDataAdapter = dbAdapterContainer.GetAdapter<EventDataTableAdapter>();
    MeterData.EventDataTable eventTable = eventAdapter.GetDataByFileGroup(fileGroup.ID);

    // Group the events by meter so each meter gets exactly one meter data set
    foreach (IGrouping<int, MeterData.EventRow> eventsForMeter in eventTable.GroupBy(evt => evt.MeterID))
    {
        MeterDataSet meterDataSet = new MeterDataSet();
        meterDataSet.Meter = meterInfo.Meters.SingleOrDefault(meter => meter.ID == eventsForMeter.Key);

        // Pull the time-domain data for each of this meter's events
        foreach (MeterData.EventRow evt in eventsForMeter)
        {
            DataGroup dataGroup = new DataGroup();
            dataGroup.FromData(meterDataSet.Meter, eventDataAdapter.GetTimeDomainData(evt.EventDataID));

            foreach (DataSeries dataSeries in dataGroup.DataSeries)
                meterDataSet.DataSeries.Add(dataSeries);
        }

        meterDataSets.Add(meterDataSet);
    }

    return meterDataSets;
}
// Builds the full list of plottable series for an event: waveform series,
// cycle data series, and fault curve series, in that order.
public static List<FlotSeries> GetFlotInfo(int eventID)
{
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
        FaultLocationInfoDataContext faultInfo = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>();

        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();

        // An unknown event ID yields an empty series list
        if ((object)eventRow == null)
            return new List<FlotSeries>();

        IEnumerable<FlotSeries> waveformSeries = GetWaveformInfo(meterInfo.Series, eventRow.MeterID, eventRow.LineID).Select(ToFlotSeries);
        IEnumerable<FlotSeries> faultCurveSeries = GetFaultCurveInfo(connection, eventID).Select(ToFlotSeries);

        return waveformSeries
            .Concat(CycleDataInfo)
            .Concat(faultCurveSeries)
            .ToList();
    }
}
// Retrieves the plottable data points for the requested series indexes of an event.
// Indexes map, in order, onto waveform series, cycle data series, and fault curve
// series. Returns an empty list when the event ID is unknown.
public List<FlotSeries> GetFlotData(int eventID, List<int> seriesIndexes)
{
    List<FlotSeries> flotSeriesList = new List<FlotSeries>();

    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
        EventDataTableAdapter eventDataAdapter = dbAdapterContainer.GetAdapter<EventDataTableAdapter>();
        FaultCurveTableAdapter faultCurveAdapter = dbAdapterContainer.GetAdapter<FaultCurveTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
        FaultLocationInfoDataContext faultLocationInfo = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>();

        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();

        // FIX: guard against unknown event IDs -- previously a null eventRow caused
        // a NullReferenceException below (GetFlotInfo already guards this way)
        if ((object)eventRow == null)
            return flotSeriesList;

        Meter meter = meterInfo.Meters.First(m => m.ID == eventRow.MeterID);

        List<Series> waveformInfo = GetWaveformInfo(meterInfo.Series, eventRow.MeterID, eventRow.LineID);
        List<string> faultCurveInfo = GetFaultCurveInfo(connection, eventID);
        DateTime epoch = new DateTime(1970, 1, 1);

        // Each data source is loaded lazily so a request that touches
        // only one source does not pay for the others
        Lazy<Dictionary<int, DataSeries>> waveformData = new Lazy<Dictionary<int, DataSeries>>(() =>
        {
            return ToDataGroup(meter, eventDataAdapter.GetTimeDomainData(eventRow.EventDataID)).DataSeries
                .ToDictionary(dataSeries => dataSeries.SeriesInfo.ID);
        });

        Lazy<DataGroup> cycleData = new Lazy<DataGroup>(() => ToDataGroup(meter, eventDataAdapter.GetFrequencyDomainData(eventRow.EventDataID)));

        Lazy<Dictionary<string, DataSeries>> faultCurveData = new Lazy<Dictionary<string, DataSeries>>(() =>
        {
            return faultCurveAdapter
                .GetDataBy(eventRow.ID)
                .Select(faultCurve => new { Algorithm = faultCurve.Algorithm, DataGroup = ToDataGroup(meter, faultCurve.Data) })
                .Where(obj => obj.DataGroup.DataSeries.Count > 0)
                .ToDictionary(obj => obj.Algorithm, obj => obj.DataGroup[0]);
        });

        foreach (int index in seriesIndexes)
        {
            DataSeries dataSeries = null;
            FlotSeries flotSeries = null;

            // Translate the flat index into an offset within
            // the waveform, cycle, or fault curve ranges
            int waveformIndex = index;
            int cycleIndex = waveformIndex - waveformInfo.Count;
            int faultCurveIndex = cycleIndex - CycleDataInfo.Count;

            if (waveformIndex < waveformInfo.Count)
            {
                if (!waveformData.Value.TryGetValue(waveformInfo[index].ID, out dataSeries))
                    continue;

                flotSeries = ToFlotSeries(waveformInfo[index]);
            }
            else if (cycleIndex < CycleDataInfo.Count)
            {
                if (cycleIndex >= cycleData.Value.DataSeries.Count)
                    continue;

                dataSeries = cycleData.Value[cycleIndex];

                flotSeries = new FlotSeries()
                {
                    MeasurementType = CycleDataInfo[cycleIndex].MeasurementType,
                    MeasurementCharacteristic = CycleDataInfo[cycleIndex].MeasurementCharacteristic,
                    Phase = CycleDataInfo[cycleIndex].Phase,
                    SeriesType = CycleDataInfo[cycleIndex].SeriesType
                };
            }
            else if (faultCurveIndex < faultCurveInfo.Count)
            {
                string algorithm = faultCurveInfo[faultCurveIndex];

                if (!faultCurveData.Value.TryGetValue(algorithm, out dataSeries))
                    continue;

                flotSeries = ToFlotSeries(faultCurveInfo[faultCurveIndex]);
            }
            else
            {
                // Index is beyond all known series; ignore it
                continue;
            }

            // Convert data points to [milliseconds-since-epoch, value] pairs, skipping NaNs
            foreach (DataPoint dataPoint in dataSeries.DataPoints)
            {
                if (!double.IsNaN(dataPoint.Value))
                    flotSeries.DataPoints.Add(new double[] { dataPoint.Time.Subtract(epoch).TotalMilliseconds, dataPoint.Value });
            }

            flotSeriesList.Add(flotSeries);
        }
    }

    return flotSeriesList;
}
// Called when the file processor has picked up a file in one of the watch
// directories. This handler validates the file and processes it if able.
// On failure it flags the file group with an error before rethrowing; the
// finally block removes the file wrapper from the lookup unless the file
// was requeued.
private void FileProcessor_Processing(object sender, FileProcessorEventArgs fileProcessorEventArgs)
{
    if (m_disposed)
        return;

    try
    {
        string filePath;
        string connectionString;
        SystemSettings systemSettings;

        filePath = fileProcessorEventArgs.FullPath;

        // Requeue the file if an exclusive read lock cannot be obtained
        // (e.g. the file is still being written)
        if (!FilePath.TryGetReadLockExclusive(filePath))
        {
            fileProcessorEventArgs.Requeue = true;
            return;
        }

        connectionString = LoadSystemSettings();
        systemSettings = new SystemSettings(connectionString);

        using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
        {
            try
            {
                ProcessFile(
                    fileProcessorArgs: fileProcessorEventArgs,
                    connectionString: connectionString,
                    systemSettings: systemSettings,
                    dbAdapterContainer: dbAdapterContainer);
            }
            catch (Exception ex)
            {
                // There may be a problem here where the outer exception's call stack
                // was overwritten by the call stack of the point where it was thrown;
                // capture it so the original stack trace survives the rethrow below
                ExceptionDispatchInfo exInfo = ExceptionDispatchInfo.Capture(ex);

                try
                {
                    // Attempt to set the error flag on the file group
                    FileInfoDataContext fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();
                    FileWrapper fileWrapper = m_fileWrapperLookup.GetOrAdd(filePath, path => new FileWrapper(path));
                    FileGroup fileGroup = fileWrapper.GetFileGroup(fileInfo, systemSettings.XDATimeZoneInfo);
                    fileGroup.ProcessingEndTime = fileGroup.ProcessingStartTime;
                    fileGroup.Error = 1;
                    fileInfo.SubmitChanges();
                }
                catch (Exception fileGroupError)
                {
                    // Log exceptions that occur when setting the error flag on the file group
                    string message = $"Exception occurred setting error flag on file group: {fileGroupError.Message}";
                    OnProcessException(new Exception(message, fileGroupError));
                }

                // Throw the original exception
                exInfo.Throw();
            }
        }
    }
    catch (FileSkippedException)
    {
        // Do not wrap FileSkippedExceptions because
        // these only generate warning messages
        throw;
    }
    catch (Exception ex)
    {
        // Wrap all other exceptions to include the file path in the message
        string message = $"Exception occurred processing file \"{fileProcessorEventArgs.FullPath}\": {ex.Message}";
        throw new Exception(message, ex);
    }
    finally
    {
        // Make sure to clean up file wrappers from
        // the lookup table to prevent memory leaks
        if (!fileProcessorEventArgs.Requeue)
            m_fileWrapperLookup.Remove(fileProcessorEventArgs.FullPath);
    }
}
// Writes a fault data results file for each data group in the
// meter data set that has associated fault data.
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    FaultGroup faultGroup;

    CycleDataResource cycleDataResource = meterDataSet.GetResource(() => CycleDataResource.GetResource(meterDataSet, dbAdapterContainer));
    FaultDataResource faultDataResource = meterDataSet.GetResource(() => new FaultDataResource(dbAdapterContainer));

    // Make sure the results directory exists before writing to it
    if (!Directory.Exists(m_resultsPath))
        Directory.CreateDirectory(m_resultsPath);

    for (int i = 0; i < cycleDataResource.DataGroups.Count; i++)
    {
        DataGroup dataGroup = cycleDataResource.DataGroups[i];

        // Skip data groups that have no fault data
        if (!faultDataResource.FaultLookup.TryGetValue(dataGroup, out faultGroup))
            continue;

        // Results file is named after the source file, the data group index, and the line
        string rootFileName = FilePath.GetFileNameWithoutExtension(meterDataSet.FilePath);
        string fileName = string.Format("{0},{1:000},Line{2}.dat", rootFileName, i, dataGroup.Line.AssetKey);

        List<int> seriesIDs = dataGroup.DataSeries
            .Select(series => series.SeriesInfo.ID)
            .ToList();

        EventDataSet eventDataSet = new EventDataSet()
        {
            ResultsPath = Path.Combine(m_resultsPath, fileName),
            MeterDataSet = meterDataSet,
            TimeZoneOffset = GetTimeZoneOffset(meterDataSet.Meter.TimeZone, dataGroup.StartTime),
            DataGroup = dataGroup,
            VICycleDataGroup = cycleDataResource.VICycleDataGroups[i],
            Faults = faultGroup.Faults,
            OutputChannels = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>().OutputChannels.Where(channel => seriesIDs.Contains(channel.SeriesID)).ToList()
        };

        WriteResults(eventDataSet);
    }
}
// Parses the file on the meter's processing thread and kicks off processing of the meter data set.
// Takes ownership of dataReaderWrapper (disposed via the using statement, or explicitly
// on early exit). The finally block records the processing end time and clears the
// active-file bookkeeping regardless of outcome.
private void ParseFile(string connectionString, SystemSettings systemSettings, string filePath, string meterKey, DataReaderWrapper dataReaderWrapper, FileWrapper fileWrapper)
{
    FileGroup fileGroup = null;
    MeterDataSet meterDataSet;
    int queuedFileCount;

    // Keep track of the number of operations in thread queues
    queuedFileCount = Interlocked.Decrement(ref m_queuedFileCount);

    // Bail out early if the service is stopping or disposed
    if (m_stopped || m_disposed)
    {
        dataReaderWrapper.Dispose();
        return;
    }

    using (dataReaderWrapper)
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
    {
        try
        {
            // Keep track of the meters and files currently being processed
            if ((object)meterKey != null)
                m_activeFiles[meterKey] = filePath;

            // Tag log messages on this thread with the meter key
            ThreadContext.Properties["Meter"] = meterKey;

            // Create the file group
            fileGroup = fileWrapper.GetFileGroup(dbAdapterContainer.GetAdapter<FileInfoDataContext>(), systemSettings.XDATimeZoneInfo);

            // Parse the file to turn it into a meter data set
            OnStatusMessage($"Parsing data from file \"{filePath}\"...");
            dataReaderWrapper.DataObject.Parse(filePath);
            OnStatusMessage($"Finished parsing data from file \"{filePath}\".");

            meterDataSet = dataReaderWrapper.DataObject.MeterDataSet;

            // If the data reader does not return a data set,
            // there is nothing left to do
            if ((object)meterDataSet == null)
                return;

            // Data reader has finally outlived its usefulness
            dataReaderWrapper.Dispose();

            // Set file path, file group, connection string,
            // and meter asset key for the meter data set
            meterDataSet.FilePath = filePath;
            meterDataSet.FileGroup = fileGroup;
            meterDataSet.ConnectionString = connectionString;
            meterDataSet.Meter.AssetKey = meterKey;

            // Shift date/time values to the configured time zone and set the start and end time values on the file group
            ShiftTime(meterDataSet, meterDataSet.Meter.GetTimeZoneInfo(systemSettings.DefaultMeterTimeZoneInfo), systemSettings.XDATimeZoneInfo);
            SetDataTimeRange(meterDataSet, dbAdapterContainer.GetAdapter<FileInfoDataContext>());

            // Determine whether the file duration is within a user-defined maximum tolerance
            ValidateFileDuration(meterDataSet.FilePath, systemSettings.MaxFileDuration, meterDataSet.FileGroup);

            // Determine whether the timestamps in the file extend beyond user-defined thresholds
            ValidateFileTimestamps(meterDataSet.FilePath, meterDataSet.FileGroup, systemSettings, dbAdapterContainer.GetAdapter<FileInfoDataContext>());

            // Process the meter data set
            OnStatusMessage($"Processing meter data from file \"{filePath}\"...");
            ProcessMeterDataSet(meterDataSet, systemSettings, dbAdapterContainer);
            OnStatusMessage($"Finished processing data from file \"{filePath}\".");
        }
        catch (Exception ex)
        {
            // There seems to be a problem here where the outer exception's call stack
            // was overwritten by the call stack of the point where it was thrown;
            // capture it so the original stack trace survives the rethrow below
            ExceptionDispatchInfo exInfo = ExceptionDispatchInfo.Capture(ex);

            try
            {
                // Attempt to set the error flag on the file group
                if ((object)fileGroup != null)
                    fileGroup.Error = 1;
            }
            catch (Exception fileGroupError)
            {
                // Log any exceptions that occur when attempting to set the error flag on the file group
                string message = $"Exception occurred setting error flag on file group: {fileGroupError.Message}";
                OnProcessException(new Exception(message, fileGroupError));
            }

            // Throw the original exception
            exInfo.Throw();
        }
        finally
        {
            if ((object)fileGroup != null)
            {
                try
                {
                    // Attempt to set the processing end time of the file group
                    // (also persists the error flag set in the catch block above)
                    fileGroup.ProcessingEndTime = TimeZoneInfo.ConvertTimeFromUtc(DateTime.UtcNow, systemSettings.XDATimeZoneInfo);
                    dbAdapterContainer.GetAdapter<FileInfoDataContext>().SubmitChanges();
                }
                catch (Exception ex)
                {
                    // Log any exceptions that occur when attempting to set processing end time on the file group
                    string message = $"Exception occurred setting processing end time on file group: {ex.Message}";
                    OnProcessException(new Exception(message, ex));
                }
            }

            // Keep track of the meters and files currently being processed
            if ((object)meterKey != null)
                m_activeFiles.TryRemove(meterKey, out filePath);

            ThreadContext.Properties.Remove("Meter");
        }
    }
}
/// <summary>
/// Builds the list of flot series available for the given event:
/// the event's waveform channels, the cycle-data series whose
/// measurement type/phase pair matches an instantaneous waveform
/// channel, and the event's fault curves.
/// </summary>
/// <param name="eventID">ID of the event to look up.</param>
/// <returns>
/// The available <see cref="FlotSeries"/> records; an empty list if the event does not exist.
/// </returns>
public static List<FlotSeries> GetFlotInfo(int eventID)
{
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();

        // NOTE(review): faultInfo was retrieved but never used in the original;
        // kept in case GetAdapter has registration side effects on the container — confirm
        FaultLocationInfoDataContext faultInfo = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>();

        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();

        // Unknown event: return an empty list rather than null
        if ((object)eventRow == null)
            return new List<FlotSeries>();

        // Materialize the waveform info once and reuse it below
        // (the original queried GetWaveformInfo a second time for the return value)
        List<Series> waveformInfo = GetWaveformInfo(meterInfo.Series, eventRow.MeterID, eventRow.LineID);

        // Set of (MeasurementType, Phase) pairs that have instantaneous waveform data;
        // a self-keyed dictionary is used as a set (anonymous types have value equality)
        var lookup = waveformInfo
            .Where(info => info.Channel.MeasurementCharacteristic.Name == "Instantaneous")
            .Where(info => new string[] { "Instantaneous", "Values" }.Contains(info.SeriesType.Name))
            .Select(info => new { MeasurementType = info.Channel.MeasurementType.Name, Phase = info.Channel.Phase.Name })
            .Distinct()
            .ToDictionary(info => info);

        // Only include cycle data for measurement type/phase pairs
        // that actually have instantaneous waveform channels
        IEnumerable<FlotSeries> cycleDataInfo = CycleDataInfo
            .Where(info => lookup.ContainsKey(new { info.MeasurementType, info.Phase }))
            .Select(info => info.Clone());

        return waveformInfo
            .Select(ToFlotSeries)
            .Concat(cycleDataInfo)
            .Concat(GetFaultCurveInfo(connection, eventID).Select(ToFlotSeries))
            .ToList();
    }
}
// Validates the file before invoking the file processing handler.
// Improves file processor performance by executing the filter in
// parallel and also by bypassing the set of processed files.
private bool PrevalidateFile(string filePath)
{
    try
    {
        // Load system settings and verify the file's creation time
        // falls within the configured tolerance
        SystemSettings systemSettings = new SystemSettings(LoadSystemSettings());
        ValidateFileCreationTime(filePath, systemSettings.MaxFileCreationTimeOffset);

        // Verify the meter key extracted from the file path
        // maps to a known meter in the database
        using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
        {
            string meterKey = GetMeterKey(filePath, systemSettings.FilePattern);
            ValidateMeterKey(filePath, meterKey, dbAdapterContainer.GetAdapter<MeterInfoDataContext>());
        }

        return true;
    }
    catch (FileSkippedException ex)
    {
        // This method may be called if the file was deleted,
        // in which case the user almost certainly doesn't care
        // why it was skipped for processing and logging the
        // error would only cause confusion
        if (File.Exists(filePath))
            Log.Warn(ex.Message);

        return false;
    }
}
/// <summary>
/// Processes data not yet processed
/// by this SandBox instance.
/// </summary>
private void ProcessLatestDataOperation()
{
    string latestDataFile = FilePath.GetAbsolutePath(@"LatestData.bin");
    int latestFileGroupID;
    FileInfoDataContext fileInfo;
    List<int> newFileGroups;

    if ((object)m_systemSettings == null)
        ReloadSystemSettings();

    using (FileBackedDictionary<string, int> dictionary = new FileBackedDictionary<string, int>(latestDataFile))
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(m_systemSettings.DbConnectionString, m_systemSettings.DbTimeout))
    {
        fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();

        do
        {
            dictionary.Compact();

            // Watermark of the most recently processed file group;
            // start from zero on first run
            if (!dictionary.TryGetValue("latestFileGroupID", out latestFileGroupID))
                latestFileGroupID = 0;

            // BUG FIX: OrderBy must precede Take. The original took an
            // arbitrary 100 IDs and then sorted them; since the watermark
            // advances to each processed ID, any lower ID excluded from
            // that arbitrary batch would be skipped forever. Ordering
            // first guarantees the lowest 100 unprocessed IDs are taken.
            newFileGroups = fileInfo.FileGroups
                .Select(fileGroup => fileGroup.ID)
                .Where(id => id > latestFileGroupID)
                .OrderBy(id => id)
                .Take(100)
                .ToList();

            foreach (int fileGroupID in newFileGroups)
            {
                // Process each file group and persist the watermark so a
                // restart resumes after the last completed group
                MeterDataProcessor processor = new MeterDataProcessor(LoadSystemSettings());
                processor.ProcessFileGroup(fileGroupID);
                dictionary["latestFileGroupID"] = fileGroupID;
            }
        }
        while (newFileGroups.Count > 0);
    }
}
/// <summary>
/// Processes each of the given meter data sets, then stamps every
/// data set's file group with a single shared processing end time
/// (converted to the XDA time zone) and submits the changes.
/// Any exception is routed to the process exception handler.
/// </summary>
/// <param name="meterDataSets">The meter data sets to be processed.</param>
/// <param name="systemSettings">Settings supplying the XDA time zone.</param>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
public void ProcessMeterDataSets(List<MeterDataSet> meterDataSets, SystemSettings systemSettings, DbAdapterContainer dbAdapterContainer)
{
    try
    {
        foreach (MeterDataSet dataSet in meterDataSets)
            ProcessMeterData(dataSet, dbAdapterContainer);

        // Use one timestamp for all file groups so they share
        // an identical processing end time
        DateTime processingEndTime = TimeZoneInfo.ConvertTimeFromUtc(DateTime.UtcNow, systemSettings.XDATimeZoneInfo);

        foreach (MeterDataSet dataSet in meterDataSets)
            dataSet.FileGroup.ProcessingEndTime = processingEndTime;

        dbAdapterContainer.GetAdapter<FileInfoDataContext>().SubmitChanges();
    }
    catch (Exception ex)
    {
        OnHandleException(ex);
    }
}