/// <summary>
/// Ensures an EventType record exists for every <see cref="EventClassification"/>
/// value and returns a lookup from classification to its database ID.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and adapters.</param>
/// <returns>Lookup mapping each recognized classification to its EventType ID.</returns>
private Dictionary<EventClassification, int> GetEventTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventTypeDataTable eventTypeTable = new MeterData.EventTypeDataTable();

    // Stage one row per defined classification; the MERGE below only inserts
    // rows whose names are not already in the database
    foreach (EventClassification classification in Enum.GetValues(typeof(EventClassification)))
        eventTypeTable.AddEventTypeRow(classification.ToString(), classification.ToString());

    BulkLoader bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

    bulkLoader.MergeTableFormat =
        "MERGE INTO {0} AS Target " +
        "USING {1} AS Source " +
        "ON Source.Name = Target.Name " +
        "WHEN NOT MATCHED THEN " +
        "    INSERT (Name, Description) " +
        "    VALUES (Source.Name, Source.Description);";

    bulkLoader.Load(eventTypeTable);

    // Refill the table so each row now carries its database-assigned ID
    dbAdapterContainer.GetAdapter<EventTypeTableAdapter>().Fill(eventTypeTable);

    // FIX: the previous implementation captured an 'out' variable in a Where
    // closure and read it in a subsequent Select, relying on the interleaved
    // evaluation order of LINQ to Objects; parse each row explicitly instead
    Dictionary<EventClassification, int> eventTypeLookup = new Dictionary<EventClassification, int>();

    foreach (MeterData.EventTypeRow row in eventTypeTable)
    {
        EventClassification classification;

        // Rows whose names do not map to a known classification are skipped
        if (Enum.TryParse(row.Name, out classification))
            eventTypeLookup[classification] = row.ID;
    }

    return eventTypeLookup;
}
/// <summary>
/// Builds one <see cref="MeterDataSet"/> per meter that has events in the
/// given file group, populating each with the time-domain data of its events.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
/// <param name="fileGroup">File group whose events are loaded.</param>
/// <returns>List of meter data sets, one per meter with events.</returns>
private List<MeterDataSet> LoadMeterDataSets(DbAdapterContainer dbAdapterContainer, FileGroup fileGroup)
{
    List<MeterDataSet> meterDataSets = new List<MeterDataSet>();

    MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
    EventDataTableAdapter eventDataAdapter = dbAdapterContainer.GetAdapter<EventDataTableAdapter>();

    MeterData.EventDataTable eventTable = eventAdapter.GetDataByFileGroup(fileGroup.ID);

    // Group events by meter so each meter gets exactly one data set
    foreach (IGrouping<int, MeterData.EventRow> eventsByMeter in eventTable.GroupBy(evt => evt.MeterID))
    {
        MeterDataSet meterDataSet = new MeterDataSet();
        meterDataSet.Meter = meterInfo.Meters.SingleOrDefault(meter => meter.ID == eventsByMeter.Key);

        // Append the time-domain series of every event to the meter's data set
        foreach (MeterData.EventRow evt in eventsByMeter)
        {
            DataGroup dataGroup = new DataGroup();
            dataGroup.FromData(meterDataSet.Meter, eventDataAdapter.GetTimeDomainData(evt.EventDataID));

            foreach (DataSeries dataSeries in dataGroup.DataSeries)
                meterDataSet.DataSeries.Add(dataSeries);
        }

        meterDataSets.Add(meterDataSet);
    }

    return meterDataSets;
}
/// <summary>
/// On the initial GET, reads the eventId query parameter and populates the
/// posted* fields with the event's date, meter ID, and meter name.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    if (IsPostBack)
        return;

    if (Request["eventId"] == null)
        return;

    postedEventId = Request["eventId"];

    // FIX: the DbAdapterContainer was previously never disposed — only the
    // adapter it produced was. Dispose the container itself so its connection
    // is released. (Assumes the container owns and disposes the adapters it
    // creates — TODO confirm against DbAdapterContainer.Dispose.)
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(connectionstring))
    using (MeterInfoDataContext meterInfo = new MeterInfoDataContext(connectionstring))
    {
        try
        {
            MeterDataQualitySummaryTableAdapter meterdataqualityAdapter = dbAdapterContainer.GetAdapter<MeterDataQualitySummaryTableAdapter>();

            DataQuality.MeterDataQualitySummaryRow theevent = meterdataqualityAdapter.GetDataByID(Convert.ToInt32(postedEventId)).First();
            Meter themeter = meterInfo.Meters.Single(m => m.ID == theevent.MeterID);

            postedDate = theevent.Date.ToShortDateString();
            postedMeterId = theevent.MeterID.ToString();
            postedMeterName = themeter.Name;
        }
        catch (Exception)
        {
            // Lookup or parse failure: clear the posted fields so the page
            // renders without event details (original behavior preserved)
            postedDate = "";
            postedEventId = "";
            postedMeterId = "";
            postedMeterName = "";
        }
    }
}
/// <summary>
/// Creates a new <see cref="FaultDataResource"/> bound to the given adapter container.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters used by this resource.</param>
public FaultDataResource(DbAdapterContainer dbAdapterContainer)
{
    m_dbAdapterContainer = dbAdapterContainer;

    // Empty lookup of fault groups, keyed by data group
    m_faultLookup = new Dictionary<DataGroup, FaultGroup>();

    // Default settings objects; presumably populated later from configuration
    m_faultLocationSettings = new FaultLocationSettings();
    m_breakerSettings = new BreakerSettings();
}
/// <summary>
/// Associates queued breaker operations with their corresponding event
/// records and bulk-loads the resulting table into the database.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and adapters.</param>
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventDataTable eventTable = dbAdapterContainer.GetAdapter<EventTableAdapter>().GetDataByFileGroup(m_meterDataSet.FileGroup.ID);

    // Lookup from event key to event row, restricted to this meter's events
    Dictionary<EventKey, MeterData.EventRow> eventLookup = eventTable
        .Where(evt => evt.MeterID == m_meterDataSet.Meter.ID)
        .ToDictionary(CreateEventKey);

    foreach (Tuple<EventKey, MeterData.BreakerOperationRow> breakerOperation in m_breakerOperations)
    {
        MeterData.EventRow eventRow;

        // Operations with no matching event are silently dropped
        if (!eventLookup.TryGetValue(breakerOperation.Item1, out eventRow))
            continue;

        breakerOperation.Item2.EventID = eventRow.ID;
        m_breakerOperationTable.AddBreakerOperationRow(breakerOperation.Item2);
    }

    BulkLoader bulkLoader = new BulkLoader()
    {
        Connection = dbAdapterContainer.Connection,
        CommandTimeout = dbAdapterContainer.CommandTimeout
    };

    bulkLoader.Load(m_breakerOperationTable);
}
/// <summary>
/// Ensures a BreakerOperationType record exists for every
/// <see cref="BreakerOperationType"/> value and returns a lookup from
/// operation type to its database ID.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and adapters.</param>
/// <returns>Lookup mapping each recognized operation type to its database ID.</returns>
private Dictionary<BreakerOperationType, int> GetBreakerOperationTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    MeterData.BreakerOperationTypeDataTable breakerOperationTypeTable = new MeterData.BreakerOperationTypeDataTable();

    // Stage one row per defined operation type; the MERGE below only inserts
    // rows whose names are not already in the database
    foreach (BreakerOperationType operationType in Enum.GetValues(typeof(BreakerOperationType)))
        breakerOperationTypeTable.AddBreakerOperationTypeRow(operationType.ToString(), operationType.ToString());

    BulkLoader bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

    bulkLoader.MergeTableFormat =
        "MERGE INTO {0} AS Target " +
        "USING {1} AS Source " +
        "ON Source.Name = Target.Name " +
        "WHEN NOT MATCHED THEN " +
        "    INSERT (Name, Description) " +
        "    VALUES (Source.Name, Source.Description);";

    bulkLoader.Load(breakerOperationTypeTable);

    // Refill the table so each row now carries its database-assigned ID
    dbAdapterContainer.GetAdapter<BreakerOperationTypeTableAdapter>().Fill(breakerOperationTypeTable);

    // FIX: the previous implementation captured an 'out' variable in a Where
    // closure and read it in a subsequent Select, relying on the interleaved
    // evaluation order of LINQ to Objects; parse each row explicitly instead
    Dictionary<BreakerOperationType, int> breakerOperationTypeLookup = new Dictionary<BreakerOperationType, int>();

    foreach (MeterData.BreakerOperationTypeRow row in breakerOperationTypeTable)
    {
        BreakerOperationType operationType;

        // Rows whose names do not map to a known operation type are skipped
        if (Enum.TryParse(row.Name, out operationType))
            breakerOperationTypeLookup[operationType] = row.ID;
    }

    return breakerOperationTypeLookup;
}
/// <summary>
/// Writes one XML results file per data group for which fault data exists.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
/// <param name="meterDataSet">Meter data set whose fault results are written.</param>
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    CycleDataResource cycleDataResource = meterDataSet.GetResource(() => CycleDataResource.GetResource(meterDataSet, dbAdapterContainer));
    FaultDataResource faultDataResource = meterDataSet.GetResource(() => new FaultDataResource(dbAdapterContainer));

    if (!Directory.Exists(m_resultsPath))
        Directory.CreateDirectory(m_resultsPath);

    for (int index = 0; index < cycleDataResource.DataGroups.Count; index++)
    {
        DataGroup dataGroup = cycleDataResource.DataGroups[index];
        FaultGroup faultGroup;

        // Skip data groups with no fault data
        if (!faultDataResource.FaultLookup.TryGetValue(dataGroup, out faultGroup))
            continue;

        // File name: <source file>,<group index>,Line<asset key>.xml
        string rootFileName = FilePath.GetFileNameWithoutExtension(meterDataSet.FilePath);
        string fileName = string.Format("{0},{1:000},Line{2}.xml", rootFileName, index, dataGroup.Line.AssetKey);

        VICycleDataGroup viCycleDataGroup = cycleDataResource.VICycleDataGroups[index];

        WriteResults(meterDataSet, dataGroup, viCycleDataGroup, faultGroup.Faults, Path.Combine(m_resultsPath, fileName));
    }
}
/// <summary>
/// Summarizes fault data into the typed tables and bulk-loads them into the database.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and adapters.</param>
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    m_faultSummarizer.FillTables(dbAdapterContainer);

    FaultLocationData.FaultGroupDataTable faultGroupTable = m_faultSummarizer.FaultGroupTable;
    FaultLocationData.FaultSegmentDataTable faultSegmentTable = m_faultSummarizer.FaultSegmentTable;
    FaultLocationData.FaultCurveDataTable faultCurveTable = m_faultSummarizer.FaultCurveTable;
    FaultLocationData.FaultSummaryDataTable faultSummaryTable = m_faultSummarizer.FaultSummaryTable;

    // Nothing to do when the summarizer produced no fault data
    bool nothingToLoad =
        faultSegmentTable.Count == 0 &&
        faultCurveTable.Count == 0 &&
        faultSummaryTable.Count == 0;

    if (nothingToLoad)
        return;

    Log.Info("Loading fault data into the database...");

    BulkLoader bulkLoader = new BulkLoader()
    {
        Connection = dbAdapterContainer.Connection,
        CommandTimeout = dbAdapterContainer.CommandTimeout
    };

    bulkLoader.Load(faultGroupTable);
    bulkLoader.Load(faultSegmentTable);
    bulkLoader.Load(faultCurveTable);
    bulkLoader.Load(faultSummaryTable);

    Log.Info(string.Format("Loaded {0} faults into the database.", faultSummaryTable.Count));
}
/// <summary>
/// Runs the openEAS sandbox data operation against the meter data set:
/// prepare, execute, then load its results in a single transaction.
/// The operation is disposed (if disposable) even when a step throws.
/// </summary>
/// <param name="meterDataSet">Meter data set to process.</param>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
private void ExecuteDataOperation(MeterDataSet meterDataSet, DbAdapterContainer dbAdapterContainer)
{
    IDataOperation dataOperation = null;

    try
    {
        dataOperation = new openEASSandBoxOperation();

        // Push system settings from the connection string into the operation
        ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, dataOperation);

        dataOperation.Prepare(dbAdapterContainer);
        dataOperation.Execute(meterDataSet);

        // Load everything the operation produced inside one transaction
        using (TransactionScope transactionScope = new TransactionScope(TransactionScopeOption.Required, GetTransactionOptions()))
        {
            dataOperation.Load(dbAdapterContainer);
            transactionScope.Complete();
        }
    }
    finally
    {
        // ReSharper disable once SuspiciousTypeConversion.Global
        if ((object)dataOperation != null)
            TryDispose(dataOperation as IDisposable);
    }
}
/// <summary>
/// Loads analysis results into the database.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection; currently unused.</param>
// NOTE(review): this implementation is a stub — resultsCount is never
// incremented and nothing is written to the database, so the log line always
// reports zero results. Presumably a placeholder awaiting a real
// implementation; confirm before relying on this operation.
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    int resultsCount = 0;

    // Write analysis results to the database

    Log.InfoFormat("{0} results written to the database.", resultsCount);
}
/// <summary>
/// Bulk-loads the disturbance severity table into the database.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and command timeout.</param>
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    BulkLoader loader = new BulkLoader()
    {
        Connection = dbAdapterContainer.Connection,
        CommandTimeout = dbAdapterContainer.CommandTimeout
    };

    loader.Load(m_disturbanceSeverityTable);
}
/// <summary>
/// Prepares the alarm operation: caches the adapter container, creates
/// empty alarm tables, and primes the alarm type table from the database.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
public override void Prepare(DbAdapterContainer dbAdapterContainer)
{
    // Keep a reference for later use by Execute/Load
    m_dbAdapterContainer = dbAdapterContainer;

    m_alarmTypeTable = new AlarmData.AlarmTypeDataTable();
    m_alarmLogTable = new AlarmData.AlarmLogDataTable();

    // Populate the alarm types from the database up front
    dbAdapterContainer.GetAdapter<AlarmTypeTableAdapter>().Fill(m_alarmTypeTable);
}
/// <summary>
/// Associates summarized fault rows with their corresponding event records
/// and copies the matched rows into the typed destination tables.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
public void FillTables(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventDataTable eventTable = dbAdapterContainer.GetAdapter<EventTableAdapter>().GetDataByFileGroup(MeterDataSet.FileGroup.ID);

    // Lookup from event key to event row for this meter's events;
    // duplicate events are tolerated (first one wins) but logged
    Dictionary<EventKey, MeterData.EventRow> eventLookup = eventTable
        .Where(evt => evt.MeterID == MeterDataSet.Meter.ID)
        .GroupBy(CreateEventKey)
        .ToDictionary(grouping => grouping.Key, grouping =>
        {
            if (grouping.Count() > 1)
                Log.Warn($"Duplicate event found for meter {MeterDataSet.Meter.AssetKey}: {string.Join(", ", grouping.Select(evt => evt.ID))}");

            return grouping.First();
        });

    // The four destination tables are filled identically, so the loop is
    // factored into a single generic helper
    FillTable(eventLookup, m_faultGroupList, (row, id) => row.EventID = id, FaultGroupTable.AddFaultGroupRow);
    FillTable(eventLookup, m_faultSegmentList, (row, id) => row.EventID = id, FaultSegmentTable.AddFaultSegmentRow);
    FillTable(eventLookup, m_faultCurveList, (row, id) => row.EventID = id, FaultCurveTable.AddFaultCurveRow);
    FillTable(eventLookup, m_faultSummaryList, (row, id) => row.EventID = id, FaultSummaryTable.AddFaultSummaryRow);
}

// For each (event key, row) pair whose key matches a known event, stamps the
// row with the event's ID and adds it to the destination table; unmatched
// rows are silently dropped (original behavior).
private static void FillTable<TRow>(Dictionary<EventKey, MeterData.EventRow> eventLookup, List<Tuple<EventKey, TRow>> rowList, Action<TRow, int> assignEventID, Action<TRow> addRow)
{
    foreach (Tuple<EventKey, TRow> tuple in rowList)
    {
        MeterData.EventRow eventRow;

        if (eventLookup.TryGetValue(tuple.Item1, out eventRow))
        {
            assignEventID(tuple.Item2, eventRow.ID);
            addRow(tuple.Item2);
        }
    }
}
/// <summary>
/// Bulk-loads the double-ended fault distance and fault curve tables into the database.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and command timeout.</param>
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    BulkLoader bulkLoader = new BulkLoader()
    {
        Connection = dbAdapterContainer.Connection,
        CommandTimeout = dbAdapterContainer.CommandTimeout
    };

    bulkLoader.Load(m_doubleEndedFaultDistanceTable);
    bulkLoader.Load(m_faultCurveTable);
}
/// <summary>
/// Returns the number of emails already sent for the given event.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and command timeout.</param>
/// <param name="eventID">ID of the event to check.</param>
/// <returns>Count of EventSentEmail records for the event.</returns>
private static int GetEmailCount(DbAdapterContainer dbAdapterContainer, int eventID)
{
    // Parameterized query; {0} is filled in by AdoDataConnection, not string.Format
    const string CountQuery = "SELECT COUNT(*) FROM EventSentEmail WHERE EventID = {0}";

    // Wrap the existing connection without taking ownership of it (disposeConnection: false)
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        return connection.ExecuteScalar<int>(timeout: dbAdapterContainer.CommandTimeout, sqlFormat: CountQuery, parameters: new object[] { eventID });
    }
}
/// <summary>
/// Prepares the breaker operation: caches the adapter container, sets up the
/// phase lookup, creates the destination table and queue, and ensures the
/// breaker operation type lookup is initialized.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
public override void Prepare(DbAdapterContainer dbAdapterContainer)
{
    // Cache the container for later use by Execute/Load
    m_dbAdapterContainer = dbAdapterContainer;

    // Phases are looked up (and lazily created) by name via the meter info context
    m_phaseLookup = new DataContextLookup<string, Phase>(dbAdapterContainer.GetAdapter<MeterInfoDataContext>(), phase => phase.Name);

    m_breakerOperationTable = new MeterData.BreakerOperationDataTable();
    m_breakerOperations = new List<Tuple<EventKey, MeterData.BreakerOperationRow>>();

    LoadBreakerOperationTypes(dbAdapterContainer);
}
/// <summary>
/// Creates a chart generator for the given event.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
/// <param name="eventID">ID of the event to generate charts for.</param>
public ChartGenerator(DbAdapterContainer dbAdapterContainer, int eventID)
{
    m_eventID = eventID;
    m_dbAdapterContainer = dbAdapterContainer;

    // Defer expensive data retrieval until a chart actually needs it
    m_viDataGroup = new Lazy<VIDataGroup>(GetVIDataGroup);
    m_viCycleDataGroup = new Lazy<VICycleDataGroup>(GetVICycleDataGroup);
    m_faultCurveLookup = new Lazy<Dictionary<string, DataSeries>>(GetFaultCurveLookup);

    // Series are looked up by name, case-insensitively
    m_seriesLookup = new Dictionary<string, Lazy<DataSeries>>(StringComparer.OrdinalIgnoreCase);
    InitializeSeriesLookup();
}
/// <summary>
/// Queues every event in the file group for which no email has been sent yet.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
/// <param name="meterDataSet">Meter data set whose file group's events are examined.</param>
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    Initialize(this);

    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();

    foreach (EventRow evt in eventAdapter.GetDataByFileGroup(meterDataSet.FileGroup.ID))
    {
        // Only queue events that have not already triggered an email
        bool alreadySent = GetEmailCount(dbAdapterContainer, evt.ID) > 0;

        if (!alreadySent)
            QueueEventID(evt.ID);
    }
}
/// <summary>
/// Creates a chart generator for the given event. Data retrieval is deferred
/// via <see cref="Lazy{T}"/> until a chart actually requires it.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
/// <param name="eventID">ID of the event to generate charts for.</param>
public ChartGenerator(DbAdapterContainer dbAdapterContainer, int eventID)
{
    m_dbAdapterContainer = dbAdapterContainer;
    m_eventID = eventID;

    m_viDataGroup = new Lazy<VIDataGroup>(GetVIDataGroup);
    m_viCycleDataGroup = new Lazy<VICycleDataGroup>(GetVICycleDataGroup);
    m_faultCurveLookup = new Lazy<Dictionary<string, DataSeries>>(GetFaultCurveLookup);

    // Case-insensitive lookup of data series by name
    m_seriesLookup = new Dictionary<string, Lazy<DataSeries>>(comparer: StringComparer.OrdinalIgnoreCase);

    InitializeSeriesLookup();
}
/// <summary>
/// Merges the hourly summary and channel normal tables into the database.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and command timeout.</param>
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    // Nothing to do when no summary data was produced
    if (m_hourlySummaryTable.Count == 0 && m_channelNormalTable.Count == 0)
        return;

    Log.Info("Loading hourly summary data into the database...");

    BulkLoader hourlySummaryLoader = new BulkLoader();
    BulkLoader channelNormalLoader = new BulkLoader();

    hourlySummaryLoader.Connection = dbAdapterContainer.Connection;
    hourlySummaryLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
    channelNormalLoader.Connection = dbAdapterContainer.Connection;
    channelNormalLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

    // Hourly summaries: extrema are replaced when exceeded, averages are merged
    // as a count-weighted mean; a target row with ValidCount = 0 is treated as
    // empty and simply overwritten by the source values
    hourlySummaryLoader.MergeTableFormat =
        "MERGE INTO {0} WITH (TABLOCK) AS Target " +
        "USING {1} AS Source " +
        "ON Source.ChannelID = Target.ChannelID AND Source.Time = Target.Time " +
        "WHEN MATCHED THEN " +
        "    UPDATE SET " +
        "        Maximum = CASE WHEN Target.ValidCount = 0 OR Source.Maximum > Target.Maximum THEN Source.Maximum ELSE Target.Maximum END, " +
        "        Minimum = CASE WHEN Target.ValidCount = 0 OR Source.Minimum < Target.Minimum THEN Source.Minimum ELSE Target.Minimum END, " +
        "        Average = CASE WHEN Target.ValidCount = 0 THEN Source.Average ELSE Target.Average * (CAST(Target.ValidCount AS FLOAT) / (Target.ValidCount + Source.ValidCount)) + Source.Average * (CAST(Source.ValidCount AS FLOAT) / (Target.ValidCount + Source.ValidCount)) END, " +
        "        ValidCount = Source.ValidCount + Target.ValidCount, " +
        "        InvalidCount = Source.InvalidCount + Target.InvalidCount " +
        "WHEN NOT MATCHED THEN " +
        "    INSERT (ChannelID, Time, Maximum, Minimum, Average, ValidCount, InvalidCount) " +
        "    VALUES (Source.ChannelID, Source.Time, Source.Maximum, Source.Minimum, Source.Average, Source.ValidCount, Source.InvalidCount);";

    // Channel normals: Average and MeanSquare are merged as count-weighted
    // means; StandardDeviation is recomputed as SQRT(E[x^2] - E[x]^2).
    // FIX: the MATCHED branch previously stored the variance (no SQRT) in the
    // StandardDeviation column while the NOT MATCHED branch stored the square
    // root; SQRT is now applied consistently in both branches.
    channelNormalLoader.MergeTableFormat =
        "MERGE INTO {0} AS Target " +
        "USING {1} AS Source " +
        "ON Target.ChannelID = Source.ChannelID " +
        "WHEN MATCHED THEN " +
        "    UPDATE SET " +
        "        Average = Target.Average * (CAST(Target.Count AS FLOAT) / (Target.Count + Source.Count)) + Source.Average * (CAST(Source.Count AS FLOAT) / (Target.Count + Source.Count)), " +
        "        MeanSquare = Target.MeanSquare * (CAST(Target.Count AS FLOAT) / (Target.Count + Source.Count)) + Source.MeanSquare * (CAST(Source.Count AS FLOAT) / (Target.Count + Source.Count)), " +
        "        StandardDeviation = SQRT(Target.MeanSquare * (CAST(Target.Count AS FLOAT) / (Target.Count + Source.Count)) + Source.MeanSquare * (CAST(Source.Count AS FLOAT) / (Target.Count + Source.Count)) - POWER(Target.Average * (CAST(Target.Count AS FLOAT) / (Target.Count + Source.Count)) + Source.Average * (CAST(Source.Count AS FLOAT) / (Target.Count + Source.Count)), 2)), " +
        "        Count = Target.Count + Source.Count " +
        "WHEN NOT MATCHED THEN " +
        "    INSERT (ChannelID, Average, MeanSquare, StandardDeviation, Count) " +
        "    VALUES (Source.ChannelID, Source.Average, Source.MeanSquare, SQRT(Source.MeanSquare - Source.Average * Source.Average), Source.Count); ";

    hourlySummaryLoader.Load(m_hourlySummaryTable);
    channelNormalLoader.Load(m_channelNormalTable);

    Log.Info(string.Format("Loaded {0} hourly summary records into the database.", m_hourlySummaryTable.Count));
}
/// <summary>
/// Ensures the static breaker operation type lookup has been initialized,
/// querying the database at most once per process.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters used to build the lookup.</param>
private void LoadBreakerOperationTypes(DbAdapterContainer dbAdapterContainer)
{
    // Double-checked locking: the first test avoids taking the lock on the
    // common already-initialized path; the second test prevents redundant
    // initialization when multiple threads race to acquire the lock.
    if ((object)s_breakerOperationTypeLookup == null)
    {
        lock (BreakerOperationTypeLookupLock)
        {
            if ((object)s_breakerOperationTypeLookup == null)
            {
                s_breakerOperationTypeLookup = GetBreakerOperationTypeLookup(dbAdapterContainer);
            }
        }
    }
}
/// <summary>
/// Ensures the static event type lookup has been initialized,
/// querying the database at most once per process.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters used to build the lookup.</param>
private void LoadEventTypes(DbAdapterContainer dbAdapterContainer)
{
    // Double-checked locking: the first test avoids taking the lock on the
    // common already-initialized path; the second test prevents redundant
    // initialization when multiple threads race to acquire the lock.
    if ((object)s_eventTypeLookup == null)
    {
        lock (s_eventTypeLock)
        {
            if ((object)s_eventTypeLookup == null)
            {
                s_eventTypeLookup = GetEventTypeLookup(dbAdapterContainer);
            }
        }
    }
}
/// <summary>
/// Writes one .dat results file per data group for which fault data exists,
/// packaging the group's data, cycle data, faults, and output channels into
/// an <see cref="EventDataSet"/>.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
/// <param name="meterDataSet">Meter data set whose fault results are written.</param>
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    CycleDataResource cycleDataResource;
    FaultDataResource faultDataResource;
    DataGroup dataGroup;
    FaultGroup faultGroup;
    List<int> seriesIDs;
    EventDataSet eventDataSet;
    string rootFileName;
    string fileName;

    // Resources are cached on the meter data set, so repeated writers
    // share the same cycle/fault analysis results
    cycleDataResource = meterDataSet.GetResource(() => CycleDataResource.GetResource(meterDataSet, dbAdapterContainer));
    faultDataResource = meterDataSet.GetResource(() => new FaultDataResource(dbAdapterContainer));

    if (!Directory.Exists(m_resultsPath))
    {
        Directory.CreateDirectory(m_resultsPath);
    }

    for (int i = 0; i < cycleDataResource.DataGroups.Count; i++)
    {
        dataGroup = cycleDataResource.DataGroups[i];

        // Only data groups with fault data produce a results file
        if (faultDataResource.FaultLookup.TryGetValue(dataGroup, out faultGroup))
        {
            // File name: <source file>,<group index>,Line<asset key>.dat
            rootFileName = FilePath.GetFileNameWithoutExtension(meterDataSet.FilePath);
            fileName = string.Format("{0},{1:000},Line{2}.dat", rootFileName, i, dataGroup.Line.AssetKey);

            // Series IDs restrict the output channels to those present in this group
            seriesIDs = dataGroup.DataSeries
                .Select(series => series.SeriesInfo.ID)
                .ToList();

            eventDataSet = new EventDataSet()
            {
                ResultsPath = Path.Combine(m_resultsPath, fileName),
                MeterDataSet = meterDataSet,
                TimeZoneOffset = GetTimeZoneOffset(meterDataSet.Meter.TimeZone, dataGroup.StartTime),
                DataGroup = dataGroup,
                VICycleDataGroup = cycleDataResource.VICycleDataGroups[i],
                Faults = faultGroup.Faults,
                OutputChannels = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>().OutputChannels.Where(channel => seriesIDs.Contains(channel.SeriesID)).ToList()
            };

            WriteResults(eventDataSet);
        }
    }
}
/// <summary>
/// Prepares this operation: ensures the static segment type lookup has been
/// initialized (at most once per process) and caches the adapter container.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
public override void Prepare(DbAdapterContainer dbAdapterContainer)
{
    // Double-checked locking: the first test avoids taking the lock on the
    // common already-initialized path; the second test prevents redundant
    // initialization when multiple threads race to acquire the lock.
    if ((object)s_segmentTypeLookup == null)
    {
        lock (SegmentTypeLookupLock)
        {
            if ((object)s_segmentTypeLookup == null)
            {
                InitializeSegmentTypeLookup(dbAdapterContainer);
            }
        }
    }

    m_dbAdapterContainer = dbAdapterContainer;
}
/// <summary>
/// Bulk-loads the accumulated alarm log records into the database.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection and command timeout.</param>
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    // Nothing to do when no alarm log records were produced
    if (m_alarmLogTable.Count == 0)
        return;

    Log.Info("Loading alarm data into the database...");

    // FIX: use the container passed to this method rather than the one cached
    // in Prepare, consistent with the other Load implementations; the loader
    // is also now constructed only after the early-return guard
    BulkLoader bulkLoader = new BulkLoader();
    bulkLoader.Connection = dbAdapterContainer.Connection;
    bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
    bulkLoader.Load(m_alarmLogTable);

    Log.Info(string.Format("Loaded {0} alarm log records into the database...", m_alarmLogTable.Count));
}
/// <summary>
/// Builds the static segment type lookup from the database, creating any
/// missing well-known segment types and submitting them in one batch.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
private void InitializeSegmentTypeLookup(DbAdapterContainer dbAdapterContainer)
{
    FaultLocationInfoDataContext faultLocationInfo = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>();

    // Factory that creates a missing segment type and queues it for insertion
    Func<string, SegmentType> segmentTypeFactory = name =>
    {
        SegmentType segmentType = new SegmentType()
        {
            Name = name,
            Description = name
        };

        faultLocationInfo.SegmentTypes.InsertOnSubmit(segmentType);

        return segmentType;
    };

    // Build the lookup from existing types; duplicates are tolerated
    // (first one wins) but logged
    s_segmentTypeLookup = faultLocationInfo.SegmentTypes
        .GroupBy(segmentType => segmentType.Name)
        .ToDictionary(grouping => grouping.Key, grouping =>
        {
            if (grouping.Count() > 1)
                Log.Warn($"Duplicate segment type found: {grouping.Key}");

            return grouping.First();
        });

    // Make sure every well-known segment type exists
    string[] requiredSegmentTypes =
    {
        "Fault",
        "AN Fault",
        "BN Fault",
        "CN Fault",
        "AB Fault",
        "BC Fault",
        "CA Fault",
        "ABG Fault",
        "BCG Fault",
        "CAG Fault",
        "3-Phase Fault"
    };

    foreach (string requiredSegmentType in requiredSegmentTypes)
        s_segmentTypeLookup.GetOrAdd(requiredSegmentType, segmentTypeFactory);

    faultLocationInfo.SubmitChanges();
}
/// <summary>
/// Associates summarized fault rows with their corresponding event records
/// and copies the matched rows into the typed destination tables.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
public void FillTables(DbAdapterContainer dbAdapterContainer)
{
    MeterData.EventDataTable eventTable;
    Dictionary<EventKey, MeterData.EventRow> eventLookup;
    MeterData.EventRow eventRow;

    eventTable = dbAdapterContainer.GetAdapter<EventTableAdapter>().GetDataByFileGroup(MeterDataSet.FileGroup.ID);

    // FIX: build the lookup via GroupBy so duplicate events with the same
    // event key no longer throw an ArgumentException from ToDictionary;
    // the first event in each group wins
    eventLookup = eventTable
        .Where(evt => evt.MeterID == MeterDataSet.Meter.ID)
        .GroupBy(CreateEventKey)
        .ToDictionary(grouping => grouping.Key, grouping => grouping.First());

    // For each category of fault row, stamp rows whose event key matches a
    // known event with that event's ID and add them to the destination table;
    // unmatched rows are silently dropped

    foreach (Tuple<EventKey, FaultLocationData.FaultGroupRow> faultGroup in m_faultGroupList)
    {
        if (eventLookup.TryGetValue(faultGroup.Item1, out eventRow))
        {
            faultGroup.Item2.EventID = eventRow.ID;
            FaultGroupTable.AddFaultGroupRow(faultGroup.Item2);
        }
    }

    foreach (Tuple<EventKey, FaultLocationData.FaultSegmentRow> faultSegment in m_faultSegmentList)
    {
        if (eventLookup.TryGetValue(faultSegment.Item1, out eventRow))
        {
            faultSegment.Item2.EventID = eventRow.ID;
            FaultSegmentTable.AddFaultSegmentRow(faultSegment.Item2);
        }
    }

    foreach (Tuple<EventKey, FaultLocationData.FaultCurveRow> faultCurve in m_faultCurveList)
    {
        if (eventLookup.TryGetValue(faultCurve.Item1, out eventRow))
        {
            faultCurve.Item2.EventID = eventRow.ID;
            FaultCurveTable.AddFaultCurveRow(faultCurve.Item2);
        }
    }

    foreach (Tuple<EventKey, FaultLocationData.FaultSummaryRow> faultSummary in m_faultSummaryList)
    {
        if (eventLookup.TryGetValue(faultSummary.Item1, out eventRow))
        {
            faultSummary.Item2.EventID = eventRow.ID;
            FaultSummaryTable.AddFaultSummaryRow(faultSummary.Item2);
        }
    }
}
/// <summary>
/// Summarizes fault data into the typed tables and submits them to the
/// database via <see cref="SqlBulkCopy"/>.
/// </summary>
/// <param name="dbAdapterContainer">Container providing the database connection.</param>
public override void Load(DbAdapterContainer dbAdapterContainer)
{
    m_faultSummarizer.FillTables(dbAdapterContainer);

    FaultLocationData.FaultGroupDataTable faultGroupTable = m_faultSummarizer.FaultGroupTable;
    FaultLocationData.FaultSegmentDataTable faultSegmentTable = m_faultSummarizer.FaultSegmentTable;
    FaultLocationData.FaultCurveDataTable faultCurveTable = m_faultSummarizer.FaultCurveTable;
    FaultLocationData.FaultSummaryDataTable faultSummaryTable = m_faultSummarizer.FaultSummaryTable;

    // Nothing to do when the summarizer produced no fault data
    if (faultSegmentTable.Count == 0 && faultCurveTable.Count == 0 && faultSummaryTable.Count == 0)
        return;

    Log.Info("Loading fault data into the database...");

    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(dbAdapterContainer.Connection))
    {
        // A timeout of zero means wait indefinitely
        bulkCopy.BulkCopyTimeout = 0;

        // Submit fault groups to the database
        bulkCopy.DestinationTableName = faultGroupTable.TableName;
        bulkCopy.WriteToServer(faultGroupTable);

        // Submit fault segments to the database
        bulkCopy.DestinationTableName = faultSegmentTable.TableName;
        bulkCopy.WriteToServer(faultSegmentTable);

        // Submit fault curves to the database
        bulkCopy.DestinationTableName = faultCurveTable.TableName;
        bulkCopy.WriteToServer(faultCurveTable);

        // Submit fault summary records to the database
        bulkCopy.DestinationTableName = faultSummaryTable.TableName;
        bulkCopy.WriteToServer(faultSummaryTable);
    }

    Log.Info(string.Format("Loaded {0} faults into the database.", faultSummaryTable.Count));
}
// Static Methods

/// <summary>
/// Synchronizes the static email configuration and database container with
/// the given writer's settings, deferring all mutation onto the process
/// queue so it is serialized with other email work.
/// </summary>
/// <param name="writer">Writer whose settings are pushed into the static state.</param>
private static void Initialize(EventEmailWriter writer)
{
    // Compare every piece of static configuration against the writer's
    // current settings to decide whether a refresh is needed
    bool configurationChanged =
        s_timeTolerance != writer.TimeTolerance ||
        s_smtpServer != writer.EmailSettings.SMTPServer ||
        s_fromAddress != writer.EmailSettings.FromAddress ||
        s_username != writer.EmailSettings.Username ||
        s_password != writer.EmailSettings.SecurePassword ||
        s_enableSSL != writer.EmailSettings.EnableSSL ||
        s_waitPeriod != TimeSpan.FromSeconds(writer.FaultEmailSettings.WaitPeriod) ||
        s_timeZone.Id != writer.XDATimeZone;

    if (configurationChanged)
    {
        // Apply the new configuration on the process queue so it cannot
        // interleave with in-flight email operations
        ProcessQueue.Add(() =>
        {
            s_timeTolerance = writer.TimeTolerance;
            s_smtpServer = writer.EmailSettings.SMTPServer;
            s_fromAddress = writer.EmailSettings.FromAddress;
            s_username = writer.EmailSettings.Username;
            s_password = writer.EmailSettings.SecurePassword;
            s_enableSSL = writer.EmailSettings.EnableSSL;
            s_waitPeriod = TimeSpan.FromSeconds(writer.FaultEmailSettings.WaitPeriod);
            s_timeZone = TimeZoneInfo.FindSystemTimeZoneById(writer.XDATimeZone);
        });
    }

    if ((object)s_dbAdapterContainer == null)
    {
        ProcessQueue.Add(() =>
        {
            // Re-check on the queue: another queued action may have created
            // the container (and opened its connection) in the meantime
            if ((object)s_dbAdapterContainer != null && s_dbAdapterContainer.Connection.State.HasFlag(ConnectionState.Open))
            {
                return;
            }

            // Dispose the stale container (if any) while swapping in a new
            // one; 'using' on the old reference handles the null case too
            using (s_dbAdapterContainer)
            {
                s_dbAdapterContainer = new DbAdapterContainer(writer.DbConnectionString);
            }
        });
    }
}
/// <summary>
/// Locates this fault's "Fault" segment in the database and, when found,
/// initializes the sample range, cycle data subset, and curve start indexes.
/// No state is modified when no matching segment exists.
/// </summary>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
/// <param name="viCycleDataGroup">Cycle data for the event.</param>
/// <param name="systemFrequency">Nominal system frequency used to compute samples per cycle.</param>
public void Initialize(DbAdapterContainer dbAdapterContainer, VICycleDataGroup viCycleDataGroup, double systemFrequency)
{
    int samplesPerCycle = Transform.CalculateSamplesPerCycle(viCycleDataGroup.VA.RMS, systemFrequency);

    // Find the fault segment that starts at this fault's inception
    FaultSegment faultSegment = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>().FaultSegments
        .Where(segment => segment.EventID == Fault.EventID)
        .Where(segment => segment.StartTime == Fault.Inception)
        .FirstOrDefault(segment => segment.SegmentType.Name == "Fault");

    if ((object)faultSegment == null)
        return;

    StartSample = faultSegment.StartSample;
    EndSample = faultSegment.EndSample - samplesPerCycle + 1;
    CycleDataGroup = Rotate(viCycleDataGroup.ToSubSet(StartSample, EndSample));
    DistanceCurve.StartIndex = StartSample;
    AngleCurve.StartIndex = StartSample;
}
/// <summary>
/// Initializes disturbance detection: pulls configuration from the
/// connection string, then scans every data group in the cycle data
/// resource for disturbances.
/// </summary>
/// <param name="meterDataSet">Meter data set to analyze.</param>
/// <param name="dbAdapterContainer">Container providing database adapters.</param>
public void Initialize(MeterDataSet meterDataSet, DbAdapterContainer dbAdapterContainer)
{
    // Push configuration from the connection string into this instance
    ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, this);

    m_disturbances = new Dictionary<DataGroup, List<Disturbance>>();

    CycleDataResource cycleDataResource = CycleDataResource.GetResource(meterDataSet, dbAdapterContainer);

    // Scan each data group alongside its corresponding cycle data
    for (int index = 0; index < cycleDataResource.DataGroups.Count; index++)
        DetectDisturbances(cycleDataResource.DataGroups[index], cycleDataResource.VICycleDataGroups[index]);
}
/// <summary>
/// On the initial GET, reads the eventId query parameter and populates the
/// posted* fields with the event's date, meter ID, and meter name.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    if (IsPostBack)
        return;

    if (Request["eventId"] == null)
        return;

    postedEventId = Request["eventId"];

    // FIX: the DbAdapterContainer was previously never disposed — only the
    // adapter it produced was. Dispose the container itself so its connection
    // is released. (Assumes the container owns and disposes the adapters it
    // creates — TODO confirm against DbAdapterContainer.Dispose.)
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(connectionstring))
    using (MeterInfoDataContext meterInfo = new MeterInfoDataContext(connectionstring))
    {
        try
        {
            MeterDataQualitySummaryTableAdapter meterdataqualityAdapter = dbAdapterContainer.GetAdapter<MeterDataQualitySummaryTableAdapter>();

            DataQuality.MeterDataQualitySummaryRow theevent = meterdataqualityAdapter.GetDataByID(Convert.ToInt32(postedEventId)).First();
            Meter themeter = meterInfo.Meters.Single(m => m.ID == theevent.MeterID);

            postedDate = theevent.Date.ToShortDateString();
            postedMeterId = theevent.MeterID.ToString();
            postedMeterName = themeter.Name;
        }
        catch (Exception)
        {
            // Lookup or parse failure: clear the posted fields so the page
            // renders without event details (original behavior preserved)
            postedDate = "";
            postedEventId = "";
            postedMeterId = "";
            postedMeterName = "";
        }
    }
}
// Instantiates and executes data operations and data writers to process the meter data set.
// Each phase (prepare, execute, load, write) isolates failures: a failing
// operation/writer is logged via OnProcessException and the rest continue.
private void ProcessMeterDataSet(MeterDataSet meterDataSet, SystemSettings systemSettings, DbAdapterContainer dbAdapterContainer)
{
    SystemInfoDataContext systemInfo;
    List<DataOperationWrapper> dataOperations;
    List<DataWriterWrapper> dataWriters;

    // Get the SystemInfoDataContext from the dbAdapterContainer
    systemInfo = dbAdapterContainer.GetAdapter<SystemInfoDataContext>();

    // Load the data operations from the database,
    // in descending order so we can remove records while we iterate
    dataOperations = systemInfo.DataOperations
        .OrderByDescending(dataOperation => dataOperation.LoadOrder)
        .Select(Wrap)
        .Where(wrapper => (object)wrapper != null)
        .ToList();

    for (int i = dataOperations.Count - 1; i >= 0; i--)
    {
        try
        {
            Log.Debug($"Preparing data operation '{dataOperations[i].DataObject.GetType().Name}' for execution...");

            // Load configuration parameters from the connection string into the data operation
            ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, dataOperations[i].DataObject);

            // Call the prepare method to allow the data operation to prepare any data it needs from the database
            dataOperations[i].DataObject.Prepare(dbAdapterContainer);

            Log.Debug($"Finished preparing data operation '{dataOperations[i].DataObject.GetType().Name}' for execution.");
        }
        catch (Exception ex)
        {
            // Log the error and remove the data operation from the list
            string message = $"An error occurred while preparing data from meter '{meterDataSet.Meter.AssetKey}' for data operation of type '{dataOperations[i].DataObject.GetType().FullName}': {ex.Message}";
            OnProcessException(new Exception(message, ex));
            dataOperations[i].Dispose();
            dataOperations.RemoveAt(i);
        }
    }

    for (int i = dataOperations.Count - 1; i >= 0; i--)
    {
        try
        {
            Log.Debug($"Executing data operation '{dataOperations[i].DataObject.GetType().Name}'...");

            // Call the execute method on the data operation to perform in-memory data transformations
            dataOperations[i].DataObject.Execute(meterDataSet);

            // FIX: log message previously read "Finished execurting ..."
            Log.Debug($"Finished executing data operation '{dataOperations[i].DataObject.GetType().Name}'.");
        }
        catch (Exception ex)
        {
            // Log the error and skip to the next data operation
            // (a failed execution must not be followed by a load)
            string message = $"An error occurred while executing data operation of type '{dataOperations[i].DataObject.GetType().FullName}' on data from meter '{meterDataSet.Meter.AssetKey}': {ex.Message}";
            OnProcessException(new Exception(message, ex));
            continue;
        }

        try
        {
            Log.Debug($"Loading data from data operation '{dataOperations[i].DataObject.GetType().Name}' into database...");

            // Call the load method inside a transaction to load data from the data operation into the database
            using (TransactionScope transaction = new TransactionScope(TransactionScopeOption.Required, GetTransactionOptions()))
            {
                dataOperations[i].DataObject.Load(dbAdapterContainer);
                transaction.Complete();
            }

            Log.Debug($"Finished loading data from data operation '{dataOperations[i].DataObject.GetType().Name}' into database.");
        }
        catch (Exception ex)
        {
            // Log the error and move on to the next data operation
            string message = $"An error occurred while loading data from data operation of type '{dataOperations[i].DataObject.GetType().FullName}' for data from meter '{meterDataSet.Meter.AssetKey}': {ex.Message}";
            OnProcessException(new Exception(message, ex));
        }
    }

    // All data operations are complete, but we still need to clean up
    for (int i = dataOperations.Count - 1; i >= 0; i--)
        dataOperations[i].Dispose();

    // Load the data writers from the database
    dataWriters = systemInfo.DataWriters
        .OrderBy(dataWriter => dataWriter.LoadOrder)
        .Select(Wrap)
        .Where(wrapper => (object)wrapper != null)
        .ToList();

    foreach (DataWriterWrapper dataWriter in dataWriters)
    {
        try
        {
            Log.Debug($"Writing results to external location with data writer '{dataWriter.DataObject.GetType().Name}'...");

            // Load configuration parameters from the connection string into the data writer
            ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, dataWriter.DataObject);

            // Write the results to the data writer's destination by calling the WriteResults method
            dataWriter.DataObject.WriteResults(dbAdapterContainer, meterDataSet);

            Log.Debug($"Finished writing results with data writer '{dataWriter.DataObject.GetType().Name}'.");
        }
        catch (Exception ex)
        {
            // Log the error and move on to the next data writer
            string message = $"An error occurred while writing data from meter '{meterDataSet.Meter.AssetKey}' using data writer of type '{dataWriter.DataObject.GetType().FullName}': {ex.Message}";
            OnProcessException(new Exception(message, ex));
        }
    }

    // All data writers are complete, but we still need to clean up
    foreach (DataWriterWrapper dataWriter in dataWriters)
        dataWriter.Dispose();
}
// Updates the Filter property of the FileProcessor with the
// latest collection of filters from the DataReader table.
private void UpdateFileProcessorFilter(SystemSettings systemSettings)
{
    // Nothing to update if the file processor has not been created yet
    if ((object)m_fileProcessor == null)
        return;

    List<string> filterPatterns;

    // Query the DataReader table for the file patterns openXDA should watch for
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
    {
        SystemInfoDataContext systemInfo = dbAdapterContainer.GetAdapter<SystemInfoDataContext>();

        filterPatterns = systemInfo.DataReaders
            .Select(reader => reader.FilePattern)
            .ToList();
    }

    // Combine the patterns into the single delimited filter string expected by the file processor
    m_fileProcessor.Filter = string.Join(Path.PathSeparator.ToString(), filterPatterns);
}
// Validates the file before invoking the file processing handler.
// Improves file processor performance by executing the filter in
// parallel and also by bypassing the set of processed files.
private bool PrevalidateFile(string filePath)
{
    try
    {
        SystemSettings systemSettings = new SystemSettings(LoadSystemSettings());

        // Reject files whose creation time falls outside the configured tolerance
        ValidateFileCreationTime(filePath, systemSettings.MaxFileCreationTimeOffset);

        // Reject files that cannot be matched to a meter configured in the database
        using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
        {
            string meterKey = GetMeterKey(filePath, systemSettings.FilePattern);
            ValidateMeterKey(filePath, meterKey, dbAdapterContainer.GetAdapter<MeterInfoDataContext>());
        }

        return true;
    }
    catch (FileSkippedException ex)
    {
        // This method may be called if the file was deleted,
        // in which case the user almost certainly doesn't care
        // why it was skipped for processing and logging the
        // error would only cause confusion
        if (File.Exists(filePath))
            Log.Warn(ex.Message);

        return false;
    }
}
// Processes the file to determine if it can be parsed and kicks off the meter's processing thread.
// Throws FileSkippedException when no data reader matches the file; sets Requeue on the event args
// when the matching reader cannot parse the file yet. Blocks the caller while the number of queued
// files is at or above the configured maximum, providing back-pressure on the file processor.
private void ProcessFile(FileProcessorEventArgs fileProcessorArgs, string connectionString, SystemSettings systemSettings, DbAdapterContainer dbAdapterContainer)
{
    string filePath;
    string meterKey;
    FileInfoDataContext fileInfo;
    SystemInfoDataContext systemInfo;
    DataReader dataReader;
    DataReaderWrapper dataReaderWrapper;
    FileWrapper fileWrapper;
    int queuedFileCount;

    filePath = fileProcessorArgs.FullPath;
    fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();

    // Determine whether the file has already been
    // processed or needs to be processed again
    if (fileProcessorArgs.AlreadyProcessed)
    {
        // Look up the most recent DataFile record for this exact path;
        // the hash filter narrows the query before the full string comparison
        DataFile dataFile = fileInfo.DataFiles
            .Where(file => file.FilePathHash == filePath.GetHashCode())
            .Where(file => file.FilePath == filePath)
            .MaxBy(file => file.ID);

        // This will tell us whether the service was stopped in the middle
        // of processing the last time it attempted to process the file
        if ((object)dataFile != null && dataFile.FileGroup.ProcessingEndTime > DateTime.MinValue)
        {
            Log.Debug($"Skipped file \"{filePath}\" because it has already been processed.");
            return;
        }
    }

    // Get the data reader that will be used to parse the file;
    // readers are tried in LoadOrder and the first pattern match wins
    systemInfo = dbAdapterContainer.GetAdapter<SystemInfoDataContext>();

    dataReader = systemInfo.DataReaders
        .OrderBy(reader => reader.LoadOrder)
        .AsEnumerable()
        .FirstOrDefault(reader => FilePath.IsFilePatternMatch(reader.FilePattern, filePath, true));

    if ((object)dataReader == null)
    {
        // Because the file processor is filtering files based on the DataReader file patterns,
        // this should only ever occur if the configuration changes during runtime
        UpdateFileProcessorFilter(systemSettings);
        throw new FileSkippedException($"Skipped file \"{filePath}\" because no data reader could be found to process the file.");
    }

    dataReaderWrapper = Wrap(dataReader);

    try
    {
        meterKey = null;

        // Determine whether the database contains configuration information for the meter that produced this file
        // NOTE(review): only readers that expose a MeterDataSet get a meter key here — confirm that readers
        // without one are intended to share the null-key processing thread
        if ((object)dataReaderWrapper.DataObject.MeterDataSet != null)
            meterKey = GetMeterKey(filePath, systemSettings.FilePattern);

        // Apply connection string settings to the data reader
        ConnectionStringParser.ParseConnectionString(connectionString, dataReaderWrapper.DataObject);

        // Get the file wrapper from the lookup table
        fileWrapper = m_fileWrapperLookup.GetOrAdd(filePath, path => new FileWrapper(path));

        // Determine whether the dataReader can parse the file;
        // if not, requeue so it is retried later (e.g. file still being written)
        if (!dataReaderWrapper.DataObject.CanParse(filePath, fileWrapper.GetMaxFileCreationTime()))
        {
            fileProcessorArgs.Requeue = true;
            dataReaderWrapper.Dispose();
            return;
        }

        // Get the thread used to process this data; ownership of
        // dataReaderWrapper transfers to ParseFile from this point on
        GetThread(meterKey).Push(() => ParseFile(connectionString, systemSettings, filePath, meterKey, dataReaderWrapper, fileWrapper));

        // Keep track of the number of operations in thread queues
        // NOTE(review): queuedFileCount is assigned but never read — presumably retained for debugging
        queuedFileCount = Interlocked.Increment(ref m_queuedFileCount);

        // Throttle: block this handler until the queue drains below the configured maximum
        while (!m_stopped && !m_disposed && m_queuedFileCount >= systemSettings.MaxQueuedFileCount)
            Thread.Sleep(1000);
    }
    catch
    {
        // If an error occurs here, dispose of the data reader;
        // otherwise, the meter data thread will handle it
        dataReaderWrapper.Dispose();
        throw;
    }
}
// Queues email notifications for every event in the file group
// that has not already had an email sent for it.
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    // Ensure the static email configuration reflects this writer's settings
    Initialize(this);

    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();

    foreach (EventRow evt in eventAdapter.GetDataByFileGroup(meterDataSet.FileGroup.ID))
    {
        bool alreadyEmailed = GetEmailCount(dbAdapterContainer, evt.ID) != 0;

        if (!alreadyEmailed)
            QueueEventID(evt.ID);
    }
}
// Parses the file on the meter's processing thread and kicks off processing of the meter data set.
// Runs on the per-meter thread pushed from ProcessFile; owns the data reader wrapper and is
// responsible for disposing it. The finally block persists the file group's processing end time
// even when parsing or processing fails.
private void ParseFile(string connectionString, SystemSettings systemSettings, string filePath, string meterKey, DataReaderWrapper dataReaderWrapper, FileWrapper fileWrapper)
{
    FileGroup fileGroup = null;
    MeterDataSet meterDataSet;
    int queuedFileCount;

    // Keep track of the number of operations in thread queues
    // NOTE(review): queuedFileCount is assigned but never read — presumably retained for debugging
    queuedFileCount = Interlocked.Decrement(ref m_queuedFileCount);

    // Bail out early during shutdown; the wrapper must still be disposed
    if (m_stopped || m_disposed)
    {
        dataReaderWrapper.Dispose();
        return;
    }

    using (dataReaderWrapper)
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
    {
        try
        {
            // Keep track of the meters and files currently being processed
            if ((object)meterKey != null)
                m_activeFiles[meterKey] = filePath;

            // Tag log output from this thread with the meter key
            ThreadContext.Properties["Meter"] = meterKey;

            // Create the file group
            fileGroup = fileWrapper.GetFileGroup(dbAdapterContainer.GetAdapter<FileInfoDataContext>(), systemSettings.XDATimeZoneInfo);

            // Parse the file to turn it into a meter data set
            OnStatusMessage($"Parsing data from file \"{filePath}\"...");
            dataReaderWrapper.DataObject.Parse(filePath);
            OnStatusMessage($"Finished parsing data from file \"{filePath}\".");

            meterDataSet = dataReaderWrapper.DataObject.MeterDataSet;

            // If the data reader does not return a data set,
            // there is nothing left to do
            if ((object)meterDataSet == null)
                return;

            // Data reader has finally outlived its usefulness
            // (disposing early releases the reader's resources before the
            // potentially long-running processing below; the outer using
            // makes the second Dispose call a harmless no-op)
            dataReaderWrapper.Dispose();

            // Set file path, file group, connection string,
            // and meter asset key for the meter data set
            meterDataSet.FilePath = filePath;
            meterDataSet.FileGroup = fileGroup;
            meterDataSet.ConnectionString = connectionString;
            meterDataSet.Meter.AssetKey = meterKey;

            // Shift date/time values to the configured time zone and set the start and end time values on the file group
            ShiftTime(meterDataSet, meterDataSet.Meter.GetTimeZoneInfo(systemSettings.DefaultMeterTimeZoneInfo), systemSettings.XDATimeZoneInfo);
            SetDataTimeRange(meterDataSet, dbAdapterContainer.GetAdapter<FileInfoDataContext>());

            // Determine whether the file duration is within a user-defined maximum tolerance
            ValidateFileDuration(meterDataSet.FilePath, systemSettings.MaxFileDuration, meterDataSet.FileGroup);

            // Determine whether the timestamps in the file extend beyond user-defined thresholds
            ValidateFileTimestamps(meterDataSet.FilePath, meterDataSet.FileGroup, systemSettings, dbAdapterContainer.GetAdapter<FileInfoDataContext>());

            // Process the meter data set
            OnStatusMessage($"Processing meter data from file \"{filePath}\"...");
            ProcessMeterDataSet(meterDataSet, systemSettings, dbAdapterContainer);
            OnStatusMessage($"Finished processing data from file \"{filePath}\".");
        }
        catch (Exception ex)
        {
            // There seems to be a problem here where the outer exception's call stack
            // was overwritten by the call stack of the point where it was thrown
            ExceptionDispatchInfo exInfo = ExceptionDispatchInfo.Capture(ex);

            try
            {
                // Attempt to set the error flag on the file group
                if ((object)fileGroup != null)
                    fileGroup.Error = 1;
            }
            catch (Exception fileGroupError)
            {
                // Log any exceptions that occur when attempting to set the error flag on the file group
                string message = $"Exception occurred setting error flag on file group: {fileGroupError.Message}";
                OnProcessException(new Exception(message, fileGroupError));
            }

            // Throw the original exception, preserving its original stack trace
            exInfo.Throw();
        }
        finally
        {
            if ((object)fileGroup != null)
            {
                try
                {
                    // Attempt to set the processing end time of the file group;
                    // this SubmitChanges also persists the Error flag set in the catch block
                    fileGroup.ProcessingEndTime = TimeZoneInfo.ConvertTimeFromUtc(DateTime.UtcNow, systemSettings.XDATimeZoneInfo);
                    dbAdapterContainer.GetAdapter<FileInfoDataContext>().SubmitChanges();
                }
                catch (Exception ex)
                {
                    // Log any exceptions that occur when attempting to set processing end time on the file group
                    string message = $"Exception occurred setting processing end time on file group: {ex.Message}";
                    OnProcessException(new Exception(message, ex));
                }
            }

            // Keep track of the meters and files currently being processed
            if ((object)meterKey != null)
                m_activeFiles.TryRemove(meterKey, out filePath);

            ThreadContext.Properties.Remove("Meter");
        }
    }
}
// Builds the list of plottable (flot) series descriptors for the given event:
// the event's waveform series, the matching cycle-data series, and any fault
// location curves. Returns an empty list when the event does not exist.
public static List<FlotSeries> GetFlotInfo(int eventID)
{
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();

        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();

        if ((object)eventRow == null)
            return new List<FlotSeries>();

        // Issue the waveform query once and reuse the result for both the
        // cycle-data lookup and the returned series (previously this query
        // was executed a second time when building the return value)
        List<Series> waveformInfo = GetWaveformInfo(meterInfo.Series, eventRow.MeterID, eventRow.LineID);

        // Distinct (measurement type, phase) pairs for which instantaneous
        // waveform data exists; used to filter the cycle data descriptors
        var lookup = waveformInfo
            .Where(info => info.Channel.MeasurementCharacteristic.Name == "Instantaneous")
            .Where(info => new string[] { "Instantaneous", "Values" }.Contains(info.SeriesType.Name))
            .Select(info => new { MeasurementType = info.Channel.MeasurementType.Name, Phase = info.Channel.Phase.Name })
            .Distinct()
            .ToDictionary(info => info);

        // Clone cycle data descriptors so callers can mutate them freely
        IEnumerable<FlotSeries> cycleDataInfo = CycleDataInfo
            .Where(info => lookup.ContainsKey(new { info.MeasurementType, info.Phase }))
            .Select(info => info.Clone());

        return waveformInfo
            .Select(ToFlotSeries)
            .Concat(cycleDataInfo)
            .Concat(GetFaultCurveInfo(connection, eventID).Select(ToFlotSeries))
            .ToList();
    }
}
// Rebuilds one meter data set per meter that contributed events
// to the given file group, restoring each event's time-domain series.
private List<MeterDataSet> LoadMeterDataSets(DbAdapterContainer dbAdapterContainer, FileGroup fileGroup)
{
    List<MeterDataSet> meterDataSets = new List<MeterDataSet>();

    MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
    EventDataTableAdapter eventDataAdapter = dbAdapterContainer.GetAdapter<EventDataTableAdapter>();

    MeterData.EventDataTable eventTable = eventAdapter.GetDataByFileGroup(fileGroup.ID);

    // Events are grouped by meter so each meter gets exactly one data set
    foreach (IGrouping<int, MeterData.EventRow> eventGroup in eventTable.GroupBy(evt => evt.MeterID))
    {
        MeterDataSet meterDataSet = new MeterDataSet();
        meterDataSet.Meter = meterInfo.Meters.SingleOrDefault(meter => meter.ID == eventGroup.Key);

        foreach (MeterData.EventRow evt in eventGroup)
        {
            // Reconstruct the time-domain data group for this event
            // and flatten its series into the meter data set
            DataGroup dataGroup = new DataGroup();
            dataGroup.FromData(meterDataSet.Meter, eventDataAdapter.GetTimeDomainData(evt.EventDataID));

            foreach (DataSeries dataSeries in dataGroup.DataSeries)
                meterDataSet.DataSeries.Add(dataSeries);
        }

        meterDataSets.Add(meterDataSet);
    }

    return meterDataSets;
}
/// <summary>
/// Reloads system configuration from configuration sources.
/// </summary>
public void ReloadConfiguration()
{
    // If system settings is null, attempt to reload system settings
    if ((object)m_systemSettings == null)
        ReloadSystemSettings();

    // If system settings is still null, give up
    if ((object)m_systemSettings == null)
        return;

    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(m_systemSettings.DbConnectionString))
    {
        SystemInfoDataContext systemInfo = dbAdapterContainer.GetAdapter<SystemInfoDataContext>();

        // Resolve the configured loader types in load order, keeping only
        // IConfigurationLoader implementations with a default constructor
        List<Type> types = systemInfo.ConfigurationLoaders
            .OrderBy(configLoader => configLoader.LoadOrder)
            .AsEnumerable()
            .Select(configLoader => LoadType(configLoader.AssemblyName, configLoader.TypeName))
            .Where(type => (object)type != null)
            .Where(type => typeof(IConfigurationLoader).IsAssignableFrom(type))
            .Where(type => (object)type.GetConstructor(Type.EmptyTypes) != null)
            .ToList();

        string connectionString = LoadSystemSettings(systemInfo);

        foreach (Type type in types)
        {
            try
            {
                OnStatusMessage("[{0}] Loading configuration...", type.Name);

                // Instantiate the loader, feed it the system settings,
                // and let it update the configuration in the database
                IConfigurationLoader configurationLoader = (IConfigurationLoader)Activator.CreateInstance(type);
                ConnectionStringParser.ParseConnectionString(connectionString, configurationLoader);
                configurationLoader.UpdateConfiguration(dbAdapterContainer);

                OnStatusMessage("[{0}] Done loading configuration.", type.Name);
            }
            catch (Exception ex)
            {
                // A failing loader is logged and skipped so the remaining loaders still run
                string message = string.Format("[{0}] Unable to update configuration due to exception: {1}", type.Name, ex.Message);
                OnProcessException(new InvalidOperationException(message, ex));
            }
        }
    }
}
// Static Methods

// Renders the chart described by the given XML element into a PNG image stream.
// Chart parameters (event/fault IDs, pre/post-fault cycle counts, axis limits,
// dimensions, calculation-cycle highlighting) are read from the element's attributes.
public static Stream ConvertToChartImageStream(DbAdapterContainer dbAdapterContainer, XElement chartElement)
{
    ChartGenerator chartGenerator;
    Lazy<DataRow> faultSummary;
    Lazy<double> systemFrequency;
    DateTime inception;
    DateTime clearing;
    int width;
    int height;
    double prefaultCycles;
    double postfaultCycles;
    string title;
    List<string> keys;
    List<string> names;
    DateTime startTime;
    DateTime endTime;
    int eventID;
    int faultID;

    // Read parameters from the XML data and set up defaults
    eventID = Convert.ToInt32((string)chartElement.Attribute("eventID") ?? "-1");
    faultID = Convert.ToInt32((string)chartElement.Attribute("faultID") ?? "-1");
    prefaultCycles = Convert.ToDouble((string)chartElement.Attribute("prefaultCycles") ?? "NaN");
    postfaultCycles = Convert.ToDouble((string)chartElement.Attribute("postfaultCycles") ?? "NaN");
    title = (string)chartElement.Attribute("yAxisTitle");
    keys = GetKeys(chartElement);
    names = GetNames(chartElement);
    width = Convert.ToInt32((string)chartElement.Attribute("width"));
    height = Convert.ToInt32((string)chartElement.Attribute("height"));
    startTime = DateTime.MinValue;
    endTime = DateTime.MaxValue;

    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        // Lazy so the database is only queried if a parameter below actually needs the value
        faultSummary = new Lazy<DataRow>(() => connection.RetrieveData("SELECT * FROM FaultSummary WHERE ID = {0}", faultID).Select().FirstOrDefault());
        systemFrequency = new Lazy<double>(() => connection.ExecuteScalar(60.0D, "SELECT Value FROM Setting WHERE Name = 'SystemFrequency'"));

        // If prefaultCycles is specified and we have a fault summary we can use,
        // we can determine the start time of the chart based on fault inception
        if (!double.IsNaN(prefaultCycles) && (object)faultSummary.Value != null)
        {
            inception = faultSummary.Value.ConvertField<DateTime>("Inception");
            startTime = inception.AddSeconds(-prefaultCycles / systemFrequency.Value);
        }

        // If postfaultCycles is specified and we have a fault summary we can use,
        // we can determine the end time of the chart based on fault clearing
        if (!double.IsNaN(postfaultCycles) && (object)faultSummary.Value != null)
        {
            inception = faultSummary.Value.ConvertField<DateTime>("Inception");
            clearing = inception.AddSeconds(faultSummary.Value.ConvertField<double>("DurationSeconds"));
            endTime = clearing.AddSeconds(postfaultCycles / systemFrequency.Value);
        }

        // Create the chart generator to generate the chart
        chartGenerator = new ChartGenerator(dbAdapterContainer, eventID);

        using (Chart chart = chartGenerator.GenerateChart(title, keys, names, startTime, endTime))
        {
            // Set the chart size based on the specified width and height;
            // this allows us to dynamically change font sizes and line
            // widths before converting the chart to an image
            SetChartSize(chart, width, height);

            // Determine if either the minimum or maximum of the y-axis is specified explicitly
            if ((object)chartElement.Attribute("yAxisMaximum") != null)
                chart.ChartAreas[0].AxisY.Maximum = Convert.ToDouble((string)chartElement.Attribute("yAxisMaximum"));

            if ((object)chartElement.Attribute("yAxisMinimum") != null)
                chart.ChartAreas[0].AxisY.Minimum = Convert.ToDouble((string)chartElement.Attribute("yAxisMinimum"));

            // If the calculation cycle is to be highlighted, determine whether the highlight should be in the range of a single index or a full cycle.
            // If we have a fault summary we can use, apply the appropriate highlight based on the calculation cycle
            if (string.Equals((string)chartElement.Attribute("highlightCalculation"), "index", StringComparison.OrdinalIgnoreCase))
            {
                if ((object)faultSummary.Value != null)
                {
                    int calculationCycle = faultSummary.Value.ConvertField<int>("CalculationCycle");
                    DateTime calculationTime = chartGenerator.ToDateTime(calculationCycle);
                    double calculationPosition = chart.ChartAreas[0].AxisX.Minimum + (calculationTime - startTime).TotalSeconds;
                    chart.ChartAreas[0].CursorX.Position = calculationPosition;
                }
            }
            else if (string.Equals((string)chartElement.Attribute("highlightCalculation"), "cycle", StringComparison.OrdinalIgnoreCase))
            {
                if ((object)faultSummary.Value != null)
                {
                    int calculationCycle = faultSummary.Value.ConvertField<int>("CalculationCycle");
                    DateTime calculationTime = chartGenerator.ToDateTime(calculationCycle);
                    double calculationPosition = chart.ChartAreas[0].AxisX.Minimum + (calculationTime - startTime).TotalSeconds;
                    chart.ChartAreas[0].CursorX.SelectionStart = calculationPosition;
                    // NOTE(review): selection width hard-codes a 60 Hz cycle (1/60 s) rather than
                    // using systemFrequency — confirm this is intentional
                    chart.ChartAreas[0].CursorX.SelectionEnd = calculationPosition + 1.0D / 60.0D;
                }
            }

            // Convert the generated chart to an image
            return ConvertToImageStream(chart, ChartImageFormat.Png);
        }
    }
}
// Runs the processing pipeline over each meter data set, then stamps
// every file group with the processing end time in the XDA time zone.
// Exceptions are routed to the exception handler rather than propagated.
public void ProcessMeterDataSets(List<MeterDataSet> meterDataSets, SystemSettings systemSettings, DbAdapterContainer dbAdapterContainer)
{
    try
    {
        foreach (MeterDataSet meterDataSet in meterDataSets)
            ProcessMeterData(meterDataSet, dbAdapterContainer);

        // All file groups share the same end time, converted to the configured time zone
        TimeZoneInfo xdaTimeZone = systemSettings.XDATimeZoneInfo;
        DateTime processingEndTime = TimeZoneInfo.ConvertTimeFromUtc(DateTime.UtcNow, xdaTimeZone);

        foreach (MeterDataSet meterDataSet in meterDataSets)
            meterDataSet.FileGroup.ProcessingEndTime = processingEndTime;

        dbAdapterContainer.GetAdapter<FileInfoDataContext>().SubmitChanges();
    }
    catch (Exception ex)
    {
        OnHandleException(ex);
    }
}
// Creates the sandbox data operation, runs it against the meter data
// set, and loads its results into the database in a single transaction.
private void ExecuteDataOperation(MeterDataSet meterDataSet, DbAdapterContainer dbAdapterContainer)
{
    IDataOperation dataOperation = null;

    try
    {
        // Create the data operation and configure it with the
        // system settings carried by the meter data set
        dataOperation = new openEASSandBoxOperation();
        ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, dataOperation);

        // Prepare, then execute the in-memory data transformations
        dataOperation.Prepare(dbAdapterContainer);
        dataOperation.Execute(meterDataSet);

        // Persist the operation's results inside a single transaction
        using (TransactionScope transactionScope = new TransactionScope(TransactionScopeOption.Required, GetTransactionOptions()))
        {
            dataOperation.Load(dbAdapterContainer);
            transactionScope.Complete();
        }
    }
    finally
    {
        // ReSharper disable once SuspiciousTypeConversion.Global
        if ((object)dataOperation != null)
            TryDispose(dataOperation as IDisposable);
    }
}
// Static Methods

// Synchronizes the static email configuration with the given writer's
// settings and lazily creates the shared database adapter container.
private static void Initialize(EventEmailWriter writer)
{
    var emailSettings = writer.EmailSettings;

    // Determine whether any cached static setting differs from this writer's configuration
    bool configurationChanged =
        s_timeTolerance != writer.TimeTolerance ||
        s_smtpServer != emailSettings.SMTPServer ||
        s_fromAddress != emailSettings.FromAddress ||
        s_username != emailSettings.Username ||
        s_password != emailSettings.SecurePassword ||
        s_enableSSL != emailSettings.EnableSSL ||
        s_waitPeriod != TimeSpan.FromSeconds(writer.FaultEmailSettings.WaitPeriod) ||
        s_timeZone.Id != writer.XDATimeZone;

    if (configurationChanged)
    {
        // Apply the new settings on the process queue so the update is
        // serialized with any email operations already in flight
        ProcessQueue.Add(() =>
        {
            s_timeTolerance = writer.TimeTolerance;
            s_smtpServer = writer.EmailSettings.SMTPServer;
            s_fromAddress = writer.EmailSettings.FromAddress;
            s_username = writer.EmailSettings.Username;
            s_password = writer.EmailSettings.SecurePassword;
            s_enableSSL = writer.EmailSettings.EnableSSL;
            s_waitPeriod = TimeSpan.FromSeconds(writer.FaultEmailSettings.WaitPeriod);
            s_timeZone = TimeZoneInfo.FindSystemTimeZoneById(writer.XDATimeZone);
        });
    }

    if ((object)s_dbAdapterContainer == null)
    {
        ProcessQueue.Add(() =>
        {
            // Re-check on the queue in case an earlier queued action already created the container
            if ((object)s_dbAdapterContainer == null)
                s_dbAdapterContainer = new DbAdapterContainer(writer.DbConnectionString);
        });
    }
}
// Creates the sandbox data writer, configures it with the system
// settings, and writes the meter data set's results to its destination.
private void ExecuteDataWriters(MeterDataSet meterDataSet, DbAdapterContainer dbAdapterContainer)
{
    IDataWriter dataWriter = null;

    try
    {
        // Create the data writer and configure it with the
        // system settings carried by the meter data set
        dataWriter = new openEASSandBoxWriter();
        ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, dataWriter);

        // Write the results to the data writer's destination
        dataWriter.WriteResults(dbAdapterContainer, meterDataSet);
    }
    finally
    {
        // ReSharper disable once SuspiciousTypeConversion.Global
        if ((object)dataWriter != null)
            TryDispose(dataWriter as IDisposable);
    }
}
// Returns the number of emails already sent for the given event.
private static int GetEmailCount(DbAdapterContainer dbAdapterContainer, int eventID)
{
    // Wrap the container's open connection; the 'false' flag prevents the
    // AdoDataConnection from disposing a connection it does not own
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        string countQuery = "SELECT COUNT(*) FROM EventSentEmail WHERE EventID = {0}";

        return connection.ExecuteScalar<int>(
            timeout: dbAdapterContainer.CommandTimeout,
            sqlFormat: countQuery,
            parameters: new object[] { eventID });
    }
}
// Called when the file processor has picked up a file in one of the watch
// directories. This handler validates the file and processes it if able.
// Requeues the file when an exclusive read lock cannot be obtained; on failure,
// flags the file group as errored before rethrowing with the original stack trace.
private void FileProcessor_Processing(object sender, FileProcessorEventArgs fileProcessorEventArgs)
{
    if (m_disposed)
        return;

    try
    {
        string filePath;
        string connectionString;
        SystemSettings systemSettings;

        filePath = fileProcessorEventArgs.FullPath;

        // Another process may still be writing the file; requeue and try again later
        if (!FilePath.TryGetReadLockExclusive(filePath))
        {
            fileProcessorEventArgs.Requeue = true;
            return;
        }

        connectionString = LoadSystemSettings();
        systemSettings = new SystemSettings(connectionString);

        using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout))
        {
            try
            {
                ProcessFile(
                    fileProcessorArgs: fileProcessorEventArgs,
                    connectionString: connectionString,
                    systemSettings: systemSettings,
                    dbAdapterContainer: dbAdapterContainer);
            }
            catch (Exception ex)
            {
                // There may be a problem here where the outer exception's call stack
                // was overwritten by the call stack of the point where it was thrown
                ExceptionDispatchInfo exInfo = ExceptionDispatchInfo.Capture(ex);

                try
                {
                    // Attempt to set the error flag on the file group
                    FileInfoDataContext fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();
                    FileWrapper fileWrapper = m_fileWrapperLookup.GetOrAdd(filePath, path => new FileWrapper(path));
                    FileGroup fileGroup = fileWrapper.GetFileGroup(fileInfo, systemSettings.XDATimeZoneInfo);
                    fileGroup.ProcessingEndTime = fileGroup.ProcessingStartTime;
                    fileGroup.Error = 1;
                    fileInfo.SubmitChanges();
                }
                catch (Exception fileGroupError)
                {
                    // Log exceptions that occur when setting the error flag on the file group
                    string message = $"Exception occurred setting error flag on file group: {fileGroupError.Message}";
                    OnProcessException(new Exception(message, fileGroupError));
                }

                // Throw the original exception, preserving its original stack trace
                exInfo.Throw();
            }
        }
    }
    catch (FileSkippedException)
    {
        // Do not wrap FileSkippedExceptions because
        // these only generate warning messages
        throw;
    }
    catch (Exception ex)
    {
        // Wrap all other exceptions to include the file path in the message
        string message = $"Exception occurred processing file \"{fileProcessorEventArgs.FullPath}\": {ex.Message}";
        throw new Exception(message, ex);
    }
    finally
    {
        // Make sure to clean up file wrappers from
        // the lookup table to prevent memory leaks;
        // requeued files keep their wrapper for the retry
        if (!fileProcessorEventArgs.Requeue)
            m_fileWrapperLookup.Remove(fileProcessorEventArgs.FullPath);
    }
}
// Writes one .dat result file per faulted data group in the meter data set.
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    // Pull the cycle data and fault analysis results for this meter data set
    CycleDataResource cycleDataResource = meterDataSet.GetResource(() => CycleDataResource.GetResource(meterDataSet, dbAdapterContainer));
    FaultDataResource faultDataResource = meterDataSet.GetResource(() => new FaultDataResource(dbAdapterContainer));

    // Make sure the results directory exists before writing to it
    if (!Directory.Exists(m_resultsPath))
        Directory.CreateDirectory(m_resultsPath);

    for (int i = 0; i < cycleDataResource.DataGroups.Count; i++)
    {
        DataGroup dataGroup = cycleDataResource.DataGroups[i];
        FaultGroup faultGroup;

        // Skip data groups for which no faults were detected
        if (!faultDataResource.FaultLookup.TryGetValue(dataGroup, out faultGroup))
            continue;

        // Result file name: <sourceFile>,<index>,Line<assetKey>.dat
        string rootFileName = FilePath.GetFileNameWithoutExtension(meterDataSet.FilePath);
        string fileName = string.Format("{0},{1:000},Line{2}.dat", rootFileName, i, dataGroup.Line.AssetKey);

        List<int> seriesIDs = dataGroup.DataSeries
            .Select(series => series.SeriesInfo.ID)
            .ToList();

        EventDataSet eventDataSet = new EventDataSet()
        {
            ResultsPath = Path.Combine(m_resultsPath, fileName),
            MeterDataSet = meterDataSet,
            TimeZoneOffset = GetTimeZoneOffset(meterDataSet.Meter.TimeZone, dataGroup.StartTime),
            DataGroup = dataGroup,
            VICycleDataGroup = cycleDataResource.VICycleDataGroups[i],
            Faults = faultGroup.Faults,
            OutputChannels = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>().OutputChannels.Where(channel => seriesIDs.Contains(channel.SeriesID)).ToList()
        };

        WriteResults(eventDataSet);
    }
}
// Writes one .xml result file per faulted data group in the meter data set.
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet)
{
    // Pull the cycle data and fault analysis results for this meter data set
    CycleDataResource cycleDataResource = meterDataSet.GetResource(() => CycleDataResource.GetResource(meterDataSet, dbAdapterContainer));
    FaultDataResource faultDataResource = meterDataSet.GetResource(() => new FaultDataResource(dbAdapterContainer));

    // Make sure the results directory exists before writing to it
    if (!Directory.Exists(m_resultsPath))
        Directory.CreateDirectory(m_resultsPath);

    for (int i = 0; i < cycleDataResource.DataGroups.Count; i++)
    {
        DataGroup dataGroup = cycleDataResource.DataGroups[i];
        FaultGroup faultGroup;

        // Skip data groups for which no faults were detected
        if (!faultDataResource.FaultLookup.TryGetValue(dataGroup, out faultGroup))
            continue;

        // Result file name: <sourceFile>,<index>,Line<assetKey>.xml
        string rootFileName = FilePath.GetFileNameWithoutExtension(meterDataSet.FilePath);
        string fileName = string.Format("{0},{1:000},Line{2}.xml", rootFileName, i, dataGroup.Line.AssetKey);

        VICycleDataGroup viCycleDataGroup = cycleDataResource.VICycleDataGroups[i];
        WriteResults(meterDataSet, dataGroup, viCycleDataGroup, faultGroup.Faults, Path.Combine(m_resultsPath, fileName));
    }
}
// Builds the flot data points for the requested series indexes of the given event.
// Indexes refer to positions in the list returned by GetFlotInfo(eventID); indexes
// out of range or without matching data are silently skipped. NaN samples are dropped
// and timestamps are converted to milliseconds since the Unix epoch.
public List<FlotSeries> GetFlotData(int eventID, List<int> seriesIndexes)
{
    List<FlotSeries> flotSeriesList = new List<FlotSeries>();

    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
        EventDataTableAdapter eventDataAdapter = dbAdapterContainer.GetAdapter<EventDataTableAdapter>();
        FaultCurveTableAdapter faultCurveAdapter = dbAdapterContainer.GetAdapter<FaultCurveTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
        FaultLocationInfoDataContext faultLocationInfo = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>();

        // NOTE(review): eventRow is not checked for null before use below —
        // an unknown eventID would throw a NullReferenceException; confirm callers guarantee existence
        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();
        Meter meter = meterInfo.Meters.First(m => m.ID == eventRow.MeterID);
        List<FlotSeries> flotInfo = GetFlotInfo(eventID);
        DateTime epoch = new DateTime(1970, 1, 1);

        // Each data source is wrapped in Lazy so it is only queried
        // if one of the requested series actually needs it
        Lazy<Dictionary<int, DataSeries>> waveformData = new Lazy<Dictionary<int, DataSeries>>(() =>
        {
            return ToDataGroup(meter, eventDataAdapter.GetTimeDomainData(eventRow.EventDataID)).DataSeries
                .ToDictionary(dataSeries => dataSeries.SeriesInfo.ID);
        });

        Lazy<DataGroup> cycleData = new Lazy<DataGroup>(() => ToDataGroup(meter, eventDataAdapter.GetFrequencyDomainData(eventRow.EventDataID)));

        Lazy<Dictionary<string, DataSeries>> faultCurveData = new Lazy<Dictionary<string, DataSeries>>(() =>
        {
            return faultCurveAdapter
                .GetDataBy(eventRow.ID)
                .Select(faultCurve => new { Algorithm = faultCurve.Algorithm, DataGroup = ToDataGroup(meter, faultCurve.Data) })
                .Where(obj => obj.DataGroup.DataSeries.Count > 0)
                .ToDictionary(obj => obj.Algorithm, obj => obj.DataGroup[0]);
        });

        foreach (int index in seriesIndexes)
        {
            DataSeries dataSeries = null;
            FlotSeries flotSeries;

            // Ignore indexes beyond the known series list
            if (index >= flotInfo.Count)
                continue;

            flotSeries = flotInfo[index];

            if (flotSeries.FlotType == FlotSeriesType.Waveform)
            {
                // Waveform series are keyed directly by series ID
                if (!waveformData.Value.TryGetValue(flotSeries.SeriesID, out dataSeries))
                    continue;
            }
            else if (flotSeries.FlotType == FlotSeriesType.Cycle)
            {
                // Cycle series are located by measurement type and phase;
                // SeriesID acts as an ordinal offset within that filtered set
                dataSeries = cycleData.Value.DataSeries
                    .Where(series => series.SeriesInfo.Channel.MeasurementType.Name == flotSeries.MeasurementType)
                    .Where(series => series.SeriesInfo.Channel.Phase.Name == flotSeries.Phase)
                    .Skip(flotSeries.SeriesID)
                    .FirstOrDefault();

                if ((object)dataSeries == null)
                    continue;
            }
            else if (flotSeries.FlotType == FlotSeriesType.Fault)
            {
                // Fault curves are keyed by the algorithm name stored in ChannelName
                string algorithm = flotSeries.ChannelName;

                if (!faultCurveData.Value.TryGetValue(algorithm, out dataSeries))
                    continue;
            }
            else
            {
                continue;
            }

            // Emit [epoch-milliseconds, value] pairs, dropping NaN samples
            foreach (DataPoint dataPoint in dataSeries.DataPoints)
            {
                if (!double.IsNaN(dataPoint.Value))
                    flotSeries.DataPoints.Add(new double[] { dataPoint.Time.Subtract(epoch).TotalMilliseconds, dataPoint.Value });
            }

            flotSeriesList.Add(flotSeries);
        }
    }

    return flotSeriesList;
}
/// <summary>
/// Gets the data points for the given series of the given event,
/// formatted for display in a flot chart.
/// </summary>
/// <param name="eventID">The ID of the event whose data is to be retrieved.</param>
/// <param name="seriesIndexes">
/// Indexes identifying which series to return. The index space is partitioned into three
/// consecutive ranges: waveform series, then cycle data series, then fault curves.
/// </param>
/// <returns>The list of requested series with their data points; empty if the event does not exist.</returns>
public List<FlotSeries> GetFlotData(int eventID, List<int> seriesIndexes)
{
    List<FlotSeries> flotSeriesList = new List<FlotSeries>();

    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString))
    using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
    {
        EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
        EventDataTableAdapter eventDataAdapter = dbAdapterContainer.GetAdapter<EventDataTableAdapter>();
        FaultCurveTableAdapter faultCurveAdapter = dbAdapterContainer.GetAdapter<FaultCurveTableAdapter>();
        MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();

        MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault();

        // Guard against a nonexistent event ID rather than failing
        // with a NullReferenceException below
        if ((object)eventRow == null)
            return flotSeriesList;

        Meter meter = meterInfo.Meters.First(m => m.ID == eventRow.MeterID);

        List<Series> waveformInfo = GetWaveformInfo(meterInfo.Series, eventRow.MeterID, eventRow.LineID);
        List<string> faultCurveInfo = GetFaultCurveInfo(connection, eventID);
        DateTime epoch = new DateTime(1970, 1, 1);

        // Each data source is loaded lazily so the expensive database reads
        // only occur if a requested series actually needs them
        Lazy<Dictionary<int, DataSeries>> waveformData = new Lazy<Dictionary<int, DataSeries>>(() =>
        {
            return ToDataGroup(meter, eventDataAdapter.GetTimeDomainData(eventRow.EventDataID)).DataSeries
                .ToDictionary(dataSeries => dataSeries.SeriesInfo.ID);
        });

        Lazy<DataGroup> cycleData = new Lazy<DataGroup>(() => ToDataGroup(meter, eventDataAdapter.GetFrequencyDomainData(eventRow.EventDataID)));

        Lazy<Dictionary<string, DataSeries>> faultCurveData = new Lazy<Dictionary<string, DataSeries>>(() =>
        {
            return faultCurveAdapter
                .GetDataBy(eventRow.ID)
                .Select(faultCurve => new { Algorithm = faultCurve.Algorithm, DataGroup = ToDataGroup(meter, faultCurve.Data) })
                .Where(obj => obj.DataGroup.DataSeries.Count > 0)
                .ToDictionary(obj => obj.Algorithm, obj => obj.DataGroup[0]);
        });

        foreach (int index in seriesIndexes)
        {
            DataSeries dataSeries = null;
            FlotSeries flotSeries = null;

            // Translate the flat index into an offset within each of the
            // three consecutive index ranges; a negative offset simply means
            // the index falls in an earlier range
            int waveformIndex = index;
            int cycleIndex = waveformIndex - waveformInfo.Count;
            int faultCurveIndex = cycleIndex - CycleDataInfo.Count;

            if (waveformIndex < waveformInfo.Count)
            {
                if (!waveformData.Value.TryGetValue(waveformInfo[waveformIndex].ID, out dataSeries))
                    continue;

                flotSeries = ToFlotSeries(waveformInfo[waveformIndex]);
            }
            else if (cycleIndex < CycleDataInfo.Count)
            {
                if (cycleIndex >= cycleData.Value.DataSeries.Count)
                    continue;

                dataSeries = cycleData.Value[cycleIndex];

                flotSeries = new FlotSeries()
                {
                    MeasurementType = CycleDataInfo[cycleIndex].MeasurementType,
                    MeasurementCharacteristic = CycleDataInfo[cycleIndex].MeasurementCharacteristic,
                    Phase = CycleDataInfo[cycleIndex].Phase,
                    SeriesType = CycleDataInfo[cycleIndex].SeriesType
                };
            }
            else if (faultCurveIndex < faultCurveInfo.Count)
            {
                // Fault curves are keyed by fault location algorithm name
                string algorithm = faultCurveInfo[faultCurveIndex];

                if (!faultCurveData.Value.TryGetValue(algorithm, out dataSeries))
                    continue;

                flotSeries = ToFlotSeries(faultCurveInfo[faultCurveIndex]);
            }
            else
            {
                continue;
            }

            // Convert to flot format: [milliseconds since Unix epoch, value];
            // NaN values are skipped since flot cannot chart them
            foreach (DataPoint dataPoint in dataSeries.DataPoints)
            {
                if (!double.IsNaN(dataPoint.Value))
                    flotSeries.DataPoints.Add(new double[] { dataPoint.Time.Subtract(epoch).TotalMilliseconds, dataPoint.Value });
            }

            flotSeriesList.Add(flotSeries);
        }
    }

    return flotSeriesList;
}
private void ProcessMeterData(MeterDataSet meterDataSet, DbAdapterContainer dbAdapterContainer) { try { meterDataSet.ConnectionString = m_connectionString; ExecuteDataOperation(meterDataSet, dbAdapterContainer); ExecuteDataWriters(meterDataSet, dbAdapterContainer); } catch (Exception ex) { try { OnHandleException(ex); meterDataSet.FileGroup.Error = 1; dbAdapterContainer.GetAdapter<FileInfoDataContext>().SubmitChanges(); } catch { // Ignore errors here as they are most likely // related to the error we originally caught } } }
/// <summary>
/// Handles the page load event. On the initial (non-postback) request, reads the
/// event ID and display options from the query string and populates the posted*
/// fields with event, meter, line, and fault/disturbance summary data for the page.
/// Does nothing when "eventId" is absent or the request is a postback.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The event arguments.</param>
protected void Page_Load(object sender, EventArgs e)
{
    if (!IsPostBack)
    {
        if (Request["eventId"] != null)
        {
            // Optional display toggles passed through from the query string
            if (Request["faultcurves"] != null)
                postedShowFaultCurves = Request["faultcurves"];

            if (Request["breakerdigitals"] != null)
                postedShowBreakerDigitals = Request["breakerdigitals"];

            // Rebuild the query string minus the eventId parameter so the
            // remaining options can be appended to links for other events
            postedURLQueryString = string.Concat(Request.QueryString.AllKeys
                .Where(key => !key.Equals("eventId", StringComparison.OrdinalIgnoreCase))
                .Select(key => "&" + HttpUtility.UrlEncode(key) + "=" + HttpUtility.UrlEncode(Request.QueryString[key])));

            postedEventId = Request["eventId"];

            using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(connectionString))
            {
                try
                {
                    EventTypeTableAdapter eventTypeAdapter = dbAdapterContainer.GetAdapter<EventTypeTableAdapter>();
                    EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>();
                    MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>();
                    FaultSummaryTableAdapter summaryInfo = dbAdapterContainer.GetAdapter<FaultSummaryTableAdapter>();
                    DisturbanceTableAdapter disturbanceAdapter = dbAdapterContainer.GetAdapter<DisturbanceTableAdapter>();

                    // First() throws if the event ID is unknown; the catch
                    // below surfaces the message on the page
                    MeterData.EventRow theevent = eventAdapter.GetDataByID(Convert.ToInt32(postedEventId)).First();
                    JavaScriptSerializer serializer = new JavaScriptSerializer();

                    postedSeriesList = serializer.Serialize(SignalCode.GetFlotInfo(theevent.ID));
                    postedMeterId = theevent.MeterID.ToString();
                    postedDate = theevent.StartTime.ToShortDateString();
                    postedEventId = theevent.ID.ToString();
                    postedEventDate = theevent.StartTime.ToString("yyyy-MM-dd HH:mm:ss.fffffff");
                    postedEventMilliseconds = theevent.StartTime.Subtract(new DateTime(1970, 1, 1)).TotalMilliseconds.ToString();
                    postedMeterName = meterInfo.Meters.Single(m => m.ID == theevent.MeterID).Name;

                    MeterData.EventTypeDataTable eventTypes = eventTypeAdapter.GetData();

                    postedAdjacentEventIds = GetPreviousAndNextEventIds(theevent.ID, dbAdapterContainer.Connection);

                    postedLineName = meterInfo.MeterLines
                        .Where(row => row.LineID == theevent.LineID)
                        .Where(row => row.MeterID == theevent.MeterID)
                        .Select(row => row.LineName)
                        .FirstOrDefault() ?? "";

                    postedLineLength = meterInfo.Lines
                        .Where(row => row.ID == theevent.LineID)
                        .Select(row => row.Length)
                        .AsEnumerable()
                        .Select(length => length.ToString())
                        .FirstOrDefault() ?? "";

                    postedEventName = eventTypes
                        .Where(row => row.ID == theevent.EventTypeID)
                        .Select(row => row.Name)
                        .DefaultIfEmpty("")
                        .Single();

                    if (postedEventName.Equals("Fault"))
                    {
                        // Prefer the selected algorithm's summary, favoring
                        // non-suppressed rows with the earliest inception time
                        FaultLocationData.FaultSummaryDataTable thesummarydatatable = summaryInfo.GetDataBy(Convert.ToInt32(postedEventId));

                        FaultLocationData.FaultSummaryRow thesummary = thesummarydatatable
                            .Where(row => row.IsSelectedAlgorithm == 1)
                            .OrderBy(row => row.IsSuppressed)
                            .ThenBy(row => row.Inception)
                            .FirstOrDefault();

                        if ((object)thesummary != null)
                        {
                            postedStartTime = thesummary.Inception.TimeOfDay.ToString();
                            postedDurationPeriod = thesummary.DurationCycles.ToString("##.##", CultureInfo.InvariantCulture) + " cycles";
                            postedMagnitude = thesummary.CurrentMagnitude.ToString("####.#", CultureInfo.InvariantCulture) + " Amps (RMS)";
                        }
                    }
                    else if (new[] { "Sag", "Swell" }.Contains(postedEventName))
                    {
                        // For sags and swells, use the earliest disturbance
                        // record matching the event's type
                        MeterData.DisturbanceDataTable disturbanceTable = disturbanceAdapter.GetDataBy(theevent.ID);

                        MeterData.DisturbanceRow disturbance = disturbanceTable
                            .Where(row => row.EventTypeID == theevent.EventTypeID)
                            .OrderBy(row => row.StartTime)
                            .FirstOrDefault();

                        if ((object)disturbance != null)
                        {
                            postedStartTime = disturbance.StartTime.TimeOfDay.ToString();
                            postedDurationPeriod = disturbance.DurationCycles.ToString("##.##", CultureInfo.InvariantCulture) + " cycles";

                            // -1.0e308 appears to be a sentinel for "magnitude
                            // not available" -- TODO confirm against the writer
                            if (disturbance.PerUnitMagnitude != -1.0e308)
                                postedMagnitude = disturbance.PerUnitMagnitude.ToString("N3", CultureInfo.InvariantCulture) + " pu (RMS)";
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Surface the error text to the page instead of failing the request
                    postedErrorMessage = ex.Message;
                }
            }
        }
    }
}
public static List<FlotSeries> GetFlotInfo(int eventID) { using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(ConnectionString)) using (AdoDataConnection connection = new AdoDataConnection(dbAdapterContainer.Connection, typeof(SqlDataAdapter), false)) { EventTableAdapter eventAdapter = dbAdapterContainer.GetAdapter<EventTableAdapter>(); MeterInfoDataContext meterInfo = dbAdapterContainer.GetAdapter<MeterInfoDataContext>(); FaultLocationInfoDataContext faultInfo = dbAdapterContainer.GetAdapter<FaultLocationInfoDataContext>(); MeterData.EventRow eventRow = eventAdapter.GetDataByID(eventID).FirstOrDefault(); if ((object)eventRow == null) return new List<FlotSeries>(); return GetWaveformInfo(meterInfo.Series, eventRow.MeterID, eventRow.LineID) .Select(ToFlotSeries) .Concat(CycleDataInfo) .Concat(GetFaultCurveInfo(connection, eventID).Select(ToFlotSeries)) .ToList(); } }
public void Initialize(MeterDataSet meterDataSet, DbAdapterContainer dbAdapterContainer) { DataGroup dataGroup; VICycleDataGroup viCycleDataGroup; CycleDataResource cycleDataResource; ConnectionStringParser.ParseConnectionString(meterDataSet.ConnectionString, this); m_disturbances = new Dictionary<DataGroup, List<Disturbance>>(); cycleDataResource = CycleDataResource.GetResource(meterDataSet, dbAdapterContainer); for (int i = 0; i < cycleDataResource.DataGroups.Count; i++) { dataGroup = cycleDataResource.DataGroups[i]; viCycleDataGroup = cycleDataResource.VICycleDataGroups[i]; DetectDisturbances(dataGroup, viCycleDataGroup); } }
/// <summary> /// Processes the file at the given path. /// </summary> /// <param name="fileGroupID">The identifier for the file group to be processed.</param> /// <returns>False if the file was not able to be processed and needs to be processed again later.</returns> public bool ProcessFileGroup(int fileGroupID) { SystemSettings systemSettings; FileInfoDataContext fileInfo; FileGroup fileGroup; DataFile dataFile = null; List<MeterDataSet> meterDataSets; try { systemSettings = new SystemSettings(m_connectionString); using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(systemSettings.DbConnectionString, systemSettings.DbTimeout)) { fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>(); // Create a file group for this file in the database fileGroup = LoadFileGroup(fileInfo, fileGroupID); if ((object)fileGroup == null) return true; dataFile = fileGroup.DataFiles.FirstOrDefault(); if ((object)dataFile == null) return true; // Parse the file meterDataSets = LoadMeterDataSets(dbAdapterContainer, fileGroup); // Set properties on each of the meter data sets foreach (MeterDataSet meterDataSet in meterDataSets) { meterDataSet.ConnectionString = m_connectionString; meterDataSet.FilePath = dataFile.FilePath; meterDataSet.FileGroup = fileGroup; } // Process meter data sets OnStatusMessage("Processing meter data from file \"{0}\"...", dataFile.FilePath); ProcessMeterDataSets(meterDataSets, systemSettings, dbAdapterContainer); OnStatusMessage("Finished processing data from file \"{0}\".", dataFile.FilePath); } } catch (Exception ex) { string message; if ((object)dataFile != null) message = string.Format("Failed to process file \"{0}\" due to exception: {1}", dataFile.FilePath, ex.Message); else message = string.Format("Failed to process file group \"{0}\" due to exception: {1}", fileGroupID, ex.Message); OnHandleException(new InvalidOperationException(message, ex)); } return true; }
/// <summary> /// Processes data not yet processed /// by this SandBox instance. /// </summary> private void ProcessLatestDataOperation() { string latestDataFile = FilePath.GetAbsolutePath(@"LatestData.bin"); int latestFileGroupID; FileInfoDataContext fileInfo; List<int> newFileGroups; if ((object)m_systemSettings == null) ReloadSystemSettings(); using (FileBackedDictionary<string, int> dictionary = new FileBackedDictionary<string, int>(latestDataFile)) using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(m_systemSettings.DbConnectionString, m_systemSettings.DbTimeout)) { fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>(); do { dictionary.Compact(); if (!dictionary.TryGetValue("latestFileGroupID", out latestFileGroupID)) latestFileGroupID = 0; newFileGroups = fileInfo.FileGroups .Select(fileGroup => fileGroup.ID) .Where(id => id > latestFileGroupID) .Take(100) .OrderBy(id => id) .ToList(); foreach (int fileGroupID in newFileGroups) { MeterDataProcessor processor = new MeterDataProcessor(LoadSystemSettings()); processor.ProcessFileGroup(fileGroupID); dictionary["latestFileGroupID"] = fileGroupID; } } while (newFileGroups.Count > 0); } }
public void WriteResults(DbAdapterContainer dbAdapterContainer, MeterDataSet meterDataSet) { // Write results to an external data store Log.InfoFormat("Results written to external data store."); }