/// <summary>
/// Copies this point's timestamp, ID, value and flags into <paramref name="dataPoint"/>.
/// </summary>
/// <param name="dataPoint">Target point that receives this point's property values.</param>
public void Clone(DataPoint dataPoint)
{
    // Straight property-to-property copy; target is overwritten field by field.
    dataPoint.Timestamp = Timestamp;
    dataPoint.PointID = PointID;
    dataPoint.Value = Value;
    dataPoint.Flags = Flags;
}
/// <summary>
/// Copies this point's state into <paramref name="destination"/>. The timestamp is
/// written through the private backing field, so the Timestamp property setter is
/// not invoked on the destination.
/// </summary>
/// <param name="destination">Target point that receives this point's state.</param>
public void Clone(DataPoint destination)
{
    // Direct field write for the timestamp (bypasses property setter); the
    // remaining members are copied through their public properties.
    destination.m_timestamp = m_timestamp;
    destination.PointID = PointID;
    destination.Value = Value;
    destination.Flags = Flags;
}
/// <summary>
/// Reads the next record from the underlying stream into <paramref name="point"/>.
/// </summary>
/// <param name="point">Point that receives the timestamp, ID, value and flags of the record read.</param>
/// <returns><c>true</c> if a record was read; otherwise, <c>false</c> when no more records are available.</returns>
/// <exception cref="InvalidOperationException">Thrown when the stream has not been initialized.</exception>
public bool ReadNext(DataPoint point)
{
    // CA2201: an uninitialized stream is an invalid-operation condition; never
    // manually raise NullReferenceException (reserved for the runtime).
    if ((object)m_stream == null)
        throw new InvalidOperationException("Stream is not initialized");

    if (!m_stream.Read(m_key, m_value))
        return false;

    point.Timestamp = m_key.Timestamp;
    point.PointID = m_key.PointID;
    point.Value = m_value.Value1;
    // Flags are carried in the third value slot of the archive value structure.
    point.Flags = m_value.Value3;

    return true;
}
/// <summary>
/// Thread-pool entry point that compares source GSF historian archive files ("*.d")
/// against a destination SNAPdb instance, tallying valid, invalid, missing, duplicate
/// and resynchronized points. Missing points are appended to "MissingData.txt".
/// Source files are processed in parallel, one file per worker.
/// </summary>
/// <param name="state">Expected to be a Dictionary&lt;string, string&gt; of named comparison parameters.</param>
private void CompareArchives(object state)
{
    try
    {
        const int MessageInterval = 1000000;
        Ticks operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        // NOTE(review): sibling method uses nameof(state) here — consider aligning.
        if ((object)parameters == null)
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));

        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        // Offload location is optional; include its files when the folder exists
        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));

        // Start calculating total number of source points
        m_pointCount = 0;
        ThreadPool.QueueUserWorkItem(CalculateSourcePointCount, new[] { parameters["sourceFilesLocation"], parameters["sourceFilesOffloadLocation"] });

        int maxThreads;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
            maxThreads = m_defaultMaxThreads;

        string[] sourceFileNames = sourceFiles.ToArray();
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();

        // Shared comparison counters, updated by parallel workers via Interlocked
        long comparedPoints = 0;
        long validPoints = 0;
        long invalidPoints = 0;
        long missingPoints = 0;
        long duplicatePoints = 0;
        long resyncs = 0;
        long displayMessageCount = MessageInterval;

        SetProgressMaximum(100);

        using (SnapDBEngine engine = new SnapDBEngine(this, instanceName, parameters["destinationFilesLocation"], parameters["targetFileSize"], parameters["directoryNamingMethod"]))
        using (StreamWriter missingDataOutput = File.CreateText(FilePath.GetAbsolutePath("MissingData.txt")))
        {
            Parallel.ForEach(sourceFileNames, new ParallelOptions
            {
                MaxDegreeOfParallelism = maxThreads
            },
            (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Comparing \"{0}\"...", FilePath.GetFileName(sourceFileName));

                DataPoint sourcePoint = new DataPoint();
                DataPoint destinationPoint = new DataPoint();
                DataPoint lastPoint = new DataPoint();
                Ticks readStartTime = DateTime.UtcNow.Ticks;
                bool updateProgress, resync, readInitialized = false;

                using (GSFHistorianStream sourceStream = new GSFHistorianStream(this, sourceFileName, instanceName))
                using (SnapDBClient client = new SnapDBClient(engine, sourceStream.InstanceName))
                {
                    while (true)
                    {
                        if (sourceStream.ReadNext(sourcePoint))
                        {
                            // Skip consecutive records with same ID/time when requested
                            if (ignoreDuplicates)
                            {
                                bool success = true;

                                while (success && sourcePoint.PointID == lastPoint.PointID && sourcePoint.Timestamp == lastPoint.Timestamp)
                                {
                                    Interlocked.Increment(ref duplicatePoints);
                                    success = sourceStream.ReadNext(sourcePoint);
                                }

                                // Finished with source read
                                if (!success)
                                    break;
                            }

                            if (readInitialized)
                            {
                                if (!client.ReadNextSnapDBPoint(destinationPoint))
                                {
                                    ShowUpdateMessage("*** Compare for \"{0}\" Failed: Destination Read Was Short ***", FilePath.GetFileName(sourceFileName));
                                    break;
                                }
                            }
                            else
                            {
                                // First read: seek destination to the first source point
                                readInitialized = client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);
                            }
                        }
                        else
                        {
                            // Finished with source read
                            break;
                        }

                        resync = false;

                        do
                        {
                            if (resync)
                                Interlocked.Increment(ref resyncs);

                            // See if source and destination points match
                            if (sourcePoint.PointID == destinationPoint.PointID && sourcePoint.Timestamp == destinationPoint.Timestamp)
                            {
                                if (sourcePoint.Value == destinationPoint.Value)
                                {
                                    if (sourcePoint.Flags == destinationPoint.Flags)
                                        Interlocked.Increment(ref validPoints);
                                    else
                                        Interlocked.Increment(ref invalidPoints);
                                }
                                else
                                {
                                    Interlocked.Increment(ref invalidPoints);
                                }

                                resync = false;
                            }
                            else
                            {
                                // Attempt to resynchronize readers by rescanning to point if we didn't find point and are not resynchronizing already
                                resync = !resync && client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);

                                if (!resync)
                                {
                                    Interlocked.Increment(ref missingPoints);

                                    lock (missingDataOutput)
                                        missingDataOutput.WriteLine("[{0:00000}@{1:yyyy-MM-dd HH:mm:ss.fff}] = {2}({3})", sourcePoint.PointID, new DateTime((long)sourcePoint.Timestamp, DateTimeKind.Utc), sourcePoint.ValueAsSingle, sourcePoint.Flags);
                                }
                            }
                        }
                        while (resync);

                        // Update last point
                        if (ignoreDuplicates)
                            sourcePoint.Clone(lastPoint);

                        updateProgress = false;

                        // NOTE(review): displayMessageCount is read and advanced by multiple
                        // workers without synchronization — appears to affect only message
                        // cadence, not counts, but confirm this is intentional.
                        if (Interlocked.Increment(ref comparedPoints) == displayMessageCount)
                        {
                            if (comparedPoints % (5 * MessageInterval) == 0)
                                ShowUpdateMessage("{0}*** Compared {1:#,##0} points so far averaging {2:#,##0} points per second ***{0}", Environment.NewLine, comparedPoints, comparedPoints / (DateTime.UtcNow.Ticks - readStartTime).ToSeconds());
                            else
                                ShowUpdateMessage("{0}Found {1:#,##0} valid, {2:#,##0} invalid and {3:#,##0} missing points during compare so far...{0}", Environment.NewLine, validPoints, invalidPoints, missingPoints);

                            updateProgress = true;
                            displayMessageCount += MessageInterval;
                        }

                        // Note that point count used here is estimated
                        if (updateProgress && m_pointCount > 0)
                            UpdateProgressBar((int)((comparedPoints / (double)m_pointCount) * 100.0D));
                    }
                }

                if (m_formClosing)
                    loopState.Break();
            });

            if (m_formClosing)
            {
                ShowUpdateMessage("Migration canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;
                ShowUpdateMessage("*** Compare Complete ***");
                ShowUpdateMessage("Total compare time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), comparedPoints / totalTime.ToSeconds());
                UpdateProgressBar(100);

                ShowUpdateMessage("{0}" + "Total points compared: {1:#,##0}{0}" + " Valid points: {2:#,##0}{0}" + " Invalid points: {3:#,##0}{0}" + " Missing points: {4:#,##0}{0}" + " Duplicate points: {5:#,##0}{0}" + " Resynchronizations: {6:#,##0}{0}" + " Source point count: {7:#,##0}{0}" + "{0}Migrated data conversion {8:##0.000}% accurate", Environment.NewLine, comparedPoints, validPoints, invalidPoints, missingPoints, duplicatePoints, resyncs, comparedPoints + missingPoints, Math.Truncate(validPoints / (double)(comparedPoints + missingPoints) * 100000.0D) / 1000.0D);

                // When duplicates were skipped, invalid points may just be artifacts of
                // the skipped duplicate records — explain that to the user.
                if (ignoreDuplicates && invalidPoints > 0 && duplicatePoints >= invalidPoints)
                    ShowUpdateMessage(
                        "{0}Note: Since duplicated source data was being ignored and duplicate points outnumber (or are equal to) " +
                        "invalid points, the invalid data is likely an artifact of comparing a duplicated source record that was " +
                        "not archived into the destination.{0}", Environment.NewLine);
            }
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage("Failure during compare: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}
/// <summary>
/// Thread-pool entry point that migrates all points from a GSF historian archive into
/// a destination SNAPdb instance, reporting progress as the migration proceeds.
/// </summary>
/// <param name="state">Expected to be a Dictionary&lt;string, string&gt; of named migration parameters.</param>
private void LiveMigration(object state)
{
    try
    {
        const int MessageInterval = 1000000;
        Ticks operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        // Use nameof for refactor safety, consistent with the comparison method
        if ((object)parameters == null)
            throw new ArgumentNullException(nameof(state), "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();

        string instanceName = OpenGSFHistorianArchive(
            parameters["sourceFilesLocation"],
            parameters["sourceFilesOffloadLocation"],
            parameters["instanceName"]);

        UpdateInstanceName(instanceName);

        // Give the source historian five minutes to initialize before warning the
        // user, then wait indefinitely for it to become ready.
        if (!m_archiveReady.Wait(300000))
        {
            ShowUpdateMessage("Still initializing source historian after 5 minutes...");
            m_archiveReady.Wait();
        }

        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();
        DataPoint point = new DataPoint();
        long migratedPoints = 0;
        long displayMessageCount = MessageInterval;

        SetProgressMaximum(100);

        Ticks readStartTime = DateTime.UtcNow.Ticks;

        using (SnapDBEngine engine = new SnapDBEngine(this, instanceName, parameters["destinationFilesLocation"], parameters["targetFileSize"], parameters["directoryNamingMethod"]))
        using (SnapDBClient client = new SnapDBClient(engine, instanceName))
        {
            // Copy every source point into the destination, reporting periodically
            while (ReadNextGSFHistorianPoint(point))
            {
                client.WriteSnapDBData(point, ignoreDuplicates);
                migratedPoints++;

                if (migratedPoints == displayMessageCount)
                {
                    ShowUpdateMessage("{0}Migrated {1:#,##0} points so far averaging {2:#,##0} points per second...{0}", Environment.NewLine, migratedPoints, migratedPoints / (DateTime.UtcNow.Ticks - readStartTime).ToSeconds());

                    // m_pointCount is an estimate, so progress is approximate
                    if (m_pointCount > 0)
                        UpdateProgressBar((int)((migratedPoints / (double)m_pointCount) * 100.0D));

                    displayMessageCount += MessageInterval;
                }

                if (m_formClosing)
                    break;
            }

            if (m_formClosing)
            {
                ShowUpdateMessage("Migration canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                // Ensure all buffered writes reach the destination before reporting
                client.FlushSnapDB();
                ShowUpdateMessage("*** Migration Complete ***");
                ShowUpdateMessage("Total migration time {0}", (DateTime.UtcNow.Ticks - operationStartTime).ToElapsedTimeString(3));
                UpdateProgressBar(100);
            }
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage("Failure during migration: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
        CloseGSFHistorianArchive();
    }
}
/// <summary>
/// Creates a new <see cref="DataPoint"/> containing a copy of this point's state.
/// </summary>
/// <returns>An independent copy of this point.</returns>
public DataPoint Clone()
{
    // Delegate to the copy-into overload to populate the new instance.
    DataPoint copy = new DataPoint();
    Clone(copy);
    return copy;
}
/// <summary>
/// Thread-pool entry point that compares two historian connections (source and
/// destination SNAPdb instances) over a user-specified time range, producing
/// validity, duplication, loss and completeness statistics, with optional log files
/// summarizing results per hour. Counters and block analysis are offloaded to
/// real-time process queues so the main merge loop stays responsive.
/// </summary>
/// <param name="state">Expected to be a Dictionary&lt;string, string&gt; of named comparison parameters.</param>
private void CompareArchives(object state)
{
    try
    {
        Ticks operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
            throw new ArgumentNullException(nameof(state), "Could not interpret thread state as parameters dictionary");

        // Connection and comparison parameters
        string sourceHostAddress = parameters["sourceHostAddress"];
        int sourceDataPort = int.Parse(parameters["sourceDataPort"]);
        int sourceMetadataPort = int.Parse(parameters["sourceMetaDataPort"]);
        string sourceInstanceName = parameters["sourceInstanceName"];
        string destinationHostAddress = parameters["destinationHostAddress"];
        int destinationDataPort = int.Parse(parameters["destinationDataPort"]);
        int destinationMetaDataPort = int.Parse(parameters["destinationMetaDataPort"]);
        string destinationInstanceName = parameters["destinationInstanceName"];
        int metaDataTimeout = int.Parse(parameters["metaDataTimeout"]) * 1000;
        ulong startTime = (ulong)DateTime.Parse(parameters["startTime"]).Ticks;
        ulong endTime = (ulong)DateTime.Parse(parameters["endTime"]).Ticks;
        int messageInterval = int.Parse(parameters["messageInterval"]);
        bool enableLogging = parameters["enableLogging"].ParseBoolean();
        string logFileName = parameters["logFileName"];

        m_frameRate = int.Parse(parameters["m_frameRate"]);
        m_subsecondOffset = (ulong)(Ticks.PerSecond / m_frameRate);

        ShowUpdateMessage("Loading source connection metadata...");
        List<Metadata> sourceMetadata = Metadata.Query(sourceHostAddress, sourceMetadataPort, metaDataTimeout);

        ShowUpdateMessage("Loading destination connection metadata...");
        List<Metadata> destinationMetadata = Metadata.Query(destinationHostAddress, destinationMetaDataPort, metaDataTimeout);

        Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;
        ShowUpdateMessage("*** Metadata Load Complete ***");
        ShowUpdateMessage($"Total metadata load time {totalTime.ToElapsedTimeString(3)}...");

        ShowUpdateMessage("Analyzing metadata...");

        operationStartTime = DateTime.UtcNow.Ticks;

        Dictionary<ulong, ulong> sourcePointMappings = new Dictionary<ulong, ulong>();
        Dictionary<ulong, ulong> destinationPointMappings = new Dictionary<ulong, ulong>();
        Dictionary<ulong, string> pointDevices = new Dictionary<ulong, string>();
        HashSet<ulong> frequencies = new HashSet<ulong>();
        StreamWriter writer = null;

        // Log file name template: "<name>-{0}<ext>" where {0} becomes a category tag
        string logFileNameTemplate = $"{FilePath.GetDirectoryName(logFileName)}{FilePath.GetFileNameWithoutExtension(logFileName)}-{{0}}{FilePath.GetExtension(logFileName)}";

        if (enableLogging)
            writer = new StreamWriter(FilePath.GetAbsolutePath(string.Format(logFileNameTemplate, "metadata")));

        AnalyzeMetadata(writer, startTime, endTime, sourceMetadata, destinationMetadata, sourcePointMappings, destinationPointMappings, pointDevices, frequencies);

        TimeSpan range = new TimeSpan((long)(endTime - startTime));
        double timespan = range.TotalSeconds;

        // Comparison counters — updated on the merge loop or via the process queues
        long comparedPoints = 0;
        long validPoints = 0;
        long invalidPoints = 0;
        long missingSourcePoints = 0;
        long missingDestinationPoints = 0;
        long duplicateSourcePoints = 0;
        long duplicateDestinationPoints = 0;
        long receivedSourcePoints = 0;
        long receivedDestinationPoints = 0;
        long processedPoints = 0;
        long receivedAsNaNPoints = 0;
        long receivedAsNaNSourcePoints = 0;
        long receivedAsNaNDestinationPoints = 0;
        long baseSourceSubsecondLoss = 0;
        long baseDestinationSubsecondLoss = 0;
        long baseSourcePointLoss = 0;
        long baseDestinationPointLoss = 0;
        ulong currentTimestamp;
        ulong lastSourceTimestamp = 0;
        ulong lastDestinationTimestamp = 0;
        long displayMessageCount = messageInterval;

        Dictionary<ulong, Dictionary<int, long[]>> hourlySummaries = new Dictionary<ulong, Dictionary<int, long[]>>(); // PointID[HourIndex[ValueCount[7]]]

        DataPoint sourcePoint = new DataPoint();
        DataPoint destinationPoint = new DataPoint();
        DataPoint referencePoint = new DataPoint();
        Ticks readStartTime = DateTime.UtcNow.Ticks;

        // Hour bucket index for a timestamp, relative to startTime
        Func<ulong, int> getHourIndex = timestamp => (int)Math.Truncate(new TimeSpan((long)(timestamp - startTime)).TotalHours);

        Func<ulong, Dictionary<int, long[]>> getHourlySummary = pointID => hourlySummaries.GetOrAdd(pointID, id => new Dictionary<int, long[]>());

        // A point is "missing" when it is NaN, or zero for frequency measurements
        Func<DataPoint, bool> pointIsMissing = dataPoint =>
        {
            float pointValue = dataPoint.ValueAsSingle;
            return float.IsNaN(pointValue) || (frequencies.Contains(dataPoint.PointID) && pointValue == 0.0F);
        };

        Action<DataPoint, int> incrementValueCount = (dataPoint, valueIndex) =>
        {
            switch (valueIndex)
            {
                case ComparedValue:
                    if (pointIsMissing(dataPoint))
                        receivedAsNaNPoints++;
                    break;
                case MissingSourceValue:
                    if (pointIsMissing(dataPoint))
                        receivedAsNaNDestinationPoints++;
                    break;
                case MissingDestinationValue:
                    if (pointIsMissing(dataPoint))
                        receivedAsNaNSourcePoints++;
                    break;
            }

            Dictionary<int, long[]> summary = getHourlySummary(dataPoint.PointID);
            int hourIndex = getHourIndex(dataPoint.Timestamp);
            long[] counts = summary.GetOrAdd(hourIndex, index => new long[7]);
            counts[valueIndex] = counts[valueIndex] + 1;
        };

        ProcessQueue<Tuple<DataPoint, int>>.ProcessItemsFunctionSignature processActions = items =>
        {
            foreach (Tuple<DataPoint, int> item in items)
                incrementValueCount(item.Item1, item.Item2);
        };

        // Per-block analysis: any mapped ID absent from a block counts as base loss
        ProcessQueue<Dictionary<ulong, DataPoint[]>>.ProcessItemFunctionSignature processDataBlock = dataBlock =>
        {
            HashSet<ulong> missingSourceIDs = new HashSet<ulong>(sourcePointMappings.Values);
            missingSourceIDs.ExceptWith(dataBlock.Values.Select(points => points[SourcePoint]).Where(point => (object)point != null).Select(point => point.PointID));
            baseSourcePointLoss += missingSourceIDs.Count;

            HashSet<ulong> missingDestinationIDs = new HashSet<ulong>(destinationPointMappings.Values);
            missingDestinationIDs.ExceptWith(dataBlock.Values.Select(points => points[DestinationPoint]).Where(point => (object)point != null).Select(point => point.PointID));
            baseDestinationPointLoss += missingDestinationIDs.Count;
        };

        ProcessQueue<Tuple<DataPoint, int>> counterIncrements = ProcessQueue<Tuple<DataPoint, int>>.CreateRealTimeQueue(processActions);
        ProcessQueue<Dictionary<ulong, DataPoint[]>> dataBlockQueue = ProcessQueue<Dictionary<ulong, DataPoint[]>>.CreateRealTimeQueue(processDataBlock);

        // Logging helpers queue a clone so the queue never sees a mutated point
        Action<DataPoint> logMissingSourceValue = dataPoint => counterIncrements.Add(new Tuple<DataPoint, int>(dataPoint.Clone(), MissingSourceValue));
        Action<DataPoint> logMissingDestinationValue = dataPoint => counterIncrements.Add(new Tuple<DataPoint, int>(dataPoint.Clone(), MissingDestinationValue));
        Action<DataPoint> logValidValue = dataPoint => counterIncrements.Add(new Tuple<DataPoint, int>(dataPoint.Clone(), ValidValue));
        Action<DataPoint> logInvalidValue = dataPoint => counterIncrements.Add(new Tuple<DataPoint, int>(dataPoint.Clone(), InvalidValue));
        Action<DataPoint> logComparedValue = dataPoint => counterIncrements.Add(new Tuple<DataPoint, int>(dataPoint.Clone(), ComparedValue));
        Action<DataPoint> logDuplicateSourceValue = dataPoint => counterIncrements.Add(new Tuple<DataPoint, int>(dataPoint.Clone(), DuplicateSourceValue));
        Action<DataPoint> logDuplicateDestinationValue = dataPoint => counterIncrements.Add(new Tuple<DataPoint, int>(dataPoint.Clone(), DuplicateDestinationValue));

        // Maps a destination point onto the shared referencePoint using source IDs.
        // NOTE(review): referencePoint is a single shared instance — safe only while
        // logging helpers clone it before queuing, as they do above.
        Func<DataPoint, DataPoint> getReferencePoint = dataPoint =>
        {
            referencePoint.PointID = sourcePointMappings[dataPoint.PointID];
            referencePoint.Timestamp = dataPoint.Timestamp;
            return referencePoint;
        };

        ShowUpdateMessage("Comparing archives...");

        // Start process queues
        counterIncrements.Start();
        dataBlockQueue.Start();

        using (SnapDBClient sourceClient = new SnapDBClient(sourceHostAddress, sourceDataPort, sourceInstanceName, startTime, endTime, m_frameRate, sourcePointMappings.Values))
        using (SnapDBClient destinationClient = new SnapDBClient(destinationHostAddress, destinationDataPort, destinationInstanceName, startTime, endTime, m_frameRate, destinationPointMappings.Values))
        {
            // Scan to first record in source
            if (!sourceClient.ReadNext(sourcePoint))
                throw new InvalidOperationException("No data for specified time range in source connection!");

            // Scan to first record in destination
            if (!destinationClient.ReadNext(destinationPoint))
                throw new InvalidOperationException("No data for specified time range in destination connection!");

            while (!m_formClosing)
            {
                // Compare timestamps of current records
                int timeComparison = DataPoint.CompareTimestamps(sourcePoint.Timestamp, destinationPoint.Timestamp, m_frameRate);
                bool readSuccess = true;

                if (timeComparison == 0)
                {
                    // Check for entire blocks of missing data for expected frame rate
                    CheckLastTimestamp(ref lastSourceTimestamp, ref baseSourceSubsecondLoss, sourcePoint.Timestamp);
                    CheckLastTimestamp(ref lastDestinationTimestamp, ref baseDestinationSubsecondLoss, destinationPoint.Timestamp);
                }
                else
                {
                    // If timestamps do not match, synchronize starting times of source and destination datasets
                    while (readSuccess && timeComparison != 0)
                    {
                        if (timeComparison < 0)
                        {
                            // Destination has no data where source begins, scan source forward to match destination start time
                            do
                            {
                                CheckLastTimestamp(ref lastSourceTimestamp, ref baseSourceSubsecondLoss, sourcePoint.Timestamp);

                                // Only count defined points as missing
                                if (destinationPointMappings.ContainsKey(sourcePoint.PointID))
                                {
                                    missingDestinationPoints++;

                                    if (enableLogging)
                                        logMissingDestinationValue(sourcePoint);
                                }

                                if (!sourceClient.ReadNext(sourcePoint))
                                {
                                    readSuccess = false;
                                    break;
                                }

                                timeComparison = DataPoint.CompareTimestamps(sourcePoint.Timestamp, destinationPoint.Timestamp, m_frameRate);
                            }
                            while (timeComparison < 0);

                            CheckLastTimestamp(ref lastDestinationTimestamp, ref baseDestinationSubsecondLoss, destinationPoint.Timestamp);
                        }
                        else // timeComparison > 0
                        {
                            // Source has no data where destination begins, scan destination forward to match source start time
                            do
                            {
                                CheckLastTimestamp(ref lastDestinationTimestamp, ref baseDestinationSubsecondLoss, destinationPoint.Timestamp);

                                // Only count defined points as missing
                                if (sourcePointMappings.ContainsKey(destinationPoint.PointID))
                                {
                                    missingSourcePoints++;

                                    if (enableLogging)
                                        logMissingSourceValue(getReferencePoint(destinationPoint));
                                }

                                if (!destinationClient.ReadNext(destinationPoint))
                                {
                                    readSuccess = false;
                                    break;
                                }

                                timeComparison = DataPoint.CompareTimestamps(sourcePoint.Timestamp, destinationPoint.Timestamp, m_frameRate);
                            }
                            while (timeComparison > 0);

                            CheckLastTimestamp(ref lastSourceTimestamp, ref baseSourceSubsecondLoss, sourcePoint.Timestamp);
                        }
                    }
                }

                // Finished with data read
                if (!readSuccess)
                {
                    ShowUpdateMessage("*** End of data read encountered ***");
                    break;
                }

                // Read all time adjusted points for the current timestamp into a single block
                currentTimestamp = DataPoint.RoundTimestamp(sourcePoint.Timestamp, m_frameRate);
                Dictionary<ulong, DataPoint[]> dataBlock = new Dictionary<ulong, DataPoint[]>();

                // Load source data for current timestamp
                do
                {
                    if (!sourceClient.ReadNext(sourcePoint))
                    {
                        readSuccess = false;
                        break;
                    }

                    if (!destinationPointMappings.ContainsKey(sourcePoint.PointID))
                        continue;

                    receivedSourcePoints++;
                    timeComparison = DataPoint.CompareTimestamps(sourcePoint.Timestamp, currentTimestamp, m_frameRate);

                    if (timeComparison == 0)
                    {
                        DataPoint[] points = dataBlock.GetOrAdd(sourcePoint.PointID, id => new DataPoint[2]);

                        if ((object)points[SourcePoint] != null)
                        {
                            duplicateSourcePoints++;

                            if (enableLogging)
                                logDuplicateSourceValue(points[SourcePoint]);
                        }

                        points[SourcePoint] = sourcePoint.Clone();
                    }
                }
                while (timeComparison == 0);

                // Finished with data read
                if (!readSuccess)
                {
                    ShowUpdateMessage("*** End of data read encountered ***");
                    break;
                }

                // Load destination data for current timestamp
                do
                {
                    if (!destinationClient.ReadNext(destinationPoint))
                    {
                        readSuccess = false;
                        break;
                    }

                    if (!sourcePointMappings.ContainsKey(destinationPoint.PointID))
                        continue;

                    receivedDestinationPoints++;
                    timeComparison = DataPoint.CompareTimestamps(destinationPoint.Timestamp, currentTimestamp, m_frameRate);

                    if (timeComparison == 0)
                    {
                        DataPoint[] points = dataBlock.GetOrAdd(sourcePointMappings[destinationPoint.PointID], id => new DataPoint[2]);

                        if ((object)points[DestinationPoint] != null)
                        {
                            duplicateDestinationPoints++;

                            if (enableLogging)
                                logDuplicateDestinationValue(getReferencePoint(points[DestinationPoint]));
                        }

                        points[DestinationPoint] = destinationPoint.Clone();
                    }
                }
                while (timeComparison == 0);

                // Finished with data read - destination is short of source read
                if (!readSuccess)
                {
                    ShowUpdateMessage("*** End of data read encountered: destination read was short of data available in source ***");
                    break;
                }

                // Classify each point pair in the block: missing on one side, or compared
                foreach (DataPoint[] points in dataBlock.Values)
                {
                    if ((object)points[SourcePoint] == null)
                    {
                        missingSourcePoints++;

                        if (enableLogging)
                            logMissingSourceValue(getReferencePoint(points[DestinationPoint]));
                    }
                    else if ((object)points[DestinationPoint] == null)
                    {
                        missingDestinationPoints++;

                        if (enableLogging)
                            logMissingDestinationValue(points[SourcePoint]);
                    }
                    else
                    {
                        if (points[SourcePoint].Value == points[DestinationPoint].Value)
                        {
                            if (points[SourcePoint].Flags == points[DestinationPoint].Flags)
                            {
                                validPoints++;

                                if (enableLogging)
                                    logValidValue(points[SourcePoint]);
                            }
                            else
                            {
                                invalidPoints++;

                                if (enableLogging)
                                    logInvalidValue(points[SourcePoint]);
                            }
                        }
                        else
                        {
                            invalidPoints++;

                            if (enableLogging)
                                logInvalidValue(points[SourcePoint]);
                        }

                        comparedPoints++;

                        if (enableLogging)
                            logComparedValue(points[SourcePoint]);
                    }

                    if (processedPoints++ == displayMessageCount)
                    {
                        if (processedPoints % (5 * messageInterval) == 0)
                            ShowUpdateMessage($"{Environment.NewLine}*** Processed {processedPoints:N0} points so far averaging {processedPoints / (DateTime.UtcNow.Ticks - readStartTime).ToSeconds():N0} points per second ***{Environment.NewLine}");
                        else
                            ShowUpdateMessage($"{Environment.NewLine}Found {validPoints:N0} valid, {invalidPoints:N0} invalid and {missingSourcePoints + missingDestinationPoints:N0} missing points during compare so far...{Environment.NewLine}");

                        displayMessageCount += messageInterval;

                        UpdateProgressBar((int)((1.0D - new Ticks((long)(endTime - sourcePoint.Timestamp)).ToSeconds() / timespan) * 100.0D));
                    }
                }

                // Queue data block to complete analyze process
                dataBlockQueue.Add(dataBlock);
            }

            if (m_formClosing)
            {
                ShowUpdateMessage("Comparison canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                totalTime = DateTime.UtcNow.Ticks - operationStartTime;
                ShowUpdateMessage("*** Compare Complete ***");
                UpdateProgressBar(100);

                // Drain queued counter/block work before computing final statistics
                ShowUpdateMessage("Completing count processing...");
                counterIncrements.Flush();
                dataBlockQueue.Flush();
                ShowUpdateMessage("*** Count Processing Complete ***");

                long totalReceivedAsNaNSourcePoints = receivedAsNaNSourcePoints + receivedAsNaNPoints;
                long totalReceivedAsNaNDestinationPoints = receivedAsNaNDestinationPoints + receivedAsNaNPoints;
                long totalBaseSourcePointLoss = totalReceivedAsNaNSourcePoints + baseSourcePointLoss + baseSourceSubsecondLoss * sourceMetadata.Count;
                long totalBaseDestinationPointLoss = totalReceivedAsNaNDestinationPoints + baseDestinationPointLoss + baseDestinationSubsecondLoss * destinationMetadata.Count;

                // Since NaN values are actually "received", create adjusted counts for comparing actual network loss
                long networkSourcePointLoss = totalBaseSourcePointLoss - totalReceivedAsNaNSourcePoints;
                long networkDestinationPointLoss = totalBaseDestinationPointLoss - totalReceivedAsNaNDestinationPoints;

                long expectedPoints = (long)(timespan * m_frameRate * sourceMetadata.Count);
                double sourceCompleteness = Math.Round(receivedSourcePoints / (double)expectedPoints * 100000.0D) / 1000.0D;
                double destinationCompleteness = Math.Round(receivedDestinationPoints / (double)expectedPoints * 100000.0D) / 1000.0D;

                string overallSummary =
                    $"Total compare time {totalTime.ToElapsedTimeString(3)} at {expectedPoints / totalTime.ToSeconds():N0} points per second.{Environment.NewLine}" +
                    $"{Environment.NewLine}" +
                    $" Meta-data points: {sourceMetadata.Count}{Environment.NewLine}" +
                    $" Time-span covered: {timespan:N0} seconds: {Ticks.FromSeconds(timespan).ToElapsedTimeString(2)}{Environment.NewLine}" +
                    $" Expected points: {expectedPoints:N0}{Environment.NewLine}" +
                    $" Processed points: {processedPoints:N0}{Environment.NewLine}" +
                    $" Compared points: {comparedPoints:N0}{Environment.NewLine}" +
                    $" Valid points: {validPoints:N0}{Environment.NewLine}" +
                    $" Invalid points: {invalidPoints:N0}{Environment.NewLine}" +
                    $" Received NaN source points: {totalReceivedAsNaNSourcePoints:N0}{Environment.NewLine}" +
                    $" Received NaN dest points: {totalReceivedAsNaNDestinationPoints:N0}{Environment.NewLine}" +
                    $" Missing source points: {missingSourcePoints:N0}{Environment.NewLine}" +
                    $" Missing destination points: {missingDestinationPoints:N0}{Environment.NewLine}" +
                    $" Base source point loss: {baseSourcePointLoss:N0}{Environment.NewLine}" +
                    $"Base destination point loss: {baseDestinationPointLoss:N0}{Environment.NewLine}" +
                    $" Source duplicates: {duplicateSourcePoints:N0}{Environment.NewLine}" +
                    $" Destination duplicates: {duplicateDestinationPoints:N0}{Environment.NewLine}" +
                    $" Overall data accuracy: {Math.Round(validPoints / (double)comparedPoints * 100000.0D) / 1000.0D:N3}%{Environment.NewLine}" +
                    $"{Environment.NewLine}" +
                    $" Missing source sub-seconds: {baseSourceSubsecondLoss:N0}, outage of {new Ticks(baseSourceSubsecondLoss * (long)m_subsecondOffset).ToElapsedTimeString(2)}{Environment.NewLine}" +
                    $" Total base source loss: {totalBaseSourcePointLoss:N0}: {Math.Round(totalBaseSourcePointLoss / (double)expectedPoints.NotZero(1) * 100000.0D) / 1000.0D:N3}%{Environment.NewLine}" +
                    $" Network source loss: {networkSourcePointLoss:N0}: {Math.Round(networkSourcePointLoss / (double)expectedPoints.NotZero(1) * 100000.0D) / 1000.0D:N3}%{Environment.NewLine}" +
                    $" Received source points: {receivedSourcePoints:N0}{Environment.NewLine}" +
                    $" Source completeness: {sourceCompleteness:N3}%{Environment.NewLine}" +
                    $"{Environment.NewLine}" +
                    $" Missing dest sub-seconds: {baseDestinationSubsecondLoss:N0}, outage of {new Ticks(baseDestinationSubsecondLoss * (long)m_subsecondOffset).ToElapsedTimeString(2)}{Environment.NewLine}" +
                    $"Total base destination loss: {totalBaseDestinationPointLoss:N0}: {Math.Round(totalBaseDestinationPointLoss / (double)expectedPoints.NotZero(1) * 100000.0D) / 1000.0D:N3}%{Environment.NewLine}" +
                    $" Network destination loss: {networkDestinationPointLoss:N0}: {Math.Round(networkDestinationPointLoss / (double)expectedPoints.NotZero(1) * 100000.0D) / 1000.0D:N3}%{Environment.NewLine}" +
                    $"Received destination points: {receivedDestinationPoints:N0}{Environment.NewLine}" +
                    $" Destination completeness: {destinationCompleteness:N3}%{Environment.NewLine}" +
                    $"{Environment.NewLine}" +
                    $">> {Math.Round(missingSourcePoints / (double)(comparedPoints + missingSourcePoints).NotZero(1) * 100000.0D) / 1000.0D:N3}% missing from source that exists in destination{Environment.NewLine}" +
                    $">> {Math.Round(missingDestinationPoints / (double)(comparedPoints + missingDestinationPoints).NotZero(1) * 100000.0D) / 1000.0D:N3}% missing from destination that exists in source{Environment.NewLine}";

                ShowUpdateMessage(overallSummary);

                if (enableLogging)
                {
                    using (writer = new StreamWriter(FilePath.GetAbsolutePath(string.Format(logFileNameTemplate, "overall"))))
                        writer.WriteLine(overallSummary);

                    // Round the hour count up when the range ends on a partial hour
                    int totalHours = getHourIndex(endTime);

                    if (timespan < 3600.0D)
                    {
                        totalHours++;
                    }
                    else
                    {
                        TimeSpan span = new TimeSpan((long)endTime);

                        if (span.Minutes != 0 || span.Seconds != 0)
                            totalHours++;
                    }

                    WriteLogFiles(logFileNameTemplate, totalHours, sourceMetadata, hourlySummaries, pointDevices);
                }
            }
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage($"Failure during historian comparison: {ex.Message}");
    }
    finally
    {
        EnableGoButton(true);
    }
}