/// <summary>
/// Background operation that compares every point in the GSF historian source
/// archives (*.d files) against the SnapDB destination archive, tallying valid,
/// invalid, missing and duplicate points, and reporting progress to the UI.
/// Missing points are appended to "MissingData.txt".
/// </summary>
/// <param name="state">
/// Thread state; must be a Dictionary&lt;string, string&gt; of UI parameters
/// (source/offload/destination locations, instance name, max threads, etc.).
/// </param>
private void CompareArchives(object state)
{
    try
    {
        // Number of compared points between progress status messages
        const int MessageInterval = 1000000;

        Ticks operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
        {
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");
        }

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
        {
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));
        }

        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        // Offload location is optional -- include its archive files when present
        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
        {
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));
        }

        // Start calculating total number of source points
        m_pointCount = 0;
        ThreadPool.QueueUserWorkItem(CalculateSourcePointCount, new[] { parameters["sourceFilesLocation"], parameters["sourceFilesOffloadLocation"] });

        int maxThreads;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
        {
            maxThreads = m_defaultMaxThreads;
        }

        string[] sourceFileNames = sourceFiles.ToArray();
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();

        // Shared tallies -- updated from the parallel loop via Interlocked operations
        long comparedPoints = 0;
        long validPoints = 0;
        long invalidPoints = 0;
        long missingPoints = 0;
        long duplicatePoints = 0;
        long resyncs = 0;
        long displayMessageCount = MessageInterval;

        SetProgressMaximum(100);

        using (SnapDBEngine engine = new SnapDBEngine(this, instanceName, parameters["destinationFilesLocation"], parameters["targetFileSize"], parameters["directoryNamingMethod"]))
        using (StreamWriter missingDataOutput = File.CreateText(FilePath.GetAbsolutePath("MissingData.txt")))
        {
            // Compare source files in parallel, one file per worker
            Parallel.ForEach(sourceFileNames, new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Comparing \"{0}\"...", FilePath.GetFileName(sourceFileName));

                DataPoint sourcePoint = new DataPoint();
                DataPoint destinationPoint = new DataPoint();
                DataPoint lastPoint = new DataPoint();
                Ticks readStartTime = DateTime.UtcNow.Ticks;
                bool updateProgress, resync, readInitialized = false;

                using (GSFHistorianStream sourceStream = new GSFHistorianStream(this, sourceFileName, instanceName))
                using (SnapDBClient client = new SnapDBClient(engine, sourceStream.InstanceName))
                {
                    while (true)
                    {
                        if (sourceStream.ReadNext(sourcePoint))
                        {
                            if (ignoreDuplicates)
                            {
                                bool success = true;

                                // Skip consecutive source points with the same ID and timestamp
                                while (success && sourcePoint.PointID == lastPoint.PointID && sourcePoint.Timestamp == lastPoint.Timestamp)
                                {
                                    Interlocked.Increment(ref duplicatePoints);
                                    success = sourceStream.ReadNext(sourcePoint);
                                }

                                // Finished with source read
                                if (!success)
                                {
                                    break;
                                }
                            }

                            if (readInitialized)
                            {
                                // Sequential destination read once initial scan has succeeded
                                if (!client.ReadNextSnapDBPoint(destinationPoint))
                                {
                                    ShowUpdateMessage("*** Compare for \"{0}\" Failed: Destination Read Was Short ***", FilePath.GetFileName(sourceFileName));
                                    break;
                                }
                            }
                            else
                            {
                                // First iteration: position destination reader at the first source point
                                readInitialized = client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);
                            }
                        }
                        else
                        {
                            // Finished with source read
                            break;
                        }

                        resync = false;

                        do
                        {
                            if (resync)
                            {
                                Interlocked.Increment(ref resyncs);
                            }

                            // See if source and destination points match
                            if (sourcePoint.PointID == destinationPoint.PointID && sourcePoint.Timestamp == destinationPoint.Timestamp)
                            {
                                if (sourcePoint.Value == destinationPoint.Value)
                                {
                                    if (sourcePoint.Flags == destinationPoint.Flags)
                                    {
                                        Interlocked.Increment(ref validPoints);
                                    }
                                    else
                                    {
                                        Interlocked.Increment(ref invalidPoints);
                                    }
                                }
                                else
                                {
                                    Interlocked.Increment(ref invalidPoints);
                                }

                                resync = false;
                            }
                            else
                            {
                                // Attempt to resynchronize readers by rescanning to point if we didn't find point and are not resynchronizing already
                                resync = !resync && client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);

                                if (!resync)
                                {
                                    // Point could not be located in destination -- record it as missing
                                    Interlocked.Increment(ref missingPoints);

                                    lock (missingDataOutput)
                                        missingDataOutput.WriteLine("[{0:00000}@{1:yyyy-MM-dd HH:mm:ss.fff}] = {2}({3})", sourcePoint.PointID, new DateTime((long)sourcePoint.Timestamp, DateTimeKind.Utc), sourcePoint.ValueAsSingle, sourcePoint.Flags);
                                }
                            }
                        }
                        while (resync);

                        // Update last point
                        if (ignoreDuplicates)
                        {
                            sourcePoint.Clone(lastPoint);
                        }

                        updateProgress = false;

                        // NOTE(review): displayMessageCount is read and updated by multiple workers
                        // without synchronization; a lost update (worst case: a skipped or repeated
                        // status message) is possible -- confirm this is acceptable best-effort UI.
                        if (Interlocked.Increment(ref comparedPoints) == displayMessageCount)
                        {
                            if (comparedPoints % (5 * MessageInterval) == 0)
                            {
                                ShowUpdateMessage("{0}*** Compared {1:#,##0} points so far averaging {2:#,##0} points per second ***{0}", Environment.NewLine, comparedPoints, comparedPoints / (DateTime.UtcNow.Ticks - readStartTime).ToSeconds());
                            }
                            else
                            {
                                ShowUpdateMessage("{0}Found {1:#,##0} valid, {2:#,##0} invalid and {3:#,##0} missing points during compare so far...{0}", Environment.NewLine, validPoints, invalidPoints, missingPoints);
                            }

                            updateProgress = true;
                            displayMessageCount += MessageInterval;
                        }

                        // Note that point count used here is estimated
                        if (updateProgress && m_pointCount > 0)
                        {
                            UpdateProgressBar((int)((comparedPoints / (double)m_pointCount) * 100.0D));
                        }
                    }
                }

                // Stop scheduling further files when the form is closing
                if (m_formClosing)
                {
                    loopState.Break();
                }
            });

            if (m_formClosing)
            {
                ShowUpdateMessage("Migration canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;

                ShowUpdateMessage("*** Compare Complete ***");
                ShowUpdateMessage("Total compare time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), comparedPoints / totalTime.ToSeconds());
                UpdateProgressBar(100);

                // NOTE(review): accuracy evaluates to NaN when comparedPoints + missingPoints == 0
                // (e.g., no source points at all) -- verify that display result is acceptable.
                ShowUpdateMessage("{0}" +
                    "Total points compared: {1:#,##0}{0}" +
                    " Valid points: {2:#,##0}{0}" +
                    " Invalid points: {3:#,##0}{0}" +
                    " Missing points: {4:#,##0}{0}" +
                    " Duplicate points: {5:#,##0}{0}" +
                    " Resynchronizations: {6:#,##0}{0}" +
                    " Source point count: {7:#,##0}{0}" +
                    "{0}Migrated data conversion {8:##0.000}% accurate",
                    Environment.NewLine, comparedPoints, validPoints, invalidPoints, missingPoints, duplicatePoints, resyncs, comparedPoints + missingPoints,
                    Math.Truncate(validPoints / (double)(comparedPoints + missingPoints) * 100000.0D) / 1000.0D);

                if (ignoreDuplicates && invalidPoints > 0 && duplicatePoints >= invalidPoints)
                {
                    ShowUpdateMessage(
                        "{0}Note: Since duplicated source data was being ignored and duplicate points outnumber (or are equal to) " +
                        "invalid points, the invalid data is likely an artifact of comparing a duplicated source record that was " +
                        "not archived into the destination.{0}",
                        Environment.NewLine);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Surface any failure to the UI message log rather than crashing the worker
        ShowUpdateMessage("Failure during compare: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}
/// <summary>
/// Background operation that compares every point in the GSF historian source
/// archives (*.d files) against the SnapDB destination archive, tallying valid,
/// invalid, missing and duplicate points, and reporting progress to the UI.
/// Missing points are appended to "MissingData.txt".
/// </summary>
/// <param name="state">
/// Thread state; must be a Dictionary&lt;string, string&gt; of UI parameters
/// (source/offload/destination locations, instance name, max threads, etc.).
/// </param>
/// <remarks>
/// NOTE(review): this appears to duplicate the CompareArchives definition above
/// with only formatting differences -- two identical signatures in one class will
/// not compile; confirm whether one copy should be removed.
/// </remarks>
private void CompareArchives(object state)
{
    try
    {
        // Number of compared points between progress status messages
        const int MessageInterval = 1000000;

        Ticks operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));

        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        // Offload location is optional -- include its archive files when present
        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));

        // Start calculating total number of source points
        m_pointCount = 0;
        ThreadPool.QueueUserWorkItem(CalculateSourcePointCount, new[] { parameters["sourceFilesLocation"], parameters["sourceFilesOffloadLocation"] });

        int maxThreads;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
            maxThreads = m_defaultMaxThreads;

        string[] sourceFileNames = sourceFiles.ToArray();
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();

        // Shared tallies -- updated from the parallel loop via Interlocked operations
        long comparedPoints = 0;
        long validPoints = 0;
        long invalidPoints = 0;
        long missingPoints = 0;
        long duplicatePoints = 0;
        long resyncs = 0;
        long displayMessageCount = MessageInterval;

        SetProgressMaximum(100);

        using (SnapDBEngine engine = new SnapDBEngine(this, instanceName, parameters["destinationFilesLocation"], parameters["targetFileSize"], parameters["directoryNamingMethod"]))
        using (StreamWriter missingDataOutput = File.CreateText(FilePath.GetAbsolutePath("MissingData.txt")))
        {
            // Compare source files in parallel, one file per worker
            Parallel.ForEach(sourceFileNames, new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Comparing \"{0}\"...", FilePath.GetFileName(sourceFileName));

                DataPoint sourcePoint = new DataPoint();
                DataPoint destinationPoint = new DataPoint();
                DataPoint lastPoint = new DataPoint();
                Ticks readStartTime = DateTime.UtcNow.Ticks;
                bool updateProgress, resync, readInitialized = false;

                using (GSFHistorianStream sourceStream = new GSFHistorianStream(this, sourceFileName, instanceName))
                using (SnapDBClient client = new SnapDBClient(engine, sourceStream.InstanceName))
                {
                    while (true)
                    {
                        if (sourceStream.ReadNext(sourcePoint))
                        {
                            if (ignoreDuplicates)
                            {
                                bool success = true;

                                // Skip consecutive source points with the same ID and timestamp
                                while (success && sourcePoint.PointID == lastPoint.PointID && sourcePoint.Timestamp == lastPoint.Timestamp)
                                {
                                    Interlocked.Increment(ref duplicatePoints);
                                    success = sourceStream.ReadNext(sourcePoint);
                                }

                                // Finished with source read
                                if (!success)
                                    break;
                            }

                            if (readInitialized)
                            {
                                // Sequential destination read once initial scan has succeeded
                                if (!client.ReadNextSnapDBPoint(destinationPoint))
                                {
                                    ShowUpdateMessage("*** Compare for \"{0}\" Failed: Destination Read Was Short ***", FilePath.GetFileName(sourceFileName));
                                    break;
                                }
                            }
                            else
                            {
                                // First iteration: position destination reader at the first source point
                                readInitialized = client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);
                            }
                        }
                        else
                        {
                            // Finished with source read
                            break;
                        }

                        resync = false;

                        do
                        {
                            if (resync)
                                Interlocked.Increment(ref resyncs);

                            // See if source and destination points match
                            if (sourcePoint.PointID == destinationPoint.PointID && sourcePoint.Timestamp == destinationPoint.Timestamp)
                            {
                                if (sourcePoint.Value == destinationPoint.Value)
                                {
                                    if (sourcePoint.Flags == destinationPoint.Flags)
                                        Interlocked.Increment(ref validPoints);
                                    else
                                        Interlocked.Increment(ref invalidPoints);
                                }
                                else
                                {
                                    Interlocked.Increment(ref invalidPoints);
                                }

                                resync = false;
                            }
                            else
                            {
                                // Attempt to resynchronize readers by rescanning to point if we didn't find point and are not resynchronizing already
                                resync = !resync && client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);

                                if (!resync)
                                {
                                    // Point could not be located in destination -- record it as missing
                                    Interlocked.Increment(ref missingPoints);

                                    lock (missingDataOutput)
                                        missingDataOutput.WriteLine("[{0:00000}@{1:yyyy-MM-dd HH:mm:ss.fff}] = {2}({3})", sourcePoint.PointID, new DateTime((long)sourcePoint.Timestamp, DateTimeKind.Utc), sourcePoint.ValueAsSingle, sourcePoint.Flags);
                                }
                            }
                        }
                        while (resync);

                        // Update last point
                        if (ignoreDuplicates)
                            sourcePoint.Clone(lastPoint);

                        updateProgress = false;

                        // NOTE(review): displayMessageCount is read and updated by multiple workers
                        // without synchronization; a lost update (worst case: a skipped or repeated
                        // status message) is possible -- confirm this is acceptable best-effort UI.
                        if (Interlocked.Increment(ref comparedPoints) == displayMessageCount)
                        {
                            if (comparedPoints % (5 * MessageInterval) == 0)
                                ShowUpdateMessage("{0}*** Compared {1:#,##0} points so far averaging {2:#,##0} points per second ***{0}", Environment.NewLine, comparedPoints, comparedPoints / (DateTime.UtcNow.Ticks - readStartTime).ToSeconds());
                            else
                                ShowUpdateMessage("{0}Found {1:#,##0} valid, {2:#,##0} invalid and {3:#,##0} missing points during compare so far...{0}", Environment.NewLine, validPoints, invalidPoints, missingPoints);

                            updateProgress = true;
                            displayMessageCount += MessageInterval;
                        }

                        // Note that point count used here is estimated
                        if (updateProgress && m_pointCount > 0)
                            UpdateProgressBar((int)((comparedPoints / (double)m_pointCount) * 100.0D));
                    }
                }

                // Stop scheduling further files when the form is closing
                if (m_formClosing)
                    loopState.Break();
            });

            if (m_formClosing)
            {
                ShowUpdateMessage("Migration canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;

                ShowUpdateMessage("*** Compare Complete ***");
                ShowUpdateMessage("Total compare time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), comparedPoints / totalTime.ToSeconds());
                UpdateProgressBar(100);

                // NOTE(review): accuracy evaluates to NaN when comparedPoints + missingPoints == 0
                // (e.g., no source points at all) -- verify that display result is acceptable.
                ShowUpdateMessage("{0}" +
                    "Total points compared: {1:#,##0}{0}" +
                    " Valid points: {2:#,##0}{0}" +
                    " Invalid points: {3:#,##0}{0}" +
                    " Missing points: {4:#,##0}{0}" +
                    " Duplicate points: {5:#,##0}{0}" +
                    " Resynchronizations: {6:#,##0}{0}" +
                    " Source point count: {7:#,##0}{0}" +
                    "{0}Migrated data conversion {8:##0.000}% accurate",
                    Environment.NewLine, comparedPoints, validPoints, invalidPoints, missingPoints, duplicatePoints, resyncs, comparedPoints + missingPoints,
                    Math.Truncate(validPoints / (double)(comparedPoints + missingPoints) * 100000.0D) / 1000.0D);

                if (ignoreDuplicates && invalidPoints > 0 && duplicatePoints >= invalidPoints)
                    ShowUpdateMessage(
                        "{0}Note: Since duplicated source data was being ignored and duplicate points outnumber (or are equal to) " +
                        "invalid points, the invalid data is likely an artifact of comparing a duplicated source record that was " +
                        "not archived into the destination.{0}",
                        Environment.NewLine);
            }
        }
    }
    catch (Exception ex)
    {
        // Surface any failure to the UI message log rather than crashing the worker
        ShowUpdateMessage("Failure during compare: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}
/// <summary>
/// Background operation that migrates GSF historian archives (*.d files) into
/// SnapDB (.d2i) files, processing source files in parallel. Duplicate records
/// encountered during sorted migration are logged to "DuplicateData.txt".
/// </summary>
/// <param name="state">
/// Thread state; must be a Dictionary&lt;string, string&gt; of UI parameters
/// (source/offload/destination locations, instance name, max threads,
/// directory naming method, etc.).
/// </param>
private void FastMigration(object state)
{
    try
    {
        long operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
        {
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");
        }

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
        {
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));
        }

        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        // Offload location is optional -- include its archive files when present
        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
        {
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));
        }

        int maxThreads, methodIndex;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
        {
            maxThreads = m_defaultMaxThreads;
        }

        // Fall back to year-then-month directory naming when parameter is absent or invalid
        if (!int.TryParse(parameters["directoryNamingMethod"], out methodIndex) || !Enum.IsDefined(typeof(ArchiveDirectoryMethod), methodIndex))
        {
            methodIndex = (int)ArchiveDirectoryMethod.YearThenMonth;
        }

        ArchiveDirectoryMethod method = (ArchiveDirectoryMethod)methodIndex;
        HistorianFileEncoding encoder = new HistorianFileEncoding();
        string[] sourceFileNames = sourceFiles.ToArray();
        string destinationPath = parameters["destinationFilesLocation"];
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();

        // Shared tallies -- updated from the parallel loop via Interlocked operations
        long totalProcessedPoints = 0;
        int processedFiles = 0;

        SetProgressMaximum(sourceFileNames.Length);

        using (StreamWriter duplicateDataOutput = File.CreateText(FilePath.GetAbsolutePath("DuplicateData.txt")))
        {
            // Migrate source files in parallel, one file per worker
            Parallel.ForEach(sourceFileNames, new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Migrating \"{0}\"...", FilePath.GetFileName(sourceFileName));

                long fileConversionStartTime = DateTime.UtcNow.Ticks;
                long migratedPoints;

                if (ignoreDuplicates)
                {
                    // Migrate using SortedTreeFileSimpleWriter.CreateNonSequential() with raw unsorted historian file read
                    migratedPoints = ConvertArchiveFile.ConvertVersion1FileIgnoreDuplicates(sourceFileName, GetDestinationFileName(sourceFileName, instanceName, destinationPath, method), encoder.EncodingMethod);
                }
                else
                {
                    // Migrate using SortedTreeFileSimpleWriter.Create() with API sorted historian file read with duplicate handling
                    using (GSFHistorianStream stream = new GSFHistorianStream(this, sourceFileName, instanceName, duplicateDataOutput))
                    {
                        string completeFileName = GetDestinationFileName(stream.ArchiveFile, sourceFileName, instanceName, destinationPath, method);

                        // Write to a pending "~d2i" file that is renamed to the final name on completion
                        string pendingFileName = Path.Combine(FilePath.GetDirectoryName(completeFileName), FilePath.GetFileNameWithoutExtension(completeFileName) + ".~d2i");

                        SortedTreeFileSimpleWriter<HistorianKey, HistorianValue>.Create(pendingFileName, completeFileName, 4096, null, encoder.EncodingMethod, stream);

                        migratedPoints = stream.PointCount;
                    }
                }

                Ticks totalTime = DateTime.UtcNow.Ticks - fileConversionStartTime;

                ShowUpdateMessage(
                    "{0}Migrated {1:#,##0} points for last file in {2} at {3:#,##0} points per second.{0}",
                    Environment.NewLine, migratedPoints, totalTime.ToElapsedTimeString(3), migratedPoints / totalTime.ToSeconds());

                Interlocked.Increment(ref processedFiles);
                Interlocked.Add(ref totalProcessedPoints, migratedPoints);

                // NOTE(review): processedFiles is read here without Interlocked.Read /
                // CompareExchange, so the progress value shown may lag slightly under
                // contention -- presumably acceptable for a UI progress bar; confirm.
                UpdateProgressBar(processedFiles);

                // Stop scheduling further files when the form is closing
                if (m_formClosing)
                {
                    loopState.Break();
                }
            });
        }

        if (m_formClosing)
        {
            ShowUpdateMessage("Migration canceled.");
            UpdateProgressBar(0);
        }
        else
        {
            Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;

            ShowUpdateMessage("*** Migration Complete ***");
            ShowUpdateMessage("Total migration time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), totalProcessedPoints / totalTime.ToSeconds());
            UpdateProgressBar(sourceFileNames.Length);
        }
    }
    catch (Exception ex)
    {
        // Surface any failure to the UI message log rather than crashing the worker
        ShowUpdateMessage("Failure during migration: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}
/// <summary>
/// Background operation that migrates GSF historian archives (*.d files) into
/// SnapDB (.d2i) files, processing source files in parallel. Duplicate records
/// encountered during sorted migration are logged to "DuplicateData.txt".
/// </summary>
/// <param name="state">
/// Thread state; must be a Dictionary&lt;string, string&gt; of UI parameters
/// (source/offload/destination locations, instance name, max threads,
/// directory naming method, etc.).
/// </param>
/// <remarks>
/// NOTE(review): this appears to duplicate the FastMigration definition above
/// with only formatting differences -- two identical signatures in one class will
/// not compile; confirm whether one copy should be removed.
/// </remarks>
private void FastMigration(object state)
{
    try
    {
        long operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));

        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        // Offload location is optional -- include its archive files when present
        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));

        int maxThreads, methodIndex;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
            maxThreads = m_defaultMaxThreads;

        // Fall back to year-then-month directory naming when parameter is absent or invalid
        if (!int.TryParse(parameters["directoryNamingMethod"], out methodIndex) || !Enum.IsDefined(typeof(ArchiveDirectoryMethod), methodIndex))
            methodIndex = (int)ArchiveDirectoryMethod.YearThenMonth;

        ArchiveDirectoryMethod method = (ArchiveDirectoryMethod)methodIndex;
        HistorianFileEncoding encoder = new HistorianFileEncoding();
        string[] sourceFileNames = sourceFiles.ToArray();
        string destinationPath = parameters["destinationFilesLocation"];
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();

        // Shared tallies -- updated from the parallel loop via Interlocked operations
        long totalProcessedPoints = 0;
        int processedFiles = 0;

        SetProgressMaximum(sourceFileNames.Length);

        using (StreamWriter duplicateDataOutput = File.CreateText(FilePath.GetAbsolutePath("DuplicateData.txt")))
        {
            // Migrate source files in parallel, one file per worker
            Parallel.ForEach(sourceFileNames, new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Migrating \"{0}\"...", FilePath.GetFileName(sourceFileName));

                long fileConversionStartTime = DateTime.UtcNow.Ticks;
                long migratedPoints;

                if (ignoreDuplicates)
                {
                    // Migrate using SortedTreeFileSimpleWriter.CreateNonSequential() with raw unsorted historian file read
                    migratedPoints = ConvertArchiveFile.ConvertVersion1FileIgnoreDuplicates(sourceFileName, GetDestinationFileName(sourceFileName, instanceName, destinationPath, method), encoder.EncodingMethod);
                }
                else
                {
                    // Migrate using SortedTreeFileSimpleWriter.Create() with API sorted historian file read with duplicate handling
                    using (GSFHistorianStream stream = new GSFHistorianStream(this, sourceFileName, instanceName, duplicateDataOutput))
                    {
                        string completeFileName = GetDestinationFileName(stream.ArchiveFile, sourceFileName, instanceName, destinationPath, method);

                        // Write to a pending "~d2i" file that is renamed to the final name on completion
                        string pendingFileName = Path.Combine(FilePath.GetDirectoryName(completeFileName), FilePath.GetFileNameWithoutExtension(completeFileName) + ".~d2i");

                        SortedTreeFileSimpleWriter<HistorianKey, HistorianValue>.Create(pendingFileName, completeFileName, 4096, null, encoder.EncodingMethod, stream);

                        migratedPoints = stream.PointCount;
                    }
                }

                Ticks totalTime = DateTime.UtcNow.Ticks - fileConversionStartTime;

                ShowUpdateMessage(
                    "{0}Migrated {1:#,##0} points for last file in {2} at {3:#,##0} points per second.{0}",
                    Environment.NewLine, migratedPoints, totalTime.ToElapsedTimeString(3), migratedPoints / totalTime.ToSeconds());

                Interlocked.Increment(ref processedFiles);
                Interlocked.Add(ref totalProcessedPoints, migratedPoints);

                // NOTE(review): processedFiles is read here without Interlocked.Read /
                // CompareExchange, so the progress value shown may lag slightly under
                // contention -- presumably acceptable for a UI progress bar; confirm.
                UpdateProgressBar(processedFiles);

                // Stop scheduling further files when the form is closing
                if (m_formClosing)
                    loopState.Break();
            });
        }

        if (m_formClosing)
        {
            ShowUpdateMessage("Migration canceled.");
            UpdateProgressBar(0);
        }
        else
        {
            Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;

            ShowUpdateMessage("*** Migration Complete ***");
            ShowUpdateMessage("Total migration time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), totalProcessedPoints / totalTime.ToSeconds());
            UpdateProgressBar(sourceFileNames.Length);
        }
    }
    catch (Exception ex)
    {
        // Surface any failure to the UI message log rather than crashing the worker
        ShowUpdateMessage("Failure during migration: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}