/// <summary>
/// Creates a new <see cref="SnapDBClient"/> connected to the server hosted by the
/// given engine and attached to the specified database instance.
/// </summary>
/// <param name="engine">Engine that hosts the SnapDB server to connect to.</param>
/// <param name="instanceName">Name of the historian database instance to open.</param>
public SnapDBClient(SnapDBEngine engine, string instanceName)
{
    // Pre-allocate reusable key/value instances used for subsequent read/write operations
    m_key = new HistorianKey();
    m_lastKey = new HistorianKey();
    m_value = new HistorianValue();

    // Connect to the engine's server and attach to the named historian database
    SnapClient connection = SnapClient.Connect(engine.ServerHost);
    m_client = connection;
    m_database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName);
}
/// <summary>
/// Thread entry point for the archive compare operation. Enumerates the GSF historian
/// source archives (*.d files) and, in parallel (one worker per file), compares every
/// source point against the destination SnapDB instance, tallying valid, invalid,
/// missing and duplicate points; points not found in the destination are logged to
/// "MissingData.txt".
/// </summary>
/// <param name="state">
/// Operation parameters as a Dictionary&lt;string, string&gt; (source/offload/destination
/// locations, instance name, target file size, directory naming method, max threads and
/// ignore-duplicates flag); any other value raises an ArgumentNullException.
/// </param>
private void CompareArchives(object state)
{
    try
    {
        const int MessageInterval = 1000000; // Points compared between status messages

        Ticks operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
        {
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");
        }

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
        {
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));
        }

        // Primary source location, plus optional offload location when it exists
        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
        {
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));
        }

        // Start calculating total number of source points
        // (runs in the background; m_pointCount is only an estimate used for progress below)
        m_pointCount = 0;
        ThreadPool.QueueUserWorkItem(CalculateSourcePointCount, new[] { parameters["sourceFilesLocation"], parameters["sourceFilesOffloadLocation"] });

        int maxThreads;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
        {
            maxThreads = m_defaultMaxThreads;
        }

        string[] sourceFileNames = sourceFiles.ToArray();
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();

        // Shared tallies - updated via Interlocked from the parallel workers
        long comparedPoints = 0;
        long validPoints = 0;
        long invalidPoints = 0;
        long missingPoints = 0;
        long duplicatePoints = 0;
        long resyncs = 0;
        long displayMessageCount = MessageInterval;

        SetProgressMaximum(100);

        using (SnapDBEngine engine = new SnapDBEngine(this, instanceName, parameters["destinationFilesLocation"], parameters["targetFileSize"], parameters["directoryNamingMethod"]))
        using (StreamWriter missingDataOutput = File.CreateText(FilePath.GetAbsolutePath("MissingData.txt")))
        {
            // One worker per source file, throttled to maxThreads
            Parallel.ForEach(sourceFileNames, new ParallelOptions
            {
                MaxDegreeOfParallelism = maxThreads
            },
            (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Comparing \"{0}\"...", FilePath.GetFileName(sourceFileName));

                DataPoint sourcePoint = new DataPoint();
                DataPoint destinationPoint = new DataPoint();
                DataPoint lastPoint = new DataPoint();
                Ticks readStartTime = DateTime.UtcNow.Ticks;
                bool updateProgress, resync, readInitialized = false;

                using (GSFHistorianStream sourceStream = new GSFHistorianStream(this, sourceFileName, instanceName))
                using (SnapDBClient client = new SnapDBClient(engine, sourceStream.InstanceName))
                {
                    while (true)
                    {
                        if (sourceStream.ReadNext(sourcePoint))
                        {
                            if (ignoreDuplicates)
                            {
                                // Skip any run of source points repeating the previous ID/timestamp,
                                // counting each skipped record as a duplicate
                                bool success = true;

                                while (success && sourcePoint.PointID == lastPoint.PointID && sourcePoint.Timestamp == lastPoint.Timestamp)
                                {
                                    Interlocked.Increment(ref duplicatePoints);
                                    success = sourceStream.ReadNext(sourcePoint);
                                }

                                // Finished with source read
                                if (!success)
                                {
                                    break;
                                }
                            }

                            if (readInitialized)
                            {
                                // Sequential read of the next destination point
                                if (!client.ReadNextSnapDBPoint(destinationPoint))
                                {
                                    ShowUpdateMessage("*** Compare for \"{0}\" Failed: Destination Read Was Short ***", FilePath.GetFileName(sourceFileName));
                                    break;
                                }
                            }
                            else
                            {
                                // First pass: position the destination reader at the first source point
                                readInitialized = client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);
                            }
                        }
                        else
                        {
                            // Finished with source read
                            break;
                        }

                        resync = false;

                        // Runs once normally; runs a second time when a rescan repositioned the reader
                        do
                        {
                            if (resync)
                            {
                                Interlocked.Increment(ref resyncs);
                            }

                            // See if source and destination points match
                            if (sourcePoint.PointID == destinationPoint.PointID && sourcePoint.Timestamp == destinationPoint.Timestamp)
                            {
                                // Same point: compare value, then flags
                                if (sourcePoint.Value == destinationPoint.Value)
                                {
                                    if (sourcePoint.Flags == destinationPoint.Flags)
                                    {
                                        Interlocked.Increment(ref validPoints);
                                    }
                                    else
                                    {
                                        Interlocked.Increment(ref invalidPoints);
                                    }
                                }
                                else
                                {
                                    Interlocked.Increment(ref invalidPoints);
                                }

                                resync = false;
                            }
                            else
                            {
                                // Attempt to resynchronize readers by rescanning to point if we didn't find point and are not resynchronizing already
                                resync = !resync && client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);

                                if (!resync)
                                {
                                    // Point could not be located in the destination - record it
                                    Interlocked.Increment(ref missingPoints);

                                    lock (missingDataOutput)
                                        missingDataOutput.WriteLine("[{0:00000}@{1:yyyy-MM-dd HH:mm:ss.fff}] = {2}({3})", sourcePoint.PointID, new DateTime((long)sourcePoint.Timestamp, DateTimeKind.Utc), sourcePoint.ValueAsSingle, sourcePoint.Flags);
                                }
                            }
                        }
                        while (resync);

                        // Update last point
                        if (ignoreDuplicates)
                        {
                            sourcePoint.Clone(lastPoint);
                        }

                        updateProgress = false;

                        // Periodic status: only the worker that hits the threshold reports
                        if (Interlocked.Increment(ref comparedPoints) == displayMessageCount)
                        {
                            if (comparedPoints % (5 * MessageInterval) == 0)
                            {
                                ShowUpdateMessage("{0}*** Compared {1:#,##0} points so far averaging {2:#,##0} points per second ***{0}", Environment.NewLine, comparedPoints, comparedPoints / (DateTime.UtcNow.Ticks - readStartTime).ToSeconds());
                            }
                            else
                            {
                                ShowUpdateMessage("{0}Found {1:#,##0} valid, {2:#,##0} invalid and {3:#,##0} missing points during compare so far...{0}", Environment.NewLine, validPoints, invalidPoints, missingPoints);
                            }

                            updateProgress = true;
                            displayMessageCount += MessageInterval;
                        }

                        // Note that point count used here is estimated
                        if (updateProgress && m_pointCount > 0)
                        {
                            UpdateProgressBar((int)((comparedPoints / (double)m_pointCount) * 100.0D));
                        }
                    }
                }

                // Stop scheduling further files once the form starts closing
                if (m_formClosing)
                {
                    loopState.Break();
                }
            });

            if (m_formClosing)
            {
                ShowUpdateMessage("Migration canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                // Final summary and accuracy statistics
                Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;
                ShowUpdateMessage("*** Compare Complete ***");
                ShowUpdateMessage("Total compare time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), comparedPoints / totalTime.ToSeconds());
                UpdateProgressBar(100);
                ShowUpdateMessage("{0}" +
                    "Total points compared: {1:#,##0}{0}" +
                    " Valid points: {2:#,##0}{0}" +
                    " Invalid points: {3:#,##0}{0}" +
                    " Missing points: {4:#,##0}{0}" +
                    " Duplicate points: {5:#,##0}{0}" +
                    " Resynchronizations: {6:#,##0}{0}" +
                    " Source point count: {7:#,##0}{0}" +
                    "{0}Migrated data conversion {8:##0.000}% accurate",
                    Environment.NewLine, comparedPoints, validPoints, invalidPoints, missingPoints, duplicatePoints, resyncs, comparedPoints + missingPoints,
                    Math.Truncate(validPoints / (double)(comparedPoints + missingPoints) * 100000.0D) / 1000.0D);

                if (ignoreDuplicates && invalidPoints > 0 && duplicatePoints >= invalidPoints)
                {
                    ShowUpdateMessage(
                        "{0}Note: Since duplicated source data was being ignored and duplicate points outnumber (or are equal to) " +
                        "invalid points, the invalid data is likely an artifact of comparing a duplicated source record that was " +
                        "not archived into the destination.{0}",
                        Environment.NewLine);
                }
            }
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage("Failure during compare: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}
/// <summary>
/// Thread entry point for the live migration operation: streams every point out of the
/// GSF historian source archive and writes it into the destination SnapDB instance.
/// </summary>
/// <param name="state">
/// Operation parameters as a Dictionary&lt;string, string&gt;; any other value raises
/// an ArgumentNullException.
/// </param>
private void LiveMigration(object state)
{
    try
    {
        // Number of migrated points between progress messages
        const int MessageInterval = 1000000;

        Ticks startTicks = DateTime.UtcNow.Ticks;
        Dictionary<string, string> settings = state as Dictionary<string, string>;

        if (settings == null)
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();

        // Open the source archive and report the effective instance name back to the UI
        string instanceName = OpenGSFHistorianArchive(settings["sourceFilesLocation"], settings["sourceFilesOffloadLocation"], settings["instanceName"]);
        UpdateInstanceName(instanceName);

        // Wait for the source historian to finish initializing, warning after five minutes
        if (!m_archiveReady.Wait(300000))
        {
            ShowUpdateMessage("Still initializing source historian after 5 minutes...");
            m_archiveReady.Wait();
        }

        bool ignoreDuplicates = settings["ignoreDuplicates"].ParseBoolean();
        DataPoint currentPoint = new DataPoint();
        long migratedPoints = 0;
        long nextMessageCount = MessageInterval;

        SetProgressMaximum(100);

        Ticks readStartTicks = DateTime.UtcNow.Ticks;

        using (SnapDBEngine engine = new SnapDBEngine(this, instanceName, settings["destinationFilesLocation"], settings["targetFileSize"], settings["directoryNamingMethod"]))
        using (SnapDBClient client = new SnapDBClient(engine, instanceName))
        {
            // Copy points one at a time until the source is exhausted or the form closes
            while (ReadNextGSFHistorianPoint(currentPoint))
            {
                client.WriteSnapDBData(currentPoint, ignoreDuplicates);
                migratedPoints++;

                if (migratedPoints == nextMessageCount)
                {
                    double averageRate = migratedPoints / (DateTime.UtcNow.Ticks - readStartTicks).ToSeconds();
                    ShowUpdateMessage("{0}Migrated {1:#,##0} points so far averaging {2:#,##0} points per second...{0}", Environment.NewLine, migratedPoints, averageRate);

                    // Progress is approximate - total point count is estimated in the background
                    if (m_pointCount > 0)
                        UpdateProgressBar((int)((migratedPoints / (double)m_pointCount) * 100.0D));

                    nextMessageCount += MessageInterval;
                }

                if (m_formClosing)
                    break;
            }

            if (m_formClosing)
            {
                ShowUpdateMessage("Migration canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                client.FlushSnapDB();
                ShowUpdateMessage("*** Migration Complete ***");
                ShowUpdateMessage("Total migration time {0}", (DateTime.UtcNow.Ticks - startTicks).ToElapsedTimeString(3));
                UpdateProgressBar(100);
            }
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage("Failure during migration: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
        CloseGSFHistorianArchive();
    }
}
/// <summary>
/// Thread entry point for the archive compare operation. Enumerates the GSF historian
/// source archives (*.d files) and, in parallel (one worker per file), compares every
/// source point against the destination SnapDB instance, tallying valid, invalid,
/// missing and duplicate points; points not found in the destination are logged to
/// "MissingData.txt".
/// </summary>
/// <param name="state">
/// Operation parameters as a Dictionary&lt;string, string&gt; (source/offload/destination
/// locations, instance name, target file size, directory naming method, max threads and
/// ignore-duplicates flag); any other value raises an ArgumentNullException.
/// </param>
private void CompareArchives(object state)
{
    try
    {
        const int MessageInterval = 1000000; // Points compared between status messages

        Ticks operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));

        // Primary source location, plus optional offload location when it exists
        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));

        // Start calculating total number of source points
        // (runs in the background; m_pointCount is only an estimate used for progress below)
        m_pointCount = 0;
        ThreadPool.QueueUserWorkItem(CalculateSourcePointCount, new[] { parameters["sourceFilesLocation"], parameters["sourceFilesOffloadLocation"] });

        int maxThreads;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
            maxThreads = m_defaultMaxThreads;

        string[] sourceFileNames = sourceFiles.ToArray();
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();

        // Shared tallies - updated via Interlocked from the parallel workers
        long comparedPoints = 0;
        long validPoints = 0;
        long invalidPoints = 0;
        long missingPoints = 0;
        long duplicatePoints = 0;
        long resyncs = 0;
        long displayMessageCount = MessageInterval;

        SetProgressMaximum(100);

        using (SnapDBEngine engine = new SnapDBEngine(this, instanceName, parameters["destinationFilesLocation"], parameters["targetFileSize"], parameters["directoryNamingMethod"]))
        using (StreamWriter missingDataOutput = File.CreateText(FilePath.GetAbsolutePath("MissingData.txt")))
        {
            // One worker per source file, throttled to maxThreads
            Parallel.ForEach(sourceFileNames, new ParallelOptions
            {
                MaxDegreeOfParallelism = maxThreads
            },
            (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Comparing \"{0}\"...", FilePath.GetFileName(sourceFileName));

                DataPoint sourcePoint = new DataPoint();
                DataPoint destinationPoint = new DataPoint();
                DataPoint lastPoint = new DataPoint();
                Ticks readStartTime = DateTime.UtcNow.Ticks;
                bool updateProgress, resync, readInitialized = false;

                using (GSFHistorianStream sourceStream = new GSFHistorianStream(this, sourceFileName, instanceName))
                using (SnapDBClient client = new SnapDBClient(engine, sourceStream.InstanceName))
                {
                    while (true)
                    {
                        if (sourceStream.ReadNext(sourcePoint))
                        {
                            if (ignoreDuplicates)
                            {
                                // Skip any run of source points repeating the previous ID/timestamp,
                                // counting each skipped record as a duplicate
                                bool success = true;

                                while (success && sourcePoint.PointID == lastPoint.PointID && sourcePoint.Timestamp == lastPoint.Timestamp)
                                {
                                    Interlocked.Increment(ref duplicatePoints);
                                    success = sourceStream.ReadNext(sourcePoint);
                                }

                                // Finished with source read
                                if (!success)
                                    break;
                            }

                            if (readInitialized)
                            {
                                // Sequential read of the next destination point
                                if (!client.ReadNextSnapDBPoint(destinationPoint))
                                {
                                    ShowUpdateMessage("*** Compare for \"{0}\" Failed: Destination Read Was Short ***", FilePath.GetFileName(sourceFileName));
                                    break;
                                }
                            }
                            else
                            {
                                // First pass: position the destination reader at the first source point
                                readInitialized = client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);
                            }
                        }
                        else
                        {
                            // Finished with source read
                            break;
                        }

                        resync = false;

                        // Runs once normally; runs a second time when a rescan repositioned the reader
                        do
                        {
                            if (resync)
                                Interlocked.Increment(ref resyncs);

                            // See if source and destination points match
                            if (sourcePoint.PointID == destinationPoint.PointID && sourcePoint.Timestamp == destinationPoint.Timestamp)
                            {
                                // Same point: compare value, then flags
                                if (sourcePoint.Value == destinationPoint.Value)
                                {
                                    if (sourcePoint.Flags == destinationPoint.Flags)
                                        Interlocked.Increment(ref validPoints);
                                    else
                                        Interlocked.Increment(ref invalidPoints);
                                }
                                else
                                {
                                    Interlocked.Increment(ref invalidPoints);
                                }

                                resync = false;
                            }
                            else
                            {
                                // Attempt to resynchronize readers by rescanning to point if we didn't find point and are not resynchronizing already
                                resync = !resync && client.ScanToSnapDBPoint(sourcePoint.Timestamp, sourcePoint.PointID, destinationPoint);

                                if (!resync)
                                {
                                    // Point could not be located in the destination - record it
                                    Interlocked.Increment(ref missingPoints);

                                    lock (missingDataOutput)
                                        missingDataOutput.WriteLine("[{0:00000}@{1:yyyy-MM-dd HH:mm:ss.fff}] = {2}({3})", sourcePoint.PointID, new DateTime((long)sourcePoint.Timestamp, DateTimeKind.Utc), sourcePoint.ValueAsSingle, sourcePoint.Flags);
                                }
                            }
                        }
                        while (resync);

                        // Update last point
                        if (ignoreDuplicates)
                            sourcePoint.Clone(lastPoint);

                        updateProgress = false;

                        // Periodic status: only the worker that hits the threshold reports
                        if (Interlocked.Increment(ref comparedPoints) == displayMessageCount)
                        {
                            if (comparedPoints % (5 * MessageInterval) == 0)
                                ShowUpdateMessage("{0}*** Compared {1:#,##0} points so far averaging {2:#,##0} points per second ***{0}", Environment.NewLine, comparedPoints, comparedPoints / (DateTime.UtcNow.Ticks - readStartTime).ToSeconds());
                            else
                                ShowUpdateMessage("{0}Found {1:#,##0} valid, {2:#,##0} invalid and {3:#,##0} missing points during compare so far...{0}", Environment.NewLine, validPoints, invalidPoints, missingPoints);

                            updateProgress = true;
                            displayMessageCount += MessageInterval;
                        }

                        // Note that point count used here is estimated
                        if (updateProgress && m_pointCount > 0)
                            UpdateProgressBar((int)((comparedPoints / (double)m_pointCount) * 100.0D));
                    }
                }

                // Stop scheduling further files once the form starts closing
                if (m_formClosing)
                    loopState.Break();
            });

            if (m_formClosing)
            {
                ShowUpdateMessage("Migration canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                // Final summary and accuracy statistics
                Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;
                ShowUpdateMessage("*** Compare Complete ***");
                ShowUpdateMessage("Total compare time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), comparedPoints / totalTime.ToSeconds());
                UpdateProgressBar(100);
                ShowUpdateMessage("{0}" +
                    "Total points compared: {1:#,##0}{0}" +
                    " Valid points: {2:#,##0}{0}" +
                    " Invalid points: {3:#,##0}{0}" +
                    " Missing points: {4:#,##0}{0}" +
                    " Duplicate points: {5:#,##0}{0}" +
                    " Resynchronizations: {6:#,##0}{0}" +
                    " Source point count: {7:#,##0}{0}" +
                    "{0}Migrated data conversion {8:##0.000}% accurate",
                    Environment.NewLine, comparedPoints, validPoints, invalidPoints, missingPoints, duplicatePoints, resyncs, comparedPoints + missingPoints,
                    Math.Truncate(validPoints / (double)(comparedPoints + missingPoints) * 100000.0D) / 1000.0D);

                if (ignoreDuplicates && invalidPoints > 0 && duplicatePoints >= invalidPoints)
                    ShowUpdateMessage(
                        "{0}Note: Since duplicated source data was being ignored and duplicate points outnumber (or are equal to) " +
                        "invalid points, the invalid data is likely an artifact of comparing a duplicated source record that was " +
                        "not archived into the destination.{0}",
                        Environment.NewLine);
            }
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage("Failure during compare: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}
/// <summary>
/// Performs the live data migration on a worker thread, reading the GSF historian
/// source archive point-by-point and writing each point into the destination SnapDB
/// instance.
/// </summary>
/// <param name="state">
/// Operation parameters as a Dictionary&lt;string, string&gt;; any other value raises
/// an ArgumentNullException.
/// </param>
private void LiveMigration(object state)
{
    try
    {
        const int MessageInterval = 1000000; // Points migrated between progress updates

        Ticks operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if (parameters == null)
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();

        string instanceName = OpenGSFHistorianArchive(
            parameters["sourceFilesLocation"],
            parameters["sourceFilesOffloadLocation"],
            parameters["instanceName"]);

        UpdateInstanceName(instanceName);

        // Block until the source historian reports ready, warning after five minutes
        bool archiveReadyInTime = m_archiveReady.Wait(300000);

        if (!archiveReadyInTime)
        {
            ShowUpdateMessage("Still initializing source historian after 5 minutes...");
            m_archiveReady.Wait();
        }

        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();
        DataPoint samplePoint = new DataPoint();
        long totalMigrated = 0;
        long reportThreshold = MessageInterval;

        SetProgressMaximum(100);

        Ticks readStartTime = DateTime.UtcNow.Ticks;

        using (SnapDBEngine engine = new SnapDBEngine(this, instanceName, parameters["destinationFilesLocation"], parameters["targetFileSize"], parameters["directoryNamingMethod"]))
        using (SnapDBClient client = new SnapDBClient(engine, instanceName))
        {
            // Stream every source point into the destination database
            while (ReadNextGSFHistorianPoint(samplePoint))
            {
                client.WriteSnapDBData(samplePoint, ignoreDuplicates);
                totalMigrated++;

                if (totalMigrated == reportThreshold)
                {
                    ShowUpdateMessage("{0}Migrated {1:#,##0} points so far averaging {2:#,##0} points per second...{0}", Environment.NewLine, totalMigrated, totalMigrated / (DateTime.UtcNow.Ticks - readStartTime).ToSeconds());

                    // Progress is approximate - total point count is estimated in the background
                    if (m_pointCount > 0)
                        UpdateProgressBar((int)((totalMigrated / (double)m_pointCount) * 100.0D));

                    reportThreshold += MessageInterval;
                }

                // Bail out promptly when the form is closing
                if (m_formClosing)
                    break;
            }

            if (m_formClosing)
            {
                ShowUpdateMessage("Migration canceled.");
                UpdateProgressBar(0);
            }
            else
            {
                client.FlushSnapDB();
                ShowUpdateMessage("*** Migration Complete ***");
                ShowUpdateMessage("Total migration time {0}", (DateTime.UtcNow.Ticks - operationStartTime).ToElapsedTimeString(3));
                UpdateProgressBar(100);
            }
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage("Failure during migration: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
        CloseGSFHistorianArchive();
    }
}