/// <summary>
/// Thread-pool entry point that migrates legacy version 1 historian archives (*.d) found in the
/// configured source (and optional offload) directories into new-format files, converting files
/// in parallel up to the configured thread limit.
/// </summary>
/// <param name="state">
/// Expected to be a <see cref="Dictionary{TKey,TValue}"/> of string parameters containing at least
/// "sourceFilesLocation", "sourceFilesOffloadLocation", "destinationFilesLocation", "instanceName",
/// "maxThreads", "directoryNamingMethod" and "ignoreDuplicates".
/// </param>
private void FastMigration(object state)
{
    try
    {
        long operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
        {
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");
        }

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
        {
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));
        }

        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        // Offload location is optional — only scanned when the directory actually exists
        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
        {
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));
        }

        int maxThreads, methodIndex;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
        {
            maxThreads = m_defaultMaxThreads;
        }

        // Fall back to year/month directory naming when the parameter is missing or out of range
        if (!int.TryParse(parameters["directoryNamingMethod"], out methodIndex) || !Enum.IsDefined(typeof(ArchiveDirectoryMethod), methodIndex))
        {
            methodIndex = (int)ArchiveDirectoryMethod.YearThenMonth;
        }

        ArchiveDirectoryMethod method = (ArchiveDirectoryMethod)methodIndex;
        HistorianFileEncoding encoder = new HistorianFileEncoding();
        string[] sourceFileNames = sourceFiles.ToArray();
        string destinationPath = parameters["destinationFilesLocation"];
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();
        long totalProcessedPoints = 0;
        int processedFiles = 0;

        SetProgressMaximum(sourceFileNames.Length);

        // NOTE(review): duplicateDataOutput is shared by all parallel workers — assumes
        // GSFHistorianStream serializes its writes internally; confirm before raising maxThreads.
        using (StreamWriter duplicateDataOutput = File.CreateText(FilePath.GetAbsolutePath("DuplicateData.txt")))
        {
            Parallel.ForEach(sourceFileNames, new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Migrating \"{0}\"...", FilePath.GetFileName(sourceFileName));

                long fileConversionStartTime = DateTime.UtcNow.Ticks;
                long migratedPoints;

                if (ignoreDuplicates)
                {
                    // Migrate using SortedTreeFileSimpleWriter.CreateNonSequential() with raw unsorted historian file read
                    migratedPoints = ConvertArchiveFile.ConvertVersion1FileIgnoreDuplicates(
                        sourceFileName,
                        GetDestinationFileName(sourceFileName, instanceName, destinationPath, method),
                        encoder.EncodingMethod);
                }
                else
                {
                    // Migrate using SortedTreeFileSimpleWriter.Create() with API sorted historian file read with duplicate handling
                    using (GSFHistorianStream stream = new GSFHistorianStream(this, sourceFileName, instanceName, duplicateDataOutput))
                    {
                        string completeFileName = GetDestinationFileName(stream.ArchiveFile, sourceFileName, instanceName, destinationPath, method);

                        // Write to a pending "~d2i" file first so partially converted output is never mistaken for a finished file
                        string pendingFileName = Path.Combine(FilePath.GetDirectoryName(completeFileName), FilePath.GetFileNameWithoutExtension(completeFileName) + ".~d2i");

                        SortedTreeFileSimpleWriter<HistorianKey, HistorianValue>.Create(pendingFileName, completeFileName, 4096, null, encoder.EncodingMethod, stream);

                        migratedPoints = stream.PointCount;
                    }
                }

                Ticks totalTime = DateTime.UtcNow.Ticks - fileConversionStartTime;

                ShowUpdateMessage(
                    "{0}Migrated {1:#,##0} points for last file in {2} at {3:#,##0} points per second.{0}",
                    Environment.NewLine,
                    migratedPoints,
                    totalTime.ToElapsedTimeString(3),
                    migratedPoints / totalTime.ToSeconds());

                // Fix: use the value returned by the atomic increment instead of re-reading the
                // shared counter — a separate read could observe another thread's update and
                // report a stale or non-monotonic progress value.
                int completedFiles = Interlocked.Increment(ref processedFiles);
                Interlocked.Add(ref totalProcessedPoints, migratedPoints);
                UpdateProgressBar(completedFiles);

                if (m_formClosing)
                {
                    loopState.Break();
                }
            });
        }

        if (m_formClosing)
        {
            ShowUpdateMessage("Migration canceled.");
            UpdateProgressBar(0);
        }
        else
        {
            Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;
            ShowUpdateMessage("*** Migration Complete ***");
            ShowUpdateMessage("Total migration time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), totalProcessedPoints / totalTime.ToSeconds());
            UpdateProgressBar(sourceFileNames.Length);
        }
    }
    catch (Exception ex)
    {
        // Surface the failure to the UI rather than crashing the worker thread
        ShowUpdateMessage("Failure during migration: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}
/// <summary>
/// Thread-pool entry point that migrates legacy version 1 historian archives (*.d) found in the
/// configured source (and optional offload) directories into new-format files, converting files
/// in parallel up to the configured thread limit.
/// </summary>
/// <param name="state">
/// Expected to be a <see cref="Dictionary{TKey,TValue}"/> of string parameters containing at least
/// "sourceFilesLocation", "sourceFilesOffloadLocation", "destinationFilesLocation", "instanceName",
/// "maxThreads", "directoryNamingMethod" and "ignoreDuplicates".
/// </param>
/// <remarks>
/// NOTE(review): this appears to be a duplicate definition of <c>FastMigration</c> within the same
/// file — two identical signatures in one class will not compile; confirm one copy should be removed.
/// </remarks>
private void FastMigration(object state)
{
    try
    {
        long operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
            throw new ArgumentNullException("state", "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));

        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        // Offload location is optional — only scanned when the directory actually exists
        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));

        int maxThreads, methodIndex;

        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
            maxThreads = m_defaultMaxThreads;

        // Fall back to year/month directory naming when the parameter is missing or out of range
        if (!int.TryParse(parameters["directoryNamingMethod"], out methodIndex) || !Enum.IsDefined(typeof(ArchiveDirectoryMethod), methodIndex))
            methodIndex = (int)ArchiveDirectoryMethod.YearThenMonth;

        ArchiveDirectoryMethod method = (ArchiveDirectoryMethod)methodIndex;
        HistorianFileEncoding encoder = new HistorianFileEncoding();
        string[] sourceFileNames = sourceFiles.ToArray();
        string destinationPath = parameters["destinationFilesLocation"];
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();
        long totalProcessedPoints = 0;
        int processedFiles = 0;

        SetProgressMaximum(sourceFileNames.Length);

        // NOTE(review): duplicateDataOutput is shared by all parallel workers — assumes
        // GSFHistorianStream serializes its writes internally; confirm before raising maxThreads.
        using (StreamWriter duplicateDataOutput = File.CreateText(FilePath.GetAbsolutePath("DuplicateData.txt")))
        {
            Parallel.ForEach(sourceFileNames, new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Migrating \"{0}\"...", FilePath.GetFileName(sourceFileName));

                long fileConversionStartTime = DateTime.UtcNow.Ticks;
                long migratedPoints;

                if (ignoreDuplicates)
                {
                    // Migrate using SortedTreeFileSimpleWriter.CreateNonSequential() with raw unsorted historian file read
                    migratedPoints = ConvertArchiveFile.ConvertVersion1FileIgnoreDuplicates(
                        sourceFileName,
                        GetDestinationFileName(sourceFileName, instanceName, destinationPath, method),
                        encoder.EncodingMethod);
                }
                else
                {
                    // Migrate using SortedTreeFileSimpleWriter.Create() with API sorted historian file read with duplicate handling
                    using (GSFHistorianStream stream = new GSFHistorianStream(this, sourceFileName, instanceName, duplicateDataOutput))
                    {
                        string completeFileName = GetDestinationFileName(stream.ArchiveFile, sourceFileName, instanceName, destinationPath, method);

                        // Write to a pending "~d2i" file first so partially converted output is never mistaken for a finished file
                        string pendingFileName = Path.Combine(FilePath.GetDirectoryName(completeFileName), FilePath.GetFileNameWithoutExtension(completeFileName) + ".~d2i");

                        SortedTreeFileSimpleWriter<HistorianKey, HistorianValue>.Create(pendingFileName, completeFileName, 4096, null, encoder.EncodingMethod, stream);

                        migratedPoints = stream.PointCount;
                    }
                }

                Ticks totalTime = DateTime.UtcNow.Ticks - fileConversionStartTime;

                ShowUpdateMessage(
                    "{0}Migrated {1:#,##0} points for last file in {2} at {3:#,##0} points per second.{0}",
                    Environment.NewLine,
                    migratedPoints,
                    totalTime.ToElapsedTimeString(3),
                    migratedPoints / totalTime.ToSeconds());

                // Fix: use the value returned by the atomic increment instead of re-reading the
                // shared counter — a separate read could observe another thread's update and
                // report a stale or non-monotonic progress value.
                int completedFiles = Interlocked.Increment(ref processedFiles);
                Interlocked.Add(ref totalProcessedPoints, migratedPoints);
                UpdateProgressBar(completedFiles);

                if (m_formClosing)
                    loopState.Break();
            });
        }

        if (m_formClosing)
        {
            ShowUpdateMessage("Migration canceled.");
            UpdateProgressBar(0);
        }
        else
        {
            Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;
            ShowUpdateMessage("*** Migration Complete ***");
            ShowUpdateMessage("Total migration time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), totalProcessedPoints / totalTime.ToSeconds());
            UpdateProgressBar(sourceFileNames.Length);
        }
    }
    catch (Exception ex)
    {
        // Surface the failure to the UI rather than crashing the worker thread
        ShowUpdateMessage("Failure during migration: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}