/// <summary>
/// Creates a new <see cref="SortedTreeTable{TKey,TValue}"/> based on the settings passed to this class.
/// Once created, it is up to the caller to make sure that this class is properly disposed of.
/// </summary>
/// <param name="startKey">the first key in the archive file</param>
/// <param name="endKey">the last key in the archive file</param>
/// <param name="estimatedSize">The estimated size of the file. -1 to ignore this feature and write to the first available directory.</param>
/// <param name="data">the stream of key/value pairs that will be written into the new archive file</param>
/// <param name="archiveIdCallback">the archiveId to assign to the new file.</param>
/// <returns>the newly created archive, opened as a <see cref="SortedTreeTable{TKey,TValue}"/></returns>
public SortedTreeTable<TKey, TValue> CreateArchiveFile(TKey startKey, TKey endKey, long estimatedSize, TreeStream<TKey, TValue> data, Action<Guid> archiveIdCallback)
{
    SimplifiedArchiveInitializerSettings settings = m_settings;

    // Write into a pending file first; the writer renames it to the final
    // extension once the write completes, so readers never see a partial file.
    string pendingFile = CreateArchiveName(GetPathWithEnoughSpace(estimatedSize), startKey, endKey);
    string finalFile = Path.ChangeExtension(pendingFile, settings.FinalExtension);

    SortedTreeFileSimpleWriter<TKey, TValue>.Create(pendingFile, finalFile, 4096, archiveIdCallback, settings.EncodingMethod, data, settings.Flags.ToArray());

    // Re-open the completed file read-only and hand the table back to the caller,
    // who owns its disposal.
    return SortedTreeFile.OpenFile(finalFile, true).OpenTable<TKey, TValue>();
}
/// <summary>
/// Converts a version 1 historian archive file to the version 2 format, retaining duplicate
/// keys (the derived <see cref="SortedPointBuffer"/> increments entry numbers instead of
/// removing duplicates), and reports timing for each phase of the conversion.
/// </summary>
/// <param name="oldFileName">path to the existing version 1 archive file to read</param>
/// <param name="newFileName">path for the new version 2 archive file; must not already exist</param>
/// <param name="compressionMethod">the encoding method to use when writing the new file</param>
/// <param name="readTime">elapsed ticks spent reading and buffering the old file's points</param>
/// <param name="sortTime">elapsed ticks spent sorting the buffered points</param>
/// <param name="writeTime">elapsed ticks spent writing the new file</param>
/// <returns>the number of points migrated</returns>
/// <exception cref="ArgumentException">when the old file is missing or the new file already exists</exception>
public static long ConvertVersion1FileHandleDuplicates(string oldFileName, string newFileName, EncodingDefinition compressionMethod, out long readTime, out long sortTime, out long writeTime)
{
    if (!File.Exists(oldFileName))
        throw new ArgumentException("Old file does not exist", nameof(oldFileName));

    if (File.Exists(newFileName))
        throw new ArgumentException("New file already exists", nameof(newFileName));

    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    long startTime;
    int count;

    // Derived SortedPointBuffer class increments EntryNumbers instead of removing duplicates
    SortedPointBuffer points;

    // Phase 1: read every point out of the old archive into the in-memory buffer
    startTime = DateTime.UtcNow.Ticks;

    using (OldHistorianReader archiveFile = new OldHistorianReader())
    {
        archiveFile.Open(oldFileName);

        count = archiveFile.PointsArchived;
        points = new SortedPointBuffer(count, false);

        foreach (OldHistorianReader.DataPoint point in archiveFile.Read())
        {
            key.Timestamp = (ulong)point.Timestamp.Ticks;
            key.PointID = (ulong)point.PointID;
            value.Value1 = BitConvert.ToUInt64(point.Value);
            value.Value3 = (ulong)point.Flags;
            points.TryEnqueue(key, value);
        }
    }

    readTime = DateTime.UtcNow.Ticks - startTime;

    // Phase 2: flipping to reading mode triggers the buffer's sort
    startTime = DateTime.UtcNow.Ticks;
    points.IsReadingMode = true;
    sortTime = DateTime.UtcNow.Ticks - startTime;

    // Phase 3: stream the sorted points into the new file; ".~d2i" marks the
    // intermediate pending file that is swapped to newFileName on completion
    startTime = DateTime.UtcNow.Ticks;
    SortedTreeFileSimpleWriter<HistorianKey, HistorianValue>.Create(Path.Combine(FilePath.GetDirectoryName(newFileName), FilePath.GetFileNameWithoutExtension(newFileName) + ".~d2i"), newFileName, 4096, null, compressionMethod, points);
    writeTime = DateTime.UtcNow.Ticks - startTime;

    return count;
}
/// <summary>
/// Converts a version 1 historian archive file to the version 2 format using a raw,
/// non-sequential stream read, ignoring any duplicate keys encountered.
/// </summary>
/// <param name="oldFileName">path to the existing version 1 archive file to read</param>
/// <param name="newFileName">path for the new version 2 archive file; must not already exist</param>
/// <param name="compressionMethod">the encoding method to use when writing the new file</param>
/// <returns>the number of points migrated</returns>
/// <exception cref="ArgumentException">when the old file is missing or the new file already exists</exception>
public static long ConvertVersion1FileIgnoreDuplicates(string oldFileName, string newFileName, EncodingDefinition compressionMethod)
{
    if (!File.Exists(oldFileName))
        throw new ArgumentException("Old file does not exist", nameof(oldFileName));

    if (File.Exists(newFileName))
        throw new ArgumentException("New file already exists", nameof(newFileName));

    using (OldHistorianStream reader = new OldHistorianStream(oldFileName))
    {
        // ".~d2i" is the intermediate pending file that the writer swaps to
        // newFileName once the non-sequential write completes
        SortedTreeFileSimpleWriter<HistorianKey, HistorianValue>.CreateNonSequential(Path.Combine(FilePath.GetDirectoryName(newFileName), FilePath.GetFileNameWithoutExtension(newFileName) + ".~d2i"), newFileName, 4096, null, compressionMethod, reader);

        return reader.PointCount;
    }
}
/// <summary>
/// Worker-thread entry point that migrates every version 1 historian (*.d) file found in the
/// configured source (and optional offload) directory to the version 2 format, processing
/// files in parallel and reporting progress/messages back to the UI.
/// </summary>
/// <param name="state">expected to be a <c>Dictionary&lt;string, string&gt;</c> of migration parameters
/// (source/destination locations, instance name, thread count, naming method, duplicate handling)</param>
private void FastMigration(object state)
{
    try
    {
        long operationStartTime = DateTime.UtcNow.Ticks;
        Dictionary<string, string> parameters = state as Dictionary<string, string>;

        if ((object)parameters == null)
            throw new ArgumentNullException(nameof(state), "Could not interpret thread state as parameters dictionary");

        ClearUpdateMessages();
        ShowUpdateMessage("Scanning source files...");

        if (!Directory.Exists(parameters["sourceFilesLocation"]))
            throw new DirectoryNotFoundException(string.Format("Source directory \"{0}\" not found.", parameters["sourceFilesLocation"]));

        IEnumerable<string> sourceFiles = Directory.EnumerateFiles(parameters["sourceFilesLocation"], "*.d", SearchOption.TopDirectoryOnly);

        // The offload location is optional; include its files only when the directory exists
        if (Directory.Exists(parameters["sourceFilesOffloadLocation"]))
            sourceFiles = sourceFiles.Concat(Directory.EnumerateFiles(parameters["sourceFilesOffloadLocation"], "*.d", SearchOption.TopDirectoryOnly));

        int maxThreads, methodIndex;

        // Fall back to defaults when parameters are missing or malformed
        if (!int.TryParse(parameters["maxThreads"], out maxThreads))
            maxThreads = m_defaultMaxThreads;

        if (!int.TryParse(parameters["directoryNamingMethod"], out methodIndex) || !Enum.IsDefined(typeof(ArchiveDirectoryMethod), methodIndex))
            methodIndex = (int)ArchiveDirectoryMethod.YearThenMonth;

        ArchiveDirectoryMethod method = (ArchiveDirectoryMethod)methodIndex;
        HistorianFileEncoding encoder = new HistorianFileEncoding();
        string[] sourceFileNames = sourceFiles.ToArray();
        string destinationPath = parameters["destinationFilesLocation"];
        string instanceName = parameters["instanceName"];
        bool ignoreDuplicates = parameters["ignoreDuplicates"].ParseBoolean();
        long totalProcessedPoints = 0;
        int processedFiles = 0;

        SetProgressMaximum(sourceFileNames.Length);

        // Single shared log for duplicate-point diagnostics across all parallel workers
        using (StreamWriter duplicateDataOutput = File.CreateText(FilePath.GetAbsolutePath("DuplicateData.txt")))
        {
            Parallel.ForEach(sourceFileNames, new ParallelOptions
            {
                MaxDegreeOfParallelism = maxThreads
            },
            (sourceFileName, loopState) =>
            {
                ShowUpdateMessage("Migrating \"{0}\"...", FilePath.GetFileName(sourceFileName));

                long fileConversionStartTime = DateTime.UtcNow.Ticks;
                long migratedPoints;

                if (ignoreDuplicates)
                {
                    // Migrate using SortedTreeFileSimpleWriter.CreateNonSequential() with raw unsorted historian file read
                    migratedPoints = ConvertArchiveFile.ConvertVersion1FileIgnoreDuplicates(
                        sourceFileName,
                        GetDestinationFileName(sourceFileName, instanceName, destinationPath, method),
                        encoder.EncodingMethod);
                }
                else
                {
                    // Migrate using SortedTreeFileSimpleWriter.Create() with API sorted historian file read with duplicate handling
                    using (GSFHistorianStream stream = new GSFHistorianStream(this, sourceFileName, instanceName, duplicateDataOutput))
                    {
                        string completeFileName = GetDestinationFileName(stream.ArchiveFile, sourceFileName, instanceName, destinationPath, method);
                        string pendingFileName = Path.Combine(FilePath.GetDirectoryName(completeFileName), FilePath.GetFileNameWithoutExtension(completeFileName) + ".~d2i");

                        SortedTreeFileSimpleWriter<HistorianKey, HistorianValue>.Create(pendingFileName, completeFileName, 4096, null, encoder.EncodingMethod, stream);

                        migratedPoints = stream.PointCount;
                    }
                }

                Ticks totalTime = DateTime.UtcNow.Ticks - fileConversionStartTime;

                ShowUpdateMessage(
                    "{0}Migrated {1:#,##0} points for last file in {2} at {3:#,##0} points per second.{0}",
                    Environment.NewLine,
                    migratedPoints,
                    totalTime.ToElapsedTimeString(3),
                    migratedPoints / totalTime.ToSeconds());

                // Interlocked updates because multiple workers touch these counters concurrently
                Interlocked.Increment(ref processedFiles);
                Interlocked.Add(ref totalProcessedPoints, migratedPoints);
                UpdateProgressBar(processedFiles);

                // Break (not Stop) lets iterations already in flight finish cleanly on form close
                if (m_formClosing)
                    loopState.Break();
            });
        }

        if (m_formClosing)
        {
            ShowUpdateMessage("Migration canceled.");
            UpdateProgressBar(0);
        }
        else
        {
            Ticks totalTime = DateTime.UtcNow.Ticks - operationStartTime;
            ShowUpdateMessage("*** Migration Complete ***");
            ShowUpdateMessage("Total migration time {0} at {1:#,##0} points per second.", totalTime.ToElapsedTimeString(3), totalProcessedPoints / totalTime.ToSeconds());
            UpdateProgressBar(sourceFileNames.Length);
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage("Failure during migration: {0}", ex.Message);
    }
    finally
    {
        m_operationStarted = false;
    }
}