private void ProcessSeriesLevelDelete(Model.WorkQueue item)
{
    // Ensure the Study is loaded.
    Study study = StorageLocation.Study;
    Platform.CheckForNullReference(study, "Study record doesn't exist");

    Platform.Log(LogLevel.Info, "Processing Series Level Deletion for Study {0}, A#: {1}",
                 study.StudyInstanceUid, study.AccessionNumber);

    _seriesToDelete = new List<Series>();
    bool completed = false;
    try
    {
        // Load the list of Series to be deleted from the WorkQueueUid
        LoadUids(item);

        // Go through the list of series and add commands
        // to delete each of them. It's all or nothing.
        using (ServerCommandProcessor processor =
            new ServerCommandProcessor(String.Format("Deleting Series from study {0}, A#:{1}, Patient: {2}, ID:{3}",
                                                     study.StudyInstanceUid, study.AccessionNumber,
                                                     study.PatientsName, study.PatientId)))
        {
            StudyXml studyXml = StorageLocation.LoadStudyXml();
            IDictionary<string, Series> existingSeries = StorageLocation.Study.Series;

            // Add commands to delete the folders and update the xml
            foreach (WorkQueueUid uid in WorkQueueUidList)
            {
                // Delete from the study XML
                if (studyXml.Contains(uid.SeriesInstanceUid))
                {
                    var xmlUpdate = new RemoveSeriesFromStudyXml(studyXml, uid.SeriesInstanceUid);
                    processor.AddCommand(xmlUpdate);
                }

                // Delete from the filesystem
                // Note: DeleteDirectoryCommand doesn't throw an exception if the folder doesn't exist
                string path = StorageLocation.GetSeriesPath(uid.SeriesInstanceUid);
                if (Directory.Exists(path))
                {
                    var delDir = new DeleteDirectoryCommand(path, true);
                    processor.AddCommand(delDir);
                }
            }

            // Flush the updated xml to disk
            processor.AddCommand(new SaveXmlCommand(studyXml, StorageLocation));

            // Update the db. NOTE: these commands are executed at the end.
            foreach (WorkQueueUid uid in WorkQueueUidList)
            {
                // Delete from the DB. TryGetValue avoids a KeyNotFoundException
                // when the series UID is not in the dictionary.
                Series theSeries;
                if (existingSeries.TryGetValue(uid.SeriesInstanceUid, out theSeries) && theSeries != null)
                {
                    _seriesToDelete.Add(theSeries);
                    var delSeries = new DeleteSeriesFromDBCommand(StorageLocation, theSeries);
                    processor.AddCommand(delSeries);
                    delSeries.Executing += DeleteSeriesFromDbExecuting;
                }
                else
                {
                    // Series doesn't exist
                    Platform.Log(LogLevel.Info, "Series {0} is invalid or no longer exists", uid.SeriesInstanceUid);
                }

                // The WorkQueueUid must be cleared before the entry can be removed from the queue
                var deleteUid = new DeleteWorkQueueUidCommand(uid);
                processor.AddCommand(deleteUid);

                // Force a re-archival if necessary
                processor.AddCommand(new InsertArchiveQueueCommand(item.ServerPartitionKey, item.StudyStorageKey));
            }

            if (!processor.Execute())
                throw new ApplicationException(
                    String.Format("Error occurred when deleting series from Study {0}, A#: {1}",
                                  study.StudyInstanceUid, study.AccessionNumber),
                    processor.FailureException);

            foreach (Series series in _seriesToDelete)
            {
                OnSeriesDeleted(series);
            }
        }

        completed = true;
    }
    finally
    {
        if (completed)
        {
            OnCompleted();
            PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
        }
        else
        {
            PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
        }
    }
}
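// The method above leans on ServerCommandProcessor's all-or-nothing contract:
// commands are queued (filesystem/XML work first, database work last) and a
// failure anywhere rolls back what has already run. The sketch below is a
// self-contained illustration of that execute-with-rollback pattern under
// assumed semantics; ISketchCommand and SketchProcessor are hypothetical names
// and are NOT the real ClearCanvas command API.
internal interface ISketchCommand
{
    void Execute();
    void Undo();
}

internal sealed class SketchProcessor
{
    private readonly List<ISketchCommand> _commands = new List<ISketchCommand>();

    public void AddCommand(ISketchCommand command)
    {
        _commands.Add(command);
    }

    // Runs the queued commands in order; on any failure, undoes the ones
    // already executed in reverse order and reports the failure to the caller.
    public bool Execute()
    {
        var executed = new Stack<ISketchCommand>();
        try
        {
            foreach (ISketchCommand command in _commands)
            {
                command.Execute();
                executed.Push(command);
            }
            return true;
        }
        catch
        {
            while (executed.Count > 0)
                executed.Pop().Undo();
            return false;
        }
    }
}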
/// <summary>
/// Removes all WorkQueueUids from the database and deletes the corresponding DICOM files from the filesystem.
/// </summary>
private void ProcessWorkQueueUids()
{
    if (Study == null)
    {
        Platform.Log(LogLevel.Info,
                     "Begin StudyProcess Cleanup (Study has not been created): Attempt #{0}. {1} unprocessed files will be removed",
                     WorkQueueItem.FailureCount + 1, WorkQueueUidList.Count);
    }
    else
    {
        Platform.Log(LogLevel.Info,
                     "Begin StudyProcess Cleanup for study {0}, Patient {1} (PatientId:{2} A#:{3}) on Partition {4}. Attempt #{5}. {6} unprocessed files will be removed",
                     Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                     Study.AccessionNumber, ServerPartition.Description,
                     WorkQueueItem.FailureCount + 1, WorkQueueUidList.Count);
    }

    foreach (WorkQueueUid sop in WorkQueueUidList)
    {
        string path = GetFileStoredPath(sop);
        Platform.Log(LogLevel.Info, "Cleaning up {0}", path);

        using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", sop.SopInstanceUid)))
        {
            // Delete the file
            FileDeleteCommand deleteFile = new FileDeleteCommand(path, true);
            processor.AddCommand(deleteFile);

            // Delete the WorkQueueUid from the database
            DeleteWorkQueueUidCommand deleteUid = new DeleteWorkQueueUidCommand(sop);
            processor.AddCommand(deleteUid);

            try
            {
                // Delete the containing directory (only if it's empty)
                var fileInfo = new FileInfo(path);
                var deleteDir = new ClearCanvas.ImageServer.Core.Command.DeleteDirectoryCommand(
                    fileInfo.Directory.FullName, false, true);
                processor.AddCommand(deleteDir);
            }
            catch (DirectoryNotFoundException)
            {
                // The directory is already gone; nothing to clean up.
            }

            if (!processor.Execute())
            {
                throw new Exception(String.Format("Unable to delete SOP {0}", sop.SopInstanceUid), processor.FailureException);
            }
        }

        // Delete the base directory if it's empty
        var baseDir = GetBaseDirectory(sop);
        if (Directory.Exists(baseDir))
        {
            using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", sop.SopInstanceUid)))
            {
                var deleteDir = new ClearCanvas.ImageServer.Core.Command.DeleteDirectoryCommand(baseDir, false, true);
                processor.AddCommand(deleteDir);

                if (!processor.Execute())
                {
                    throw new Exception(String.Format("Unable to delete {0}", baseDir), processor.FailureException);
                }
            }
        }
    }
}
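// ProcessWorkQueueUids passes a "delete only if empty" flag to
// DeleteDirectoryCommand so that a shared series or study folder is never
// removed while sibling files remain. Below is a minimal standalone sketch of
// that check, using only System.IO and System.Linq; DirectoryCleanupSketch is
// a hypothetical helper, not part of the ImageServer codebase.
internal static class DirectoryCleanupSketch
{
    // Removes the directory at 'path' only when it exists and is empty.
    // Returns true if the directory was actually deleted.
    public static bool DeleteIfEmpty(string path)
    {
        if (!Directory.Exists(path))
            return false; // already gone; mirrors the command's lenient behaviour

        if (Directory.EnumerateFileSystemEntries(path).Any())
            return false; // still holds files or subdirectories; leave it alone

        Directory.Delete(path);
        return true;
    }
}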
/// <summary>
/// Migrates the study to a new tier.
/// </summary>
/// <param name="storage">The current storage location of the study.</param>
/// <param name="newFilesystem">The filesystem the study is migrated to.</param>
private void DoMigrateStudy(StudyStorageLocation storage, ServerFilesystemInfo newFilesystem)
{
    Platform.CheckForNullReference(storage, "storage");
    Platform.CheckForNullReference(newFilesystem, "newFilesystem");

    TierMigrationStatistics stat = new TierMigrationStatistics { StudyInstanceUid = storage.StudyInstanceUid };
    stat.ProcessSpeed.Start();
    StudyXml studyXml = storage.LoadStudyXml();
    stat.StudySize = (ulong)studyXml.GetStudySize();

    Platform.Log(LogLevel.Info, "About to migrate study {0} from {1} to {2}",
                 storage.StudyInstanceUid, storage.FilesystemTierEnum, newFilesystem.Filesystem.Description);

    string newPath = Path.Combine(newFilesystem.Filesystem.FilesystemPath, storage.PartitionFolder);
    DateTime startTime = Platform.Time;
    DateTime lastLog = Platform.Time;
    int fileCounter = 0;
    ulong bytesCopied = 0;
    long instanceCountInXml = studyXml.NumberOfStudyRelatedInstances;

    using (ServerCommandProcessor processor = new ServerCommandProcessor("Migrate Study"))
    {
        TierMigrationContext context = new TierMigrationContext
        {
            OriginalStudyLocation = storage,
            Destination = newFilesystem
        };

        string origFolder = context.OriginalStudyLocation.GetStudyPath();
        processor.AddCommand(new CreateDirectoryCommand(newPath));

        newPath = Path.Combine(newPath, context.OriginalStudyLocation.StudyFolder);
        processor.AddCommand(new CreateDirectoryCommand(newPath));

        newPath = Path.Combine(newPath, context.OriginalStudyLocation.StudyInstanceUid);
        // Don't create this directory so that it won't be backed up by MoveDirectoryCommand

        CopyDirectoryCommand copyDirCommand = new CopyDirectoryCommand(origFolder, newPath,
            delegate(string path)
            {
                // Update the progress. This is useful if the migration takes a long time to complete.
                FileInfo file = new FileInfo(path);
                bytesCopied += (ulong)file.Length;
                fileCounter++;

                if (file.Extension != null &&
                    file.Extension.Equals(ServerPlatform.DicomFileExtension, StringComparison.InvariantCultureIgnoreCase))
                {
                    TimeSpan elapsed = Platform.Time - lastLog;
                    TimeSpan totalElapsed = Platform.Time - startTime;
                    double speedInMBPerSecond = 0;
                    if (totalElapsed.TotalSeconds > 0)
                    {
                        speedInMBPerSecond = (bytesCopied / 1024f / 1024f) / totalElapsed.TotalSeconds;
                    }

                    if (elapsed > TimeSpan.FromSeconds(WorkQueueSettings.Instance.TierMigrationProgressUpdateInSeconds))
                    {
                        #region Log Progress

                        StringBuilder stats = new StringBuilder();
                        if (instanceCountInXml != 0)
                        {
                            float pct = (float)fileCounter / instanceCountInXml;
                            stats.AppendFormat("{0} files moved [{1:0.0}MB] since {2} ({3:0}% completed). Speed={4:0.00}MB/s",
                                               fileCounter, bytesCopied / 1024f / 1024f, startTime, pct * 100, speedInMBPerSecond);
                        }
                        else
                        {
                            stats.AppendFormat("{0} files moved [{1:0.0}MB] since {2}. Speed={3:0.00}MB/s",
                                               fileCounter, bytesCopied / 1024f / 1024f, startTime, speedInMBPerSecond);
                        }

                        Platform.Log(LogLevel.Info, "Tier migration for study {0}: {1}", storage.StudyInstanceUid, stats.ToString());

                        try
                        {
                            using (IUpdateContext ctx = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                            {
                                IWorkQueueEntityBroker broker = ctx.GetBroker<IWorkQueueEntityBroker>();
                                WorkQueueUpdateColumns parameters = new WorkQueueUpdateColumns { FailureDescription = stats.ToString() };
                                broker.Update(WorkQueueItem.GetKey(), parameters);
                                ctx.Commit();
                            }
                        }
                        catch
                        {
                            // Can't log the progress so far... just ignore it.
                        }
                        finally
                        {
                            // Use Platform.Time for consistency with the rest of this method.
                            lastLog = Platform.Time;
                        }

                        #endregion
                    }
                }
            });
        processor.AddCommand(copyDirCommand);

        DeleteDirectoryCommand delDirCommand = new DeleteDirectoryCommand(origFolder, false) { RequiresRollback = false };
        processor.AddCommand(delDirCommand);

        TierMigrateDatabaseUpdateCommand updateDbCommand = new TierMigrateDatabaseUpdateCommand(context);
        processor.AddCommand(updateDbCommand);

        Platform.Log(LogLevel.Info, "Start migrating study {0}... expecting {1} to be moved",
                     storage.StudyInstanceUid, ByteCountFormatter.Format(stat.StudySize));

        if (!processor.Execute())
        {
            if (processor.FailureException != null)
                throw processor.FailureException;
            throw new ApplicationException(processor.FailureReason);
        }

        stat.DBUpdate = updateDbCommand.Statistics;
        stat.CopyFiles = copyDirCommand.CopySpeed;
        stat.DeleteDirTime = delDirCommand.Statistics;
    }

    stat.ProcessSpeed.SetData(bytesCopied);
    stat.ProcessSpeed.End();

    Platform.Log(LogLevel.Info,
                 "Successfully migrated study {0} from {1} to {2} in {3} [{4} files, {5} @ {6}, DB Update={7}, Remove Dir={8}]",
                 storage.StudyInstanceUid, storage.FilesystemTierEnum,
                 newFilesystem.Filesystem.FilesystemTierEnum,
                 TimeSpanFormatter.Format(stat.ProcessSpeed.ElapsedTime),
                 fileCounter, ByteCountFormatter.Format(bytesCopied),
                 stat.CopyFiles.FormattedValue, stat.DBUpdate.FormattedValue, stat.DeleteDirTime.FormattedValue);

    string originalPath = storage.GetStudyPath();
    if (Directory.Exists(originalPath))
    {
        Platform.Log(LogLevel.Info, "Original study folder could not be deleted. It must be cleaned up manually: {0}", originalPath);
        ServerPlatform.Alert(AlertCategory.Application, AlertLevel.Warning,
                             WorkQueueItem.WorkQueueTypeEnum.ToString(), 1000,
                             GetWorkQueueContextData(WorkQueueItem), TimeSpan.Zero,
                             "Study has been migrated to a new tier. Original study folder must be cleaned up manually: {0}",
                             originalPath);
    }

    UpdateAverageStatistics(stat);
}
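// The copy delegate above throttles progress updates: it recomputes the running
// throughput on every file but only logs and writes to the database once
// TierMigrationProgressUpdateInSeconds has elapsed since the last report. This
// self-contained sketch isolates that throttling logic; CopyProgressSketch and
// its members are hypothetical names, not part of the ImageServer API.
internal sealed class CopyProgressSketch
{
    private readonly TimeSpan _interval;
    private readonly DateTime _start;
    private DateTime _lastReport;
    private long _files;
    private ulong _bytes;

    public CopyProgressSketch(TimeSpan reportInterval)
    {
        _interval = reportInterval;
        _start = DateTime.Now;
        _lastReport = _start;
    }

    // Called once per copied file; returns a formatted stats line when a
    // report is due, or null while the interval has not yet elapsed.
    public string OnFileCopied(long fileSizeBytes, long expectedFileCount)
    {
        _files++;
        _bytes += (ulong)fileSizeBytes;

        DateTime now = DateTime.Now;
        if (now - _lastReport < _interval)
            return null;
        _lastReport = now;

        double totalSeconds = (now - _start).TotalSeconds;
        double megabytes = _bytes / 1024f / 1024f;
        double mbPerSecond = totalSeconds > 0 ? megabytes / totalSeconds : 0;

        var sb = new StringBuilder();
        if (expectedFileCount != 0)
        {
            double pct = 100.0 * _files / expectedFileCount;
            sb.AppendFormat("{0} files moved [{1:0.0}MB] ({2:0}% completed). Speed={3:0.00}MB/s",
                            _files, megabytes, pct, mbPerSecond);
        }
        else
        {
            sb.AppendFormat("{0} files moved [{1:0.0}MB]. Speed={2:0.00}MB/s",
                            _files, megabytes, mbPerSecond);
        }
        return sb.ToString();
    }
}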