/// <summary>
/// Move the file specified in the path to the incoming folder so that it will be imported again
/// </summary>
/// <param name="path">Absolute path of the file to move back for reprocessing.</param>
/// <exception cref="FileNotFoundException">Thrown if the file no longer exists (may indicate an incomplete study).</exception>
/// <exception cref="ApplicationException">Thrown if the move could not be completed.</exception>
private void MoveFileToIncomingFolder(string path)
{
    Platform.Log(LogLevel.Debug, "Moving file {0} to incoming folder", path);

    // should not proceed because it may mean incomplete study
    if (!File.Exists(path))
    {
        throw new FileNotFoundException(string.Format("File is missing: {0}", path));
    }

    // move the file to the Incoming folder to reprocess
    using (var processor = new ServerCommandProcessor("Move file back to incoming folder"))
    {
        var fileInfo = new FileInfo(path);
        const string folder = "FromWorkQueue";

        // Destination: <partition incoming>/FromWorkQueue/<study instance uid>/<file name>
        var incomingPath = GetServerPartitionIncomingFolder();
        // FIX: use the declared const instead of repeating the "FromWorkQueue" literal
        incomingPath = Path.Combine(incomingPath, folder);
        incomingPath = Path.Combine(incomingPath, StorageLocation.StudyInstanceUid);

        var createDirCommand = new CreateDirectoryCommand(incomingPath);
        processor.AddCommand(createDirCommand);

        incomingPath = Path.Combine(incomingPath, fileInfo.Name);
        var move = new RenameFileCommand(path, incomingPath, true);
        processor.AddCommand(move);

        if (!processor.Execute())
        {
            // FIX: throw ApplicationException (consistent with the other processors in
            // this file) instead of the bare Exception type; still backward-compatible
            // for callers catching Exception.
            throw new ApplicationException(
                "Unexpected error happened when trying to move file back to the incoming folder for reprocess",
                processor.FailureException);
        }

        Platform.Log(LogLevel.Info, "File {0} has been moved to the incoming folder.", path);
    }
}
/// <summary>
/// Add item list to the queue: each entry becomes either a create-directory
/// command or a download-file command on the shared command processor.
/// </summary>
/// <param name="fileFolderList">Items to synchronize.</param>
public void Synchronize(List<FileFolderInfo> fileFolderList)
{
    // Create one command per entry and register the shared error handler on each.
    foreach (var entry in fileFolderList)
    {
        if (entry.FileFolder == null)
        {
            // No remote metadata attached: create the directory from the entry itself.
            var mkdir = new CreateDirectoryCommand(entry.GroupId, entry.Path, entry.SyncFolder);
            mkdir.ExecuteError += OnExecuteError;
            _commandProcessor.AddCommand(mkdir);
        }
        else if (entry.FileFolder.type == "F")
        {
            // "F" marks a folder entry — create it locally.
            var mkdir = new CreateDirectoryCommand(entry.GroupId, entry.FileFolder, entry.Path, entry.SyncFolder);
            mkdir.ExecuteError += OnExecuteError;
            _commandProcessor.AddCommand(mkdir);
        }
        else
        {
            // Anything else is treated as a file — queue a download.
            var download = new DownloadFileCommand(_ticket, entry.GroupId, entry.FolderId, entry.FileFolder, entry.Path, _path);
            download.ExecuteError += OnExecuteError;
            _commandProcessor.AddCommand(download);
        }
    }
}
/// <summary>
/// Captures archive information for the study being deleted and, when the delete
/// was requested manually (web delete), saves a zipped backup copy of the study
/// folder before the delete proceeds.
/// </summary>
public void OnStudyDeleting()
{
    if (!Enabled)
    {
        return;
    }

    var storage = _context.StorageLocation;
    var archiveList = StudyStorageLocation.GetArchiveLocations(storage.GetKey());

    // Record where the study has been archived so the information survives the delete.
    if (archiveList != null && archiveList.Count > 0)
    {
        _archives = new DeletedStudyArchiveInfoCollection();
        foreach (var archive in archiveList)
        {
            _archives.Add(new DeletedStudyArchiveInfo
            {
                ArchiveTime = archive.ArchiveTime,
                ArchiveXml = archive.ArchiveXml,
                PartitionArchiveRef = PartitionArchive.Load(archive.PartitionArchiveKey).GetKey().Key,
                TransferSyntaxUid = archive.ServerTransferSyntax.Uid
            });
        }
    }

    // only backup if study is manually deleted
    if (_context.WorkQueueItem.WorkQueueTypeEnum != WorkQueueTypeEnum.WebDeleteStudy)
    {
        return;
    }

    using (var processor = new ServerCommandProcessor("Backup deleted study"))
    {
        var backupDir = _context.Filesystem.ResolveAbsolutePath(BackupSubPath);
        Platform.Log(LogLevel.Info, "Saving a copy of the study to {0}...", backupDir);

        processor.AddCommand(new CreateDirectoryCommand(backupDir));
        processor.AddCommand(new ZipStudyFolderCommand(storage.GetStudyPath(), BackupFullPath));

        if (!processor.Execute())
        {
            throw new ApplicationException(String.Format("Unable to backup study: {0}", processor.FailureReason));
        }
    }
}
/// <summary>
/// Inserts a <see cref="StudyIntegrityQueue"/> entry for manual reconciliation.
/// </summary>
/// <param name="file">The DICOM file that needs to be reconciled.</param>
/// <param name="reason">The type of <see cref="StudyIntegrityQueue"/> entry to be inserted.</param>
/// <param name="uid">A UID to delete on insert.</param>
/// <remarks>
/// A copy of the DICOM file will be stored in a special folder allocated for
/// reconciliation purpose. The caller is responsible for managing the original copy.
/// </remarks>
public void ScheduleReconcile(DicomFile file, StudyIntegrityReasonEnum reason, WorkQueueUid uid)
{
    Platform.CheckForNullReference(_context.StudyLocation, "_context.StudyLocation");

    Platform.Log(LogLevel.Info, "Scheduling new manual reconciliation for SOP {0}", file.MediaStorageSopInstanceUid);

    var fs = FilesystemMonitor.Instance.GetFilesystemInfo(_context.StudyLocation.FilesystemKey);
    Platform.CheckForNullReference(fs, "fs");

    var reconcileStorage = new ReconcileStorage(_context.StudyLocation, _context.Group);

    using (var processor = new ServerCommandProcessor("Schedule Manual Reconciliation"))
    {
        var sopPath = reconcileStorage.GetSopInstancePath(file.DataSet[DicomTags.SopInstanceUid].ToString());

        // Make sure the reconcile folder exists before the copy is written into it.
        var parentDir = new DirectoryInfo(sopPath).Parent;
        if (parentDir != null)
        {
            processor.AddCommand(new CreateDirectoryCommand(parentDir.FullName));
        }

        processor.AddCommand(new SaveDicomFileCommand(sopPath, file, true));
        processor.AddCommand(new InsertSIQCommand(_context.StudyLocation, reason, file, _context.Group, reconcileStorage));

        // Optionally remove the originating work queue UID in the same transaction.
        if (uid != null)
        {
            processor.AddCommand(new DeleteWorkQueueUidCommand(uid));
        }

        if (!processor.Execute())
        {
            throw new ApplicationException(
                String.Format("Unable to schedule image reconcilation : {0}", processor.FailureReason),
                processor.FailureException);
        }
    }
}
/// <summary>
/// Compares the server-side contents of every tracked folder against the local
/// sync table and queues download, create-directory, and delete commands to
/// bring the local copy in sync.
/// </summary>
/// <remarks>
/// Best-effort per folder: a failure while processing one folder does not stop
/// the remaining folders from being processed.
/// </remarks>
private async Task SynchronizeOnlineFilesFolders()
{
    var folders = SyncTableManager.GetFolders();

    foreach (var folder in folders)
    {
        try
        {
            var filesFolders = await DokuFlexService.GetFilesFoldersAsync(_ticket, folder.GroupId, folder.FolderId);

            foreach (var fileFolder in filesFolders)
            {
                if (fileFolder.type == "C")
                {
                    // "C" entries are files. Check if the file exists in the sync table.
                    var file = SyncTableManager.GetByFileId(fileFolder.id);
                    if (file != null)
                    {
                        // Known file: re-download only when the server copy is newer.
                        if (fileFolder.modifiedTime > file.ModifiedTime)
                        {
                            SyncTableManager.ChangeSyncStatusToPending(file.Path);

                            var command = new DownloadFileCommand(_ticket, folder.GroupId, folder.FolderId, fileFolder, file.Path, _path);
                            command.ExecuteError += OnExecuteError;
                            _commandProcessor.AddCommand(command);
                        }
                    }
                    else
                    {
                        var path = string.Format("{0}\\{1}", folder.Path, fileFolder.name);

                        // Add item as pending.
                        // NOTE(review): FileId is stored empty here, so GetByFileId will not
                        // match this item on a later pass — presumably it is filled in once
                        // the download completes; verify against DownloadFileCommand.
                        var item = new SyncTableItem
                        {
                            Name = fileFolder.name,
                            Path = path,
                            LastWriteTime = 0,
                            Type = "C",
                            GroupId = folder.GroupId,
                            FolderId = folder.FolderId,
                            FileId = string.Empty,
                            ModifiedTime = 0,
                            SyncFolder = false,
                            SyncStatus = SyncTableItemStatus.Pending
                        };

                        SyncTableManager.Add(item);

                        var command = new DownloadFileCommand(_ticket, folder.GroupId, folder.FolderId, fileFolder, path, _path);
                        command.ExecuteError += OnExecuteError;
                        _commandProcessor.AddCommand(command);
                    }
                }
                else
                {
                    // Server-side sub-folder: create it locally unless it is already tracked.
                    if (!folders.Any(f => f.FolderId.Equals(fileFolder.id) && f.Type == "F"))
                    {
                        var path = string.Format("{0}\\{1}", folder.Path, fileFolder.name);

                        var command = new CreateDirectoryCommand(folder.GroupId, fileFolder, path);
                        command.ExecuteError += OnExecuteError;
                        _commandProcessor.AddCommand(command);
                    }
                }
            }

            // Any locally tracked file no longer present on the server is deleted.
            var files = SyncTableManager.GetFiles(folder.FolderId);
            foreach (var file in files)
            {
                if (!filesFolders.Any(f => f.id.Equals(file.FileId)))
                {
                    SyncTableManager.ChangeSyncStatusToPending(file.Path);

                    var command = new DeleteFileCommand(file.Path);
                    command.ExecuteError += OnExecuteError;
                    _commandProcessor.AddCommand(command);
                }
            }
        }
        // FIX: catch the specific exception type directly instead of catching
        // Exception and re-testing with "is" plus a redundant "as" cast.
        catch (RestResponseException restException)
        {
            // Error code 1 means the folder no longer exists on the server,
            // so remove the local copy.
            if (restException.ErrorCode == 1)
            {
                var command = new DeleteDirectoryCommand(folder.Path);
                command.ExecuteError += OnExecuteError;
                _commandProcessor.AddCommand(command);
            }
        }
        catch (Exception)
        {
            // Deliberate best-effort (matches original behavior): any other failure
            // for this folder is skipped so remaining folders still get processed.
            // TODO(review): log the exception instead of swallowing it silently.
        }
    }
}