public void AggregateTest()
{
    string file;

    // First run: both sub-commands of the aggregate succeed, so no rollback occurs
    // and the saved file has already been removed again by the FileDeleteCommand.
    using (var processor = new TestCommandProcessor())
    {
        file = Path.Combine(processor.ProcessorContext.TempDirectory, "AggregateTest.dcm");
        if (File.Exists(file))
            File.Delete(file);

        var aggregateCommand = new AggregateCommand();
        processor.AddCommand(aggregateCommand);

        aggregateCommand.AddSubCommand(new SaveDicomFileCommand(file, _dicomFile, false));
        aggregateCommand.AddSubCommand(new FileDeleteCommand(file, true));

        Assert.IsTrue(processor.Execute());
        Assert.IsFalse(processor.TestContext.RollbackEncountered);
        Assert.AreEqual(3, processor.TestContext.CommandsExecuted);
        Assert.IsFalse(File.Exists(file));
    }

    // Second run: the file is saved before the aggregate runs, so the aggregate's save
    // sub-command is expected to fail (the file already exists), triggering a rollback
    // that removes the file again.
    using (var processor = new TestCommandProcessor())
    {
        file = Path.Combine(processor.ProcessorContext.TempDirectory, "AggregateTest.dcm");

        processor.AddCommand(new SaveDicomFileCommand(file, _dicomFile, false));

        var aggregateCommand = new AggregateCommand();
        processor.AddCommand(aggregateCommand);

        aggregateCommand.AddSubCommand(new SaveDicomFileCommand(file, _dicomFile, true));

        Assert.IsFalse(processor.Execute());
        Assert.IsTrue(processor.TestContext.RollbackEncountered);
        Assert.AreEqual(3, processor.TestContext.CommandsExecuted);
        Assert.IsFalse(File.Exists(file));
    }
}
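// The test above relies on the processor counting the aggregate and each of its
// sub-commands as executed commands (hence CommandsExecuted == 3), and on a failed
// sub-command triggering a rollback of everything that already ran. The sketch below
// is a hypothetical illustration of that contract only, not the actual ClearCanvas
// AggregateCommand; the ICommandSketch interface, the class name, and the Execute/Undo
// method names are assumptions for illustration, and System.Collections.Generic is
// assumed to be imported.
internal interface ICommandSketch
{
    void Execute();
    void Undo();
}

internal class AggregateCommandSketch : ICommandSketch
{
    private readonly List<ICommandSketch> _subCommands = new List<ICommandSketch>();
    private readonly Stack<ICommandSketch> _executed = new Stack<ICommandSketch>();

    public void AddSubCommand(ICommandSketch command)
    {
        _subCommands.Add(command);
    }

    public void Execute()
    {
        // Run sub-commands in the order they were added, remembering which ones succeeded.
        foreach (var command in _subCommands)
        {
            command.Execute();
            _executed.Push(command);
        }
    }

    public void Undo()
    {
        // Roll back only the sub-commands that actually executed, most recent first.
        while (_executed.Count > 0)
            _executed.Pop().Undo();
    }
}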
private void InsertBatch(IList<ProcessorFile> list, StudyXml studyXml)
{
    using (var processor = new ViewerCommandProcessor("Processing WorkItem DICOM file(s)"))
    {
        try
        {
            // Create an AggregateCommand where we batch together all the database updates
            // and execute them together as the last command.
            var batchDatabaseCommand = new AggregateCommand();

            foreach (var file in list)
            {
                if (!string.IsNullOrEmpty(file.FilePath) && file.File == null)
                {
                    try
                    {
                        file.File = new DicomFile(file.FilePath);

                        // WARNING: If we ever do anything where we update files and save them,
                        // we may have to change this.
                        file.File.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);
                    }
                    catch (FileNotFoundException)
                    {
                        Platform.Log(LogLevel.Warn, "File to be processed is not found, ignoring: {0}", file.FilePath);

                        if (file.ItemUid != null)
                            batchDatabaseCommand.AddSubCommand(new CompleteWorkItemUidCommand(file.ItemUid));

                        continue;
                    }
                }
                else
                {
                    file.FilePath = file.File.Filename;
                }

                String seriesUid = file.File.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty);
                String sopUid = file.File.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty);

                String finalDest = StudyLocation.GetSopInstancePath(seriesUid, sopUid);
                if (file.FilePath != finalDest)
                    processor.AddCommand(new RenameFileCommand(file.FilePath, finalDest, false));

                // Update the StudyXml object.
                var insertStudyXmlCommand = new InsertStudyXmlCommand(file.File, studyXml, StudyLocation, false);
                processor.AddCommand(insertStudyXmlCommand);

                if (file.ItemUid != null)
                    batchDatabaseCommand.AddSubCommand(new CompleteWorkItemUidCommand(file.ItemUid));
            }

            // Now save the batched updates to the StudyXml file.
            processor.AddCommand(new SaveStudyXmlCommand(studyXml, StudyLocation));

            // Update the Study table, based on the studyXml.
            var updateReason = IsReprocess
                                   ? InsertOrUpdateStudyCommand.UpdateReason.Reprocessing
                                   : InsertOrUpdateStudyCommand.UpdateReason.LiveImport;
            batchDatabaseCommand.AddSubCommand(new InsertOrUpdateStudyCommand(StudyLocation, studyXml, updateReason));

            // Now, add all the batched database updates.
            processor.AddCommand(batchDatabaseCommand);

            // Do the actual processing.
            if (!processor.Execute())
            {
                Platform.Log(LogLevel.Error, "Failure processing {0} for Study: {1}",
                             processor.Description, StudyLocation.Study.StudyInstanceUid);
                throw new ApplicationException(
                    "Unexpected failure (" + processor.FailureReason + ") executing command for Study: " +
                    StudyLocation.Study.StudyInstanceUid, processor.FailureException);
            }

            StudyLocation.Study = processor.ViewerContext.ContextStudy;

            Platform.Log(LogLevel.Info, "Processed {0} SOPs for Study {1}", list.Count,
                         StudyLocation.Study.StudyInstanceUid);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Unexpected exception when {0}. Rolling back operation.", processor.Description);
            processor.Rollback();
            throw new ApplicationException("Unexpected exception when processing file.", e);
        }
    }
}
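// Summary of the command queue InsertBatch builds, in execution order:
//   1. RenameFileCommand         - move each incoming file to its final SOP instance path (per file, when needed)
//   2. InsertStudyXmlCommand     - add each file's metadata to the in-memory StudyXml (per file)
//   3. SaveStudyXmlCommand       - persist the updated StudyXml once, after all files have been added
//   4. AggregateCommand (last)   - all database updates batched together: one
//      CompleteWorkItemUidCommand per work-item UID, plus the InsertOrUpdateStudyCommand.
// Running the database aggregate last means the Study table and WorkItem UIDs are only
// touched after every filesystem step has succeeded; if anything fails earlier, Rollback()
// undoes the file operations without any database changes to unwind.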