public void AssertTagValueChanged(uint tag, string valueToSet, string originalCharacterSet, string expectedNewCharacterSet)
{
    // Build a dataset with the original character set and wrap it in a file.
    var dataset = new DicomAttributeCollection();
    SetupDataSet(dataset, originalCharacterSet);
    var file = new DicomFile("test", CreateMetaInfo(), dataset);
    Assert.AreEqual(originalCharacterSet, file.DataSet[DicomTags.SpecificCharacterSet].ToString());

    // Apply the tag change and verify the command's unicode capability flag.
    var command = new SetTagCommand(tag, valueToSet);
    Assert.AreEqual(command.CanSaveInUnicode, UnicodeAllowed, "SetTagCommand.CanSaveInUnicode returns an incorrect value");
    Assert.IsTrue(command.Apply(file), "SetTagCommand.Apply failed");

    // Round-trip through disk so save/load exercises the character-set handling.
    var filename = string.Format("Test-{0}.dcm", DicomTagDictionary.GetDicomTag(tag).Name);
    Assert.IsTrue(file.Save(filename), "Unable to save dicom file");

    file = new DicomFile(filename);
    file.Load();

    var expectedValue = valueToSet ?? string.Empty;
    Assert.AreEqual(expectedValue, file.DataSet[tag].ToString());
    Assert.IsTrue(file.DataSet[DicomTags.SpecificCharacterSet].ToString().Equals(expectedNewCharacterSet));

    Delete(filename);
}
private void buttonCompress_Click(object sender, EventArgs e)
{
    // Validate filenames up front, matching the check buttonDecompress_Click performs.
    if (this.textBoxSourceFile.Text.Length == 0 || this.textBoxDestinationFile.Text.Length == 0)
    {
        MessageBox.Show("Invalid source or destination filename");
        return;
    }

    TransferSyntax syntax = this.comboBoxCompressionType.SelectedItem as TransferSyntax;
    if (syntax == null)
    {
        MessageBox.Show("Transfer syntax not selected");
        return;
    }

    DicomFile dicomFile = new DicomFile(textBoxSourceFile.Text);
    dicomFile.Load();

    // Refuse to re-compress a file that is already encapsulated (compressed).
    if (dicomFile.TransferSyntax.Encapsulated)
    {
        MessageBox.Show(String.Format("Message encoded as {0}, cannot compress.", dicomFile.TransferSyntax));
        return;
    }

    // Convert to the selected transfer syntax and write to the destination path.
    dicomFile.Filename = textBoxDestinationFile.Text;
    dicomFile.ChangeTransferSyntax(syntax);
    dicomFile.Save();
}
/// <summary>
/// Creates an instance of <see cref="DicomPixelData"/> from specified stream
/// </summary>
/// <param name="stream">Stream positioned at the start of a DICOM file.</param>
/// <returns>The pixel data extracted from the loaded file.</returns>
public static DicomPixelData CreateFrom(Stream stream)
{
    var file = new DicomFile();
    file.Load(stream);
    return CreateFrom(file);
}
/// <summary>
/// Splits multi-frame instances in each series of the study into their own
/// series folders named "&lt;series&gt;.&lt;index&gt;", then deletes any series
/// folders left empty by the moves.
/// </summary>
/// <param name="study">The study whose series folders are scanned.</param>
public static void MultiFrameProcess(DbStudy study)
{
    string dcmPath = ADCM.GetStoreString();
    var seriesList = Directory.GetDirectories(Path.Combine(dcmPath, study.study_uid));

    foreach (var sePath in seriesList)
    {
        var filesList = Directory.GetFiles(sePath, "*.dcm");
        if (filesList.Length < 2)
            continue;

        for (int i = 0; i < filesList.Length; i++)
        {
            var dcm = new DicomFile(filesList[i]);
            dcm.Load();

            // NumberOfFrames (0028,0008) is an IS value; read it as a 32-bit int so
            // very large frame counts are not truncated (previously read via GetInt16).
            int frameCount = dcm.DataSet[DicomTags.NumberOfFrames].GetInt32(0, 0);
            if (frameCount > 1)
            {
                // Derive the new series folder name from the current folder name plus index.
                string newSeriesUid = sePath + "." + i;
                newSeriesUid = newSeriesUid.Substring(newSeriesUid.LastIndexOf(Path.DirectorySeparatorChar) + 1);

                string newSeriesPath = Path.Combine(dcmPath, study.study_uid, newSeriesUid);
                Directory.CreateDirectory(newSeriesPath);

                string fileName = Path.GetFileName(filesList[i]);
                string newPath = Path.Combine(newSeriesPath, fileName);
                File.Move(filesList[i], newPath);
            }
        }
    }

    // Remove any series folders that ended up empty after the moves.
    foreach (string sePath in seriesList)
    {
        if (Directory.GetFiles(sePath).Length < 1)
            Directory.Delete(sePath);
    }
}
/// <summary>
/// Creates an instance of <see cref="DicomPixelData"/> from specified image path
/// </summary>
/// <param name="path">Path to a DICOM file on disk.</param>
/// <returns>The pixel data extracted from the loaded file.</returns>
public static DicomPixelData CreateFrom(string path)
{
    var file = new DicomFile(path);
    file.Load();
    return CreateFrom(file);
}
//public static void ConvertBmpToDicomAndAddToExistingFolder(string bmpFilePath, string dicomFolderPath, string newFileName = "")
//{
//    if (string.IsNullOrEmpty(newFileName)) newFileName = Path.GetFileNameWithoutExtension(bmpFilePath);
//    if (Directory.GetFiles(dicomFolderPath)
//        .Any(f => Path.GetFileName(f).ToLower().Contains(newFileName ?? throw new ArgumentNullException(nameof(newFileName)))))
//        newFileName = DateTime.Now.ToString("yyyyMMddHHmmssfff");
//    var dicomFileHighestInstanceNo = GetDicomFileWithHighestInstanceNumber(dicomFolderPath);
//    var headers = GetDicomTags(dicomFileHighestInstanceNo);
//    var newFilePath = Path.Combine(dicomFolderPath, newFileName ?? throw new ArgumentNullException(nameof(newFileName)));
//    ConvertBmpToDicom(bmpFilePath, newFilePath, dicomFileHighestInstanceNo);
//    var newFileInstanceNumber = headers.InstanceNumber.Values == null || headers.InstanceNumber.Values.Length < 1 ? 1 : int.Parse(headers.InstanceNumber.Values[0]) + 1;
//    headers.InstanceNumber.Values = new[] { newFileInstanceNumber.ToString() };
//    UpdateDicomHeaders(newFilePath, headers, DicomNewObjectType.NewImage);
//}

/// <summary>
/// Reads the Patient ID (0010,0020) from the given DICOM file.
/// </summary>
/// <param name="dicomFilePath">Path of the DICOM file to read.</param>
/// <returns>The first Patient ID value, or null if the tag has no string values.</returns>
public static string GetPatientIdFromDicomFile(string dicomFilePath)
{
    var dcmFile = new ClearCanvas.Dicom.DicomFile(dicomFilePath);
    dcmFile.Load(dicomFilePath);

    // Use the named tag constant instead of the raw value 1048608 (0x00100020).
    var patientIdTag = dcmFile.DataSet[ClearCanvas.Dicom.DicomTags.PatientId].Values as string[];
    return patientIdTag?[0];
}
/// <summary>
/// Updates the given Dicom headers without trying to be clever.
/// </summary>
/// <param name="filepath">Path of the DICOM file rewritten in place.</param>
/// <param name="tags">Tag values to write into the file.</param>
public static void ForceUpdateDicomHeaders(string filepath, DicomTagCollection tags)
{
    var dcmFile = new ClearCanvas.Dicom.DicomFile(filepath);
    dcmFile.Load(filepath);

    // Apply every tag straight onto the dataset, then persist.
    foreach (var tag in tags.ToList())
        UpdateTag(dcmFile, tag);

    dcmFile.Save();
}
/// <summary>
/// Copies image-geometry tags (ImagePositionPatient, ImageOrientation,
/// FrameOfReferenceUid, SliceLocation) from a reference series onto the target
/// files, pairing files by InstanceNumber order.
/// </summary>
/// <param name="dicomFilesToUpdate">Files whose geometry tags are overwritten.</param>
/// <param name="orientationDicomFiles">Reference files the geometry is read from.</param>
public static void UpdateImagePositionFromReferenceSeries(string[] dicomFilesToUpdate, string[] orientationDicomFiles)
{
    // NOTE: the two error messages below were previously swapped.
    if (dicomFilesToUpdate == null || dicomFilesToUpdate.Length == 0)
    {
        throw new ArgumentNullException("Dicom files to copy orientation data to not available");
    }
    if (orientationDicomFiles == null || orientationDicomFiles.Length == 0)
    {
        throw new ArgumentNullException("Orientation Dicom files not available to read from");
    }
    if (dicomFilesToUpdate.Length != orientationDicomFiles.Length)
    {
        throw new Exception("Number of files in \"Orientation dicom\" folder and \"Dicom Files to Update\" do not match");
    }

    // Sort both sets by InstanceNumber so files pair up slice-for-slice.
    // Read the tags once per file (previously GetDicomTags was called twice per key).
    Func<string, int> instanceNumber = f =>
    {
        var vals = GetDicomTags(f).InstanceNumber.Values;
        return vals != null && vals.Length > 0 ? int.Parse(vals[0]) : 0;
    };

    var orderedFilesToUpdate = dicomFilesToUpdate.OrderBy(instanceNumber).ToArray();
    var orderedOrientationFiles = orientationDicomFiles.OrderBy(instanceNumber).ToArray();

    for (var i = 0; i < orderedFilesToUpdate.Length; i++)
    {
        var fileToUpdate = orderedFilesToUpdate[i];
        var orientationFile = orderedOrientationFiles[i];

        // Read the reference headers once per file instead of four times.
        var orientationTags = GetDicomTags(orientationFile);
        var imagePatientOrientation = orientationTags.ImagePositionPatient;
        var imageOrientation = orientationTags.ImageOrientation;
        var frameOfReferenceUid = orientationTags.FrameOfReferenceUid;
        var sliceLocation = orientationTags.SliceLocation;

        var dcmFile = new ClearCanvas.Dicom.DicomFile();
        dcmFile.Load(fileToUpdate);
        dcmFile = UpdateArrayTag(dcmFile, imagePatientOrientation, imagePatientOrientation.Values);
        dcmFile = UpdateArrayTag(dcmFile, imageOrientation, imageOrientation.Values);
        dcmFile = UpdateArrayTag(dcmFile, frameOfReferenceUid, frameOfReferenceUid.Values);
        dcmFile = UpdateArrayTag(dcmFile, sliceLocation, sliceLocation.Values);
        dcmFile.Save(fileToUpdate);
    }
}
/// <summary>
/// Update the tags for the given files. Files will be given a generated SeriesInstanceUid and ImageUid.
/// </summary>
/// <param name="filesPath">List of files to apply the tags to.</param>
/// <param name="tags">The tags which you'd like to apply to the above files.</param>
/// <param name="uidpostfix">Postfix passed to the series UID generator.</param>
public static void GenerateSeriesHeaderForAllFiles(string[] filesPath, DicomTagCollection tags, int uidpostfix = 1)
{
    // A single generated series/image UID pair is shared by every file in the batch.
    tags.SeriesInstanceUid.Values = new[] { GenerateNewSeriesUid(uidpostfix.ToString()) };
    tags.ImageUid.Values = new[] { GenerateNewImageUid() };

    foreach (var path in filesPath)
    {
        var file = new ClearCanvas.Dicom.DicomFile(path);
        file.Load(path);
        file = UpdateTags(file, tags, TagType.Series);
        file.Save(path);
    }
}
/// <summary>
/// Initializes the streaming context from the incoming HTTP request, capturing
/// the requested frame number and the next series/object UIDs from the query string.
/// </summary>
/// <param name="context">The HTTP listener context for the current request.</param>
public ImageStreamingContext(HttpListenerContext context)
{
    Request = context.Request;
    Response = context.Response;
    NameValueCollection query = Request.QueryString;

    #region INIT STUFF FOR PERFORMANCE TESTING
#if DEBUG
    // Debug-only hooks: "testcompressed"/"testuncompressed" query parameters switch
    // the handler into performance-test mode using embedded sample images.
    if (query["testcompressed"] != null)
    {
        testCompressed = true;
    }
    else if (query["testuncompressed"] != null)
    {
        testUncompressed = true;
    }

    // Lazily load the embedded compressed sample once (static field).
    if (_testCompressedImage == null)
    {
        using (Stream stream = typeof(ImageStreamingContext).Assembly.GetManifestResourceStream("ClearCanvas.ImageServer.Services.Streaming.ImageStreaming.Test.TestSamples.compressed.dcm"))
        {
            DicomFile file = new DicomFile();
            file.Load(stream);
            _testCompressedImage = DicomPixelData.CreateFrom(file);
        }
    }
    // Lazily load the embedded uncompressed sample once (static field).
    if (_testUncompressedImage == null)
    {
        using (Stream stream = typeof(ImageStreamingContext).Assembly.GetManifestResourceStream("ClearCanvas.ImageServer.Services.Streaming.ImageStreaming.Test.TestSamples.uncompressed.dcm"))
        {
            DicomFile file = new DicomFile();
            file.Load(stream);
            _testUncompressedImage = DicomPixelData.CreateFrom(file);
        }
    }
#endif
    #endregion

    // FrameNumber defaults to 0 when the parameter is absent or unparsable.
    _frameNumber = 0;
    if (query["FrameNumber"] != null)
        int.TryParse(query["FrameNumber"], out _frameNumber);

    _nextSeriesUid = query["nextSeriesUid"];
    _nextSopUid = query["nextObjectUid"];
}
/// <summary>
/// Gets the tags for a dicom file.
/// </summary>
/// <param name="filePath">The dicom file.</param>
/// <returns>A collection populated with the values read from the file's dataset.</returns>
public static DicomTagCollection GetDicomTags(string filePath)
{
    var dcmFile = new ClearCanvas.Dicom.DicomFile(filePath);
    dcmFile.Load(filePath);

    // Iterate the default tag set and copy each value out of the dataset
    // into a second collection, which is what gets returned.
    var template = new DicomTagCollection();
    var result = new DicomTagCollection();
    foreach (var tag in template.ToList())
    {
        var tagValue = tag.GetTagValue();
        result.SetTagValue(tagValue, dcmFile.DataSet[tagValue].Values);
    }
    return result;
}
private void ButtonLoadFile_Click(object sender, EventArgs e) { openFileDialog.DefaultExt = "dcm"; openFileDialog.ShowDialog(); DicomFile dicomFile = new DicomFile(openFileDialog.FileName); DicomReadOptions options = new DicomReadOptions(); dicomFile.Load(options); _theStream.AddFile(dicomFile); }
/// <summary>
/// Imports a single SOP instance received over the web service into the
/// partition matching the called AE title.
/// </summary>
/// <param name="request">Carries the SOP instance bytes and identifying UIDs.</param>
/// <returns>The import status, error message and DICOM status code.</returns>
/// <exception cref="FaultException">Any failure is logged and rethrown as a fault.</exception>
public ImportSopResponse ImportSop(ImportSopRequest request)
{
    try
    {
        var theFile = new DicomFile(string.Format("{0}{1}", request.SopInstanceUid, ServerPlatform.DicomFileExtension));

        using (var stream = new LargeMemoryStream(request.SopInstance))
        {
            theFile.Load(stream);
        }

        var partition = ServerPartitionMonitor.Instance.GetPartition(request.CalledAETitle);

        // Previously a missing partition caused a NullReferenceException below;
        // fail with a meaningful message instead (still surfaced as a FaultException).
        if (partition == null)
            throw new ApplicationException(string.Format("No server partition found for called AE title: {0}", request.CalledAETitle));

        string aeTitle = theFile.SourceApplicationEntityTitle;

        if (_importerContext == null)
            _importerContext = new SopInstanceImporterContext(
                String.Format("{0}_{1}", aeTitle, DateTime.Now.ToString("yyyyMMddhhmmss")),
                partition.AeTitle, partition);

        var utility = new SopInstanceImporter(_importerContext);
        var importResult = utility.Import(theFile);

        if (!importResult.Successful)
            Platform.Log(LogLevel.Error, "Failure importing file from Web Service: {0}, SOP Instance UID: {1}",
                         importResult.ErrorMessage, request.SopInstanceUid);
        else
            Platform.Log(LogLevel.Info, "Processed import for SOP through Web Service: {0}.", request.SopInstanceUid);

        var result = new ImportSopResponse
        {
            DicomStatusCode = importResult.DicomStatus.Code,
            FailureMessage = importResult.ErrorMessage,
            Successful = importResult.Successful
        };
        return result;
    }
    catch (Exception ex)
    {
        var message = string.Format("Failed to import files: {0}, SOP Instance UID: {1}", ex.Message, request.SopInstanceUid);
        Platform.Log(LogLevel.Error, message);
        throw new FaultException(message);
    }
}
/// <summary>
/// Serves the encapsulated PDF document of the referenced object as the response payload.
/// </summary>
/// <param name="context">Streaming context identifying the image on disk.</param>
/// <returns>Output whose body is the encapsulated document bytes.</returns>
/// <exception cref="WADOException">Thrown when the object is not Encapsulated PDF Storage.</exception>
public MimeTypeProcessorOutput Process(ImageStreamingContext context)
{
    // Load with pixel data kept as references; only the encapsulated document is needed.
    var file = new DicomFile(context.ImagePath);
    file.Load(DicomReadOptions.StorePixelDataReferences);

    if (!file.SopClass.Equals(SopClass.EncapsulatedPdfStorage))
        throw new WADOException(HttpStatusCode.NotImplemented,
                                "image/pdf is not supported for this type of object: " + file.SopClass.Name);

    var iod = new EncapsulatedDocumentModuleIod(file.DataSet);
    return new MimeTypeProcessorOutput
    {
        ContentType = OutputMimeType,
        Output = iod.EncapsulatedDocument
    };
}
/// <summary>
/// Loads a DICOM header for the requested SOP instance via the WADO streaming service.
/// </summary>
/// <param name="args">Identifies the study/series/SOP instance to fetch.</param>
/// <returns>The loaded DICOM file.</returns>
public DicomFile LoadDicomFile(LoadDicomFileArgs args)
{
    try
    {
        var client = new StreamingClient(_wadoUri);
        var dicomFile = new DicomFile();
        using (var headerStream = client.RetrieveImageHeader(_aeTitle, args.StudyInstanceUid, args.SeriesInstanceUid, args.SopInstanceUid))
        {
            dicomFile.Load(headerStream);
        }
        return dicomFile;
    }
    catch (Exception e)
    {
        // Normalize streaming failures into the layer's exception type.
        throw TranslateStreamingException(e);
    }
}
private void buttonDecompress_Click(object sender, EventArgs e)
{
    // Both filenames must be supplied before we attempt anything.
    bool missingInput = this.textBoxSourceFile.Text.Length == 0
                        || this.textBoxDestinationFile.Text.Length == 0;
    if (missingInput)
    {
        MessageBox.Show("Invalid source or destination filename");
        return;
    }

    var dicomFile = new DicomFile(textBoxSourceFile.Text);
    dicomFile.Load();

    // Rewrite the file as uncompressed Explicit VR Little Endian at the destination path.
    dicomFile.Filename = textBoxDestinationFile.Text;
    dicomFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
    dicomFile.Save();
}
/// <summary>
/// Loads a DICOM header for the requested SOP instance from the configured WADO endpoint.
/// </summary>
/// <param name="args">Identifies the study/series/SOP instance to fetch.</param>
/// <returns>The loaded DICOM file.</returns>
public DicomFile LoadDicomFile(LoadDicomFileArgs args)
{
    try
    {
        // Build the WADO endpoint from configuration, then pull just the header.
        var wadoUri = new Uri(string.Format(StreamingSettings.Default.FormatWadoUriPrefix, _hostName, _wadoServicePort));
        var client = new StreamingClient(wadoUri);
        var dicomFile = new DicomFile();
        using (var headerStream = client.RetrieveImageHeader(_aeTitle, args.StudyInstanceUid, args.SeriesInstanceUid, args.SopInstanceUid))
        {
            dicomFile.Load(headerStream);
        }
        return dicomFile;
    }
    catch (Exception e)
    {
        // Normalize streaming failures into the layer's exception type.
        throw TranslateStreamingException(e);
    }
}
/// <summary>
/// Returns the object's dataset serialized up to an optional "stopTag"
/// (hex, query string), capped at PixelData.
/// </summary>
/// <param name="context">Streaming context identifying the image on disk.</param>
/// <returns>Output whose body is the serialized (possibly truncated) DICOM file.</returns>
/// <exception cref="WADOException">Thrown when stopTag is beyond PixelData.</exception>
public MimeTypeProcessorOutput Process(ImageStreamingContext context)
{
    uint stopTag;
    if (!uint.TryParse(context.Request.QueryString["stopTag"] ?? "", NumberStyles.HexNumber, null, out stopTag))
        stopTag = DicomTags.PixelData;

    if (stopTag > DicomTags.PixelData)
        throw new WADOException(HttpStatusCode.BadRequest, "Stop tag must be less than PixelData tag.");

    // ContentType was previously assigned twice; once is enough.
    var output = new MimeTypeProcessorOutput { ContentType = OutputMimeType };

    var file = new DicomFile(context.ImagePath);
    file.Load(stopTag, DicomReadOptions.Default);

    // Serialize the loaded portion of the dataset as the response payload;
    // dispose the stream deterministically.
    using (var memStream = new MemoryStream())
    {
        file.Save(memStream, DicomWriteOptions.Default);
        output.Output = memStream.ToArray();
    }
    return output;
}
/// <summary>
/// Queues a DICOM file for sending, recording its SOP class and transfer syntax.
/// </summary>
/// <param name="file">Path of the file to queue.</param>
/// <returns>false when the file cannot be loaded or has no SOP Class UID; true otherwise.</returns>
public bool AddFileToSend(String file)
{
    try
    {
        var dicomFile = new DicomFile(file);

        // Only read up to SopInstanceUid to keep the amount of data read from the file small.
        dicomFile.Load(DicomTags.SopInstanceUid, DicomReadOptions.Default | DicomReadOptions.DoNotStorePixelDataInDataSet);

        var fileStruct = new FileToSend();
        fileStruct.filename = file;

        string sopClassInFile = dicomFile.DataSet[DicomTags.SopClassUid].ToString();
        if (sopClassInFile.Length == 0)
            return false;

        if (sopClassInFile.Equals(dicomFile.SopClass.Uid))
        {
            fileStruct.sopClass = dicomFile.SopClass;
        }
        else
        {
            // Prefer the SOP Class recorded in the dataset when the meta info disagrees.
            Logger.LogError("SOP Class in Meta Info does not match SOP Class in DataSet");
            fileStruct.sopClass = SopClass.GetSopClass(sopClassInFile);
        }

        fileStruct.transferSyntax = dicomFile.TransferSyntax;
        _fileList.Add(fileStruct);
    }
    catch (DicomException e)
    {
        Logger.LogErrorException(e, "Unexpected exception when loading file for sending: {0}", file);
        return false;
    }
    return true;
}
/// <summary>
/// Entry point: dumps each DICOM file named on the command line to the console.
/// Arguments starting with '-' are option flags consumed by ParseArgs.
/// </summary>
static void Main(string[] args)
{
    if (args.Length == 0)
    {
        PrintCommandLine();
        return;
    }

    if (false == ParseArgs(args))
        return;

    foreach (String filename in args)
    {
        // Option flags were handled by ParseArgs; skip them here.
        if (filename.StartsWith("-"))
            continue;

        DicomFile file = new DicomFile(filename);
        DicomReadOptions readOptions = DicomReadOptions.Default;
        try
        {
            file.Load(readOptions);
        }
        catch (Exception e)
        {
            // Previously a failed load fell through and dumped a partially/never
            // loaded file; skip to the next file instead.
            Console.WriteLine("Unexpected exception when loading file: {0}", e.Message);
            continue;
        }

        StringBuilder sb = new StringBuilder();
        file.Dump(sb, "", _options);
        Console.WriteLine(sb.ToString());
    }
}
/// <summary>
/// Reprocess a file systems
/// </summary>
/// <param name="filesystem">The filesystem whose partition/date/study folders are walked and reprocessed.</param>
private void ReprocessFilesystem(Filesystem filesystem)
{
    var filesystemDir = new DirectoryInfo(filesystem.FilesystemPath);

    foreach (DirectoryInfo partitionDir in filesystemDir.GetDirectories())
    {
        ServerPartition partition;
        if (GetServerPartition(partitionDir.Name, out partition) == false)
        {
            // Well-known service folders are expected here; anything else is logged as unknown.
            if (!partitionDir.Name.EndsWith("_Incoming") && !partitionDir.Name.Equals("temp")
                && !partitionDir.Name.Equals("ApplicationLog") && !partitionDir.Name.Equals("AlertLog"))
                Platform.Log(LogLevel.Error, "Unknown partition folder '{0}' in filesystem: {1}",
                             partitionDir.Name, filesystem.Description);
            continue;
        }

        // Since we found a partition, we should find a rules engine too.
        ServerRulesEngine engine = _engines[partition];
        ServerRulesEngine postArchivalEngine = _postArchivalEngines[partition];
        ServerRulesEngine dataAccessEngine = _dataAccessEngine[partition];

        foreach (DirectoryInfo dateDir in partitionDir.GetDirectories())
        {
            // Skip folders holding deleted or reconcile-pending studies.
            if (dateDir.FullName.EndsWith("Deleted") || dateDir.FullName.EndsWith(ServerPlatform.ReconcileStorageFolder))
                continue;

            foreach (DirectoryInfo studyDir in dateDir.GetDirectories())
            {
                String studyInstanceUid = studyDir.Name;
                try
                {
                    StudyStorageLocation location = LoadReadableStorageLocation(partition.GetKey(), studyInstanceUid);
                    if (location == null)
                    {
                        // Folder name did not resolve to a storage location; try to recover the
                        // real StudyInstanceUid from the header of any DICOM file in the study.
                        foreach (DirectoryInfo seriesDir in studyDir.GetDirectories())
                        {
                            FileInfo[] sopInstanceFiles = seriesDir.GetFiles("*.dcm");

                            DicomFile file = null;
                            foreach (FileInfo sopFile in sopInstanceFiles)
                            {
                                if (!sopFile.FullName.EndsWith(ServerPlatform.DicomFileExtension))
                                    continue;

                                try
                                {
                                    // Header-only load (up to StudyId, pixel data excluded) is enough here.
                                    file = new DicomFile(sopFile.FullName);
                                    file.Load(DicomTags.StudyId, DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default);
                                    break;
                                }
                                catch (Exception e)
                                {
                                    Platform.Log(LogLevel.Warn, e, "Unexpected failure loading file: {0}. 
Continuing to next file.", sopFile.FullName);
                                    file = null;
                                }
                            }

                            if (file != null)
                            {
                                studyInstanceUid = file.DataSet[DicomTags.StudyInstanceUid].ToString();
                                break;
                            }
                        }

                        // Retry the lookup with the recovered UID; skip the study if still unknown.
                        location = LoadReadableStorageLocation(partition.GetKey(), studyInstanceUid);
                        if (location == null)
                            continue;
                    }

                    ProcessStudy(partition, location, engine, postArchivalEngine, dataAccessEngine);
                    _stats.NumStudies++;

                    if (CancelPending)
                        return;
                }
                catch (Exception e)
                {
                    Platform.Log(LogLevel.Error, e,
                                 "Unexpected error while processing study: {0} on partition {1}.",
                                 studyInstanceUid, partition.Description);
                }
            }

            // Cleanup the directory, if its empty.
            DirectoryUtility.DeleteIfEmpty(dateDir.FullName);
        }
    }
}
/// <summary>
/// Reconciles each queued SOP instance into the destination study folder,
/// routing duplicates to the Study Integrity Queue and failing other errors.
/// Throws when no instance could be processed at all.
/// </summary>
private void ProcessUidList()
{
    string lastErrorMessage = "";

    Platform.Log(LogLevel.Info, "Populating new images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

    // Load the rules engine
    context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Add the update commands to the context
    context.UpdateCommands.AddRange(BuildUpdateCommandList());

    // Add command to update the Series & Sop Instances.
    context.UpdateCommands.Add(new SeriesSopUpdateCommand(Context.WorkQueueItemStudyStorage, _destinationStudyStorage, UidMapper));

    // Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    PrintUpdateCommands(context.UpdateCommands);

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        // Load the file outside the try/catch block so it can be
        // referenced in the catch block.
        string imagePath = GetReconcileUidPath(uid);
        DicomFile file = new DicomFile(imagePath);

        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, Context.Partition, Context.WorkQueueItem.InsertTime);

            SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context) { EnforceNameRules = true };

            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, GetReconcileUidPath(uid));
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            _processedCount++;

            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, _processedCount, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

            // Duplicate instances get a Study Integrity Queue entry; all other failures fail the uid.
            if (e is InstanceAlreadyExistsException
                || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException)
            {
                DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, GetReconcileUidPath(uid), Context.WorkQueueItem, uid);
            }
            else
            {
                lastErrorMessage = e.Message;
                SopInstanceProcessor.FailUid(uid, true);
            }
            _failedCount++;
        }
    }

    // If nothing was processed successfully, surface the last failure to the caller.
    if (_processedCount == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}
/// <summary>
/// ProcessSavedFile a specified <see cref="WorkQueueUid"/>
/// </summary>
/// <param name="item">The <see cref="WorkQueue"/> item being processed</param>
/// <param name="sop">The <see cref="WorkQueueUid"/> being processed</param>
/// <param name="studyXml">The <see cref="StudyXml"/> object for the study being processed</param>
/// <returns>true if the <see cref="WorkQueueUid"/> is successfully processed. false otherwise</returns>
protected virtual bool ProcessWorkQueueUid(Model.WorkQueue item, WorkQueueUid sop, StudyXml studyXml)
{
    Platform.CheckForNullReference(item, "item");
    Platform.CheckForNullReference(sop, "sop");
    Platform.CheckForNullReference(studyXml, "studyXml");

    OnProcessUidBegin(item, sop);

    string path = null;

    try
    {
        if (sop.Duplicate && sop.Extension != null)
        {
            // Duplicate SOP: load it from the duplicate folder and run duplicate handling.
            path = ServerHelper.GetDuplicateUidPath(StorageLocation, sop);
            var file = new DicomFile(path);
            file.Load();

            InstancePreProcessingResult result = PreProcessFile(sop, file);

            if (false == file.DataSet[DicomTags.StudyInstanceUid].ToString().Equals(StorageLocation.StudyInstanceUid)
                || result.DiscardImage)
            {
                // Belongs to a different study or pre-processing discarded it; drop the uid.
                RemoveWorkQueueUid(sop, null);
            }
            else
            {
                var duplicateResult = ProcessDuplicate(file, sop, studyXml);
                if (duplicateResult.ActionTaken == DuplicateProcessResultAction.Delete
                    || duplicateResult.ActionTaken == DuplicateProcessResultAction.Accept)
                {
                    // make sure the folder is also deleted if it's empty
                    string folder = Path.GetDirectoryName(path);
                    String reconcileRootFolder = ServerHelper.GetDuplicateFolderRootPath(StorageLocation);
                    DirectoryUtility.DeleteIfEmpty(folder, reconcileRootFolder);
                }
            }
        }
        else
        {
            try
            {
                // Normal SOP: load from the study storage location and process it.
                path = StorageLocation.GetSopInstancePath(sop.SeriesInstanceUid, sop.SopInstanceUid);
                var file = new DicomFile(path);
                file.Load();

                InstancePreProcessingResult result = PreProcessFile(sop, file);

                if (false == file.DataSet[DicomTags.StudyInstanceUid].ToString().Equals(StorageLocation.StudyInstanceUid)
                    || result.DiscardImage)
                {
                    RemoveWorkQueueUid(sop, path);
                }
                else
                {
                    ProcessFile(sop, file, studyXml, !result.AutoReconciled);
                }
            }
            catch (DicomException ex)
            {
                // bad file. Remove it from the filesystem and the queue
                RemoveBadDicomFile(path, ex.Message);
                DeleteWorkQueueUid(sop);
                return false;
            }
        }

        return true;
    }
    catch (StudyIsNearlineException)
    {
        // handled by caller
        throw;
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error, e, "Unexpected exception when processing file: {0} SOP Instance: {1}", path, sop.SopInstanceUid);
        item.FailureDescription = e.InnerException != null
            ? String.Format("{0}:{1}", e.GetType().Name, e.InnerException.Message)
            : String.Format("{0}:{1}", e.GetType().Name, e.Message);

        //No longer needed. Update was moved into the SopInstanceProcessor
        //sop.FailureCount++;
        //UpdateWorkQueueUid(sop);
        return false;
    }
    finally
    {
        OnProcessUidEnd(item, sop);
    }
}
/// <summary>
/// Handles a duplicate SOP instance according to the uid's duplicate-processing
/// policy: overwrite, overwrite-and-update-database, report overwrite, or
/// (default) byte-wise dataset comparison against the existing file.
/// </summary>
/// <param name="dupFile">The newly received (duplicate) file.</param>
/// <param name="uid">Work queue uid carrying the duplicate policy and instance UIDs.</param>
/// <param name="studyXml">Study XML for the study being processed.</param>
/// <returns>The action taken (delete, accept, reconcile, ...).</returns>
private ProcessDuplicateResult ProcessDuplicate(DicomFile dupFile, WorkQueueUid uid, StudyXml studyXml)
{
    var result = new ProcessDuplicateResult();

    var data = uid.SerializeWorkQueueUidData;

    string duplicateSopPath = ServerHelper.GetDuplicateUidPath(StorageLocation, uid);
    string basePath = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

    if (!File.Exists(basePath))
    {
        // NOTE: This is special case. The file which caused dicom service to think this sop is a duplicate
        // no longer exists in the study folder. Perhaps it has been moved to another folder during auto reconciliation.
        // We have nothing to compare against so let's just throw it into the SIQ queue.
        CreateDuplicateSIQEntry(uid, dupFile, null);
        result.ActionTaken = DuplicateProcessResultAction.Reconcile;
    }
    else
    {
        // Default policy is Compare when none was recorded on the uid.
        var duplicateEnum = data.DuplicateProcessing.HasValue ? data.DuplicateProcessing.Value : DuplicateProcessingEnum.Compare;

        // Check if system is configured to override the rule for this study
        if (duplicateEnum == DuplicateProcessingEnum.OverwriteSop)
        {
            return OverwriteDuplicate(dupFile, uid, studyXml);
        }

        // Check if system is configured to override the rule for this study
        if (duplicateEnum == DuplicateProcessingEnum.OverwriteSopAndUpdateDatabase)
        {
            return OverwriteAndUpdateDuplicate(dupFile, uid, studyXml);
        }

        var baseFile = new DicomFile(basePath);
        baseFile.Load();

        if (duplicateEnum == DuplicateProcessingEnum.OverwriteReport)
        {
            return ProcessDuplicateReport(dupFile, baseFile, uid, studyXml);
        }

        // DuplicateProcessingEnum.Compare
        if (!dupFile.TransferSyntax.Equals(baseFile.TransferSyntax))
        {
            // If they're compressed, and we have a codec, lets decompress and still do the comparison
            if (dupFile.TransferSyntax.Encapsulated
                && !dupFile.TransferSyntax.LossyCompressed
                && DicomCodecRegistry.GetCodec(dupFile.TransferSyntax) != null)
            {
                dupFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
            }

            if (baseFile.TransferSyntax.Encapsulated
                && !baseFile.TransferSyntax.LossyCompressed
                && DicomCodecRegistry.GetCodec(baseFile.TransferSyntax) != null)
            {
                baseFile.ChangeTransferSyntax(TransferSyntax.ExplicitVrLittleEndian);
            }

            // Still encapsulated after the decompression attempts: cannot compare, send to SIQ.
            if (dupFile.TransferSyntax.Encapsulated || baseFile.TransferSyntax.Encapsulated)
            {
                string failure = String.Format("Base file transfer syntax is '{0}' while duplicate file has '{1}'",
                                               baseFile.TransferSyntax, dupFile.TransferSyntax);

                var list = new List<DicomAttributeComparisonResult>();
                var compareResult = new DicomAttributeComparisonResult
                {
                    ResultType = ComparisonResultType.DifferentValues,
                    TagName = DicomTagDictionary.GetDicomTag(DicomTags.TransferSyntaxUid).Name,
                    Details = failure
                };
                list.Add(compareResult);
                CreateDuplicateSIQEntry(uid, dupFile, list);
                result.ActionTaken = DuplicateProcessResultAction.Reconcile;
                return result;
            }
        }

        var failureReason = new List<DicomAttributeComparisonResult>();
        if (baseFile.DataSet.Equals(dupFile.DataSet, ref failureReason))
        {
            // Identical datasets: the duplicate adds nothing, delete it.
            Platform.Log(LogLevel.Info, "Duplicate SOP being processed is identical. Removing SOP: {0}", baseFile.MediaStorageSopInstanceUid);
            RemoveWorkQueueUid(uid, duplicateSopPath);
            result.ActionTaken = DuplicateProcessResultAction.Delete;
        }
        else
        {
            // Datasets differ: queue for manual reconciliation with the difference list.
            CreateDuplicateSIQEntry(uid, dupFile, failureReason);
            result.ActionTaken = DuplicateProcessResultAction.Reconcile;
        }
    }

    return result;
}
/// <summary>
/// Called to insert a DICOM file into the directory record structure.
/// </summary>
/// <param name="dicomFile">The file to insert.</param>
/// <param name="optionalRelativeRootPath">Optional relative root used when recording the file reference.</param>
private void InsertFile(DicomFile dicomFile, string optionalRelativeRootPath)
{
    try
    {
        // Lazily load the dataset (pixel data kept as references) if it hasn't been read yet.
        if (dicomFile.DataSet.Count == 0)
            dicomFile.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);

        // Walk (or extend) the patient -> study -> series -> image record hierarchy.
        DirectoryRecordSequenceItem patientRecord =
            _rootRecord == null
                ? (_rootRecord = CreatePatientItem(dicomFile))
                : GetExistingOrCreateNewPatient(_rootRecord, dicomFile);

        DirectoryRecordSequenceItem studyRecord =
            patientRecord.LowerLevelDirectoryRecord == null
                ? (patientRecord.LowerLevelDirectoryRecord = CreateStudyItem(dicomFile))
                : GetExistingOrCreateNewStudy(patientRecord.LowerLevelDirectoryRecord, dicomFile);

        DirectoryRecordSequenceItem seriesRecord =
            studyRecord.LowerLevelDirectoryRecord == null
                ? (studyRecord.LowerLevelDirectoryRecord = CreateSeriesItem(dicomFile))
                : GetExistingOrCreateNewSeries(studyRecord.LowerLevelDirectoryRecord, dicomFile);

        if (seriesRecord.LowerLevelDirectoryRecord == null)
            seriesRecord.LowerLevelDirectoryRecord = CreateImageItem(dicomFile, optionalRelativeRootPath);
        else
            GetExistingOrCreateNewImage(seriesRecord.LowerLevelDirectoryRecord, dicomFile, optionalRelativeRootPath);
    }
    catch (Exception ex)
    {
        Platform.Log(LogLevel.Error, ex, "Error adding image {0} to directory file", dicomFile.Filename);
        throw;
    }
}
/// <summary>
/// Simple class to load a sample image file from the study.
/// </summary>
/// <returns>
/// The first instance whose SOP class is not in the skip list (KO/presentation
/// states); otherwise the first skipped instance loaded as a fallback; null if
/// the study has no instances.
/// </returns>
private DicomFile LoadFileFromStudyXml()
{
    DicomFile defaultFile = null;
    foreach (SeriesXml seriesXml in _studyXml)
    {
        foreach (InstanceXml instanceXml in seriesXml)
        {
            // Build the on-disk path for this instance.
            string path = Path.Combine(_storageLocation.GetStudyPath(), seriesXml.SeriesInstanceUid);
            path = Path.Combine(path, instanceXml.SopInstanceUid);
            path += ServerPlatform.DicomFileExtension;

            bool isNonImage =
                instanceXml.SopClass.Equals(SopClass.KeyObjectSelectionDocumentStorage)
                || instanceXml.SopClass.Equals(SopClass.GrayscaleSoftcopyPresentationStateStorageSopClass)
                || instanceXml.SopClass.Equals(SopClass.BlendingSoftcopyPresentationStateStorageSopClass)
                || instanceXml.SopClass.Equals(SopClass.ColorSoftcopyPresentationStateStorageSopClass);

            if (isNonImage)
            {
                // Keep the first non-image object as a fallback but keep searching.
                if (defaultFile == null)
                {
                    defaultFile = new DicomFile(path);
                    defaultFile.Load(path);
                }
                continue;
            }

            // Found an image-type object; load and return it immediately.
            defaultFile = new DicomFile(path);
            defaultFile.Load(path);
            return defaultFile;
        }
    }
    return defaultFile;
}
/// <summary>
/// Loads the duplicate SOP file for the given work queue uid.
/// </summary>
/// <param name="uid">Identifies the duplicate SOP instance.</param>
/// <param name="skipPixelData">When true, pixel data is kept as file references instead of being read into memory.</param>
/// <returns>The loaded duplicate file.</returns>
private DicomFile LoadDuplicateDicomFile(WorkQueueUid uid, bool skipPixelData)
{
    FileInfo duplicateFile = GetDuplicateSopFile(uid);

    Platform.CheckTrue(duplicateFile.Exists, String.Format("Duplicate SOP doesn't exist at {0}", uid.SopInstanceUid));

    DicomReadOptions readOptions = skipPixelData
        ? DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default
        : DicomReadOptions.Default;

    DicomFile file = new DicomFile(duplicateFile.FullName);
    file.Load(readOptions);
    return file;
}
/// <summary>
/// Removes an existing SOP instance file from the study: deletes the file,
/// removes the instance from the study XML and updates the instance counts,
/// all through a ServerCommandProcessor.
/// </summary>
/// <param name="uid">Identifies the series/SOP instance to remove.</param>
private void RemoveExistingImage(WorkQueueUid uid)
{
    string path = StorageLocation.GetSopInstancePath(uid.SeriesInstanceUid, uid.SopInstanceUid);

    // Nothing to do if the file is already gone.
    if (!File.Exists(path))
        return;

    StudyXml studyXml = StorageLocation.LoadStudyXml();
    var file = new DicomFile(path);
    file.Load(DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default); // don't need to load pixel data cause we will delete it

#if DEBUG
    // Capture the pre-delete counts so the asserts below can verify each
    // counter dropped by exactly one.
    int originalInstanceCountInXml = studyXml.NumberOfStudyRelatedInstances;
    int originalStudyInstanceCount = Study.NumberOfStudyRelatedInstances;
    int originalSeriesInstanceCount = Study.Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances;
#endif

    using (var processor = new ServerCommandProcessor("Delete Existing Image"))
    {
        var seriesInstanceUid = file.DataSet[DicomTags.SeriesInstanceUid].ToString();
        var sopInstanceUid = file.DataSet[DicomTags.SopInstanceUid].ToString();

        // Delete the file, remove it from the study XML, then fix up the counts.
        processor.AddCommand(new FileDeleteCommand(path, true));
        processor.AddCommand(new RemoveInstanceFromStudyXmlCommand(StorageLocation, studyXml, seriesInstanceUid, sopInstanceUid));
        processor.AddCommand(new UpdateInstanceCountCommand(StorageLocation, seriesInstanceUid, sopInstanceUid));

        if (!processor.Execute())
        {
            throw new ApplicationException(String.Format("Unable to remove existing image {0}", file.Filename), processor.FailureException);
        }
    }

#if DEBUG
    // Sanity check: file gone and every count decremented by exactly one.
    Debug.Assert(!File.Exists(path));
    Debug.Assert(studyXml.NumberOfStudyRelatedInstances == originalInstanceCountInXml - 1);
    Debug.Assert(Study.Load(Study.Key).NumberOfStudyRelatedInstances == originalStudyInstanceCount - 1);
    Debug.Assert(Study.Load(Study.Key).Series[uid.SeriesInstanceUid].NumberOfSeriesRelatedInstances == originalSeriesInstanceCount - 1);
#endif
}
/// <summary>
/// Wraps a local DICOM file, loading its dataset immediately.
/// </summary>
/// <param name="filename">Path of the DICOM file on disk.</param>
public LocalStudyItem(string filename)
{
    _filename = filename;

    // Load now; pixel data is kept as file references rather than read into memory.
    _dcf = new DicomFile(filename);
    _dcf.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);
}
/// <summary>
/// Creates an instance of <see cref="DicomPixelData"/> from specified stream
/// </summary>
/// <param name="stream">Stream containing a complete DICOM file.</param>
/// <returns>Pixel data for the loaded file.</returns>
public static DicomPixelData CreateFrom(Stream stream)
{
    // Parse the stream into a DicomFile, then delegate to the DicomFile overload.
    var dicomFile = new DicomFile();
    dicomFile.Load(stream);
    return CreateFrom(dicomFile);
}
/// <summary>
/// Reconciles each queued SOP instance into the destination study folder,
/// routing duplicates to the Study Integrity Queue and failing other errors.
/// Throws when no instance could be processed at all.
/// </summary>
private void ProcessUidList()
{
    int counter = 0;
    Platform.Log(LogLevel.Info, "Populating new images into study folder.. {0} to go", Context.WorkQueueUidList.Count);

    StudyProcessorContext context = new StudyProcessorContext(_destinationStudyStorage);

    // Load the rules engine
    context.SopProcessedRulesEngine = new ServerRulesEngine(ServerRuleApplyTimeEnum.SopProcessed, Context.WorkQueueItem.ServerPartitionKey);
    context.SopProcessedRulesEngine.AddOmittedType(ServerRuleTypeEnum.SopCompress);
    context.SopProcessedRulesEngine.Load();

    // Load the Study XML File
    StudyXml xml = LoadStudyXml(_destinationStudyStorage);

    string lastErrorMessage = "";

    foreach (WorkQueueUid uid in Context.WorkQueueUidList)
    {
        // Construct the file outside the try block so the catch handler can reference it.
        string imagePath = GetReconcileUidPath(uid);
        DicomFile file = new DicomFile(imagePath);
        try
        {
            file.Load();

            string groupID = ServerHelper.GetUidGroup(file, _destinationStudyStorage.ServerPartition, Context.WorkQueueItem.InsertTime);

            SopInstanceProcessor sopProcessor = new SopInstanceProcessor(context);

            ProcessingResult result = sopProcessor.ProcessFile(groupID, file, xml, false, true, uid, GetReconcileUidPath(uid));
            if (result.Status != ProcessingStatus.Success)
            {
                throw new ApplicationException(String.Format("Unable to reconcile image {0}", file.Filename));
            }

            counter++;
            Platform.Log(ServerPlatform.InstanceLogLevel, "Reconciled SOP {0} [{1} of {2}]", uid.SopInstanceUid, counter, Context.WorkQueueUidList.Count);
        }
        catch (Exception e)
        {
            Platform.Log(LogLevel.Error, e, "Error occurred when processing uid {0}", uid.SopInstanceUid);

            // Duplicate instances get a Study Integrity Queue entry; other failures fail the uid.
            if (e is InstanceAlreadyExistsException
                || e.InnerException != null && e.InnerException is InstanceAlreadyExistsException)
            {
                DuplicateSopProcessorHelper.CreateDuplicateSIQEntry(file, _destinationStudyStorage, GetReconcileUidPath(uid), Context.WorkQueueItem, uid);
            }
            else
            {
                lastErrorMessage = e.Message;
                SopInstanceProcessor.FailUid(uid, true);
            }
        }
    }

    // If nothing succeeded, surface the last error to the caller.
    if (counter == 0)
    {
        throw new ApplicationException(lastErrorMessage);
    }
}
/// <summary>
/// Recursively scans <paramref name="dir"/> for DICOM files whose dataset matches the
/// PaletteColor search criteria and copies each match into <c>DestinationDirectory</c>
/// under a sequential file name (<c>_imageCount</c> + DICOM extension).
/// </summary>
/// <param name="dir">Root directory to scan; all subdirectories are visited as well.</param>
public void SearchDirectories(DirectoryInfo dir)
{
    FileInfo[] files = dir.GetFiles();

    Platform.Log(LogLevel.Info, "Scanning directory: {0}", dir.FullName);

    foreach (FileInfo file in files)
    {
        DicomFile dicomFile = new DicomFile(file.FullName);
        try
        {
            Platform.Log(LogLevel.Info, "Checking file: {0}", file.FullName);

            // Pixel data is not needed for the attribute search; keep it out of memory.
            dicomFile.Load(DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default);

            if (SearchAttributeSet(dicomFile.DataSet, file.FullName, SearchTypes.PaletteColor))
            {
                string destination = Path.Combine(DestinationDirectory, _imageCount + ServerPlatform.DicomFileExtension);
                if (File.Exists(destination))
                    File.Delete(destination);

                File.Copy(file.FullName, destination);
                _imageCount++;
            }
        }
        catch (Exception e)
        {
            // Previously an empty catch with a TODO. Non-DICOM or unreadable files
            // are expected during a bulk scan, so log the failure and continue.
            Platform.Log(LogLevel.Warn, e, "Skipping unreadable file: {0}", file.FullName);
        }
    }

    // Recurse into every subdirectory (the redundant trailing 'continue' was removed).
    String[] subdirectories = Directory.GetDirectories(dir.FullName);
    foreach (String subPath in subdirectories)
    {
        SearchDirectories(new DirectoryInfo(subPath));
    }
}
/// <summary>
/// Handles GET requests for a series preview image: downloads the requested series,
/// picks a representative image file (preferring one at or after the middle of the
/// series) and returns it as a base64-encoded JPEG data URI inside a JSON response.
/// A "check" query parameter short-circuits with a simple health-check response.
/// The downloaded study folder is always cleaned up afterwards.
/// </summary>
/// <param name="context">HTTP listener context carrying the request and response.</param>
public void HandleAllGetRequests(HttpListenerContext context)
{
    var rc = new ResponseClass();
    string studyuid = context.Request.QueryString["studyuid"];
    string seriesuid = context.Request.QueryString["seriesuid"];
    string check = context.Request.QueryString["check"];

    // Health-check probe: reply immediately without touching the DICOM node.
    if (!string.IsNullOrEmpty(check))
    {
        rc.Success = true;
        rc.Message = "";
        rc.Data = true;
        string json = Newtonsoft.Json.JsonConvert.SerializeObject(rc);
        this.SendTextResponse(context, json);
        return;
    }

    var node = ADCM.GetSelectedNode();
    try
    {
        LOG.Write("New request");
        if (string.IsNullOrEmpty(studyuid) || string.IsNullOrEmpty(seriesuid))
        {
            throw new Exception("No studyuid or seriesuid provided");
        }

        bool downloaded = ADCM.DownloadOneSeries(studyuid, seriesuid);
        if (!downloaded)
        {
            throw new Exception("Unable to download study");
        }

        string seriesPath = Path.Combine(node.LocalStorage, studyuid, seriesuid);
        if (!Directory.Exists(seriesPath))
        {
            throw new Exception("Series path not found: " + seriesPath);
        }

        var dcmFiles = Directory.GetFiles(seriesPath, "*.dcm");

        // Prefer a file at or after the middle of the series (middle slices are
        // usually the most representative).
        // BUGFIX: the original computed dcmFiles.Length / 2 with *integer*
        // division, which made the subsequent Math.Ceiling a no-op; dividing
        // by 2m restores the intended round-up behaviour.
        decimal mid = dcmFiles.Length / 2m;
        int index = (int)Math.Ceiling(mid);

        // Search from the middle first, then fall back to scanning the whole list.
        string filetouse = FindFirstImageFile(dcmFiles, index)
                           ?? FindFirstImageFile(dcmFiles, 0);

        if (string.IsNullOrEmpty(filetouse))
        {
            throw new Exception("Unable to find image in downloaded DICOM files");
        }
        if (!File.Exists(filetouse))
        {
            throw new Exception("Unable to find DICOM file to use");
        }

        string base64String = Convert.ToBase64String(AIMG.GetImageBytesFromDcm(filetouse));
        base64String = "data:image/jpeg;base64," + base64String;

        rc.Success = true;
        rc.Message = "";
        rc.Data = base64String;
        string json = Newtonsoft.Json.JsonConvert.SerializeObject(rc);
        this.SendTextResponse(context, json);
    }
    catch (Exception ex)
    {
        LOG.Write("ERROR: " + ex.Message);
        rc.Data = null;
        rc.Success = false;
        rc.Message = ex.Message;
        string json = Newtonsoft.Json.JsonConvert.SerializeObject(rc);
        this.SendTextResponse(context, json);
    }
    finally
    {
        // Guard against a null/empty studyuid so cleanup cannot itself throw
        // from the finally block and mask the original exception.
        if (node != null && !string.IsNullOrEmpty(studyuid))
        {
            string studypath = Path.Combine(node.LocalStorage, studyuid);
            if (Directory.Exists(studypath))
            {
                Directory.Delete(studypath, true);
            }
        }
    }
}

/// <summary>
/// Returns the first file at or after <paramref name="startIndex"/> that loads as an
/// image SOP instance, or null when none qualifies. Replaces two previously
/// duplicated search loops.
/// </summary>
private static string FindFirstImageFile(string[] dcmFiles, int startIndex)
{
    for (int i = startIndex; i < dcmFiles.Length; i++)
    {
        var dcmFile = new ClearCanvas.Dicom.DicomFile(dcmFiles[i]);
        dcmFile.Load();
        var localds = new ClearCanvas.ImageViewer.StudyManagement.LocalSopDataSource(dcmFile);
        if (localds.IsImage)
        {
            return dcmFiles[i];
        }
    }
    return null;
}
/// <summary>
/// Recursively scans <paramref name="dir"/> and sorts each DICOM file into
/// <c>DestinationDirectory</c>/&lt;transfer syntax&gt;/&lt;study uid&gt;/&lt;series uid&gt;/&lt;sop uid&gt;,
/// deleting source files that already exist at the destination. DICOMDIR
/// (media storage directory) files are skipped.
/// </summary>
/// <param name="dir">Root directory to scan; all subdirectories are visited as well.</param>
private void ScanDirectories(DirectoryInfo dir)
{
    FileInfo[] files = dir.GetFiles();

    Platform.Log(LogLevel.Info, "Scanning directory: {0}", dir.FullName);

    foreach (FileInfo file in files)
    {
        DicomFile dicomFile = new DicomFile(file.FullName);
        try
        {
            dicomFile.Load(DicomReadOptions.DoNotStorePixelDataInDataSet | DicomReadOptions.Default);

            // BUGFIX: this DICOMDIR check previously ran *before* Load(), at which
            // point the file meta information had not been parsed and the SOP
            // class was not yet known. It must run after the file is loaded.
            if (dicomFile.SopClass.Equals(SopClass.MediaStorageDirectoryStorage))
                continue;

            string destination = DestinationDirectory;
            destination = Path.Combine(destination, CreateVariableName(dicomFile.TransferSyntax.Name));
            destination = Path.Combine(destination, dicomFile.DataSet[DicomTags.StudyInstanceUid].ToString());
            destination = Path.Combine(destination, dicomFile.DataSet[DicomTags.SeriesInstanceUid].ToString());

            try
            {
                Directory.CreateDirectory(destination);

                string filename = Path.Combine(destination, dicomFile.MediaStorageSopInstanceUid + ServerPlatform.DicomFileExtension);
                if (File.Exists(filename))
                {
                    // Duplicate SOP instance: keep the copy already in the destination.
                    Platform.Log(LogLevel.Info, "File has already been stored in destination folder: {0}", file.FullName);
                    File.Delete(file.FullName);
                }
                else
                    File.Move(file.FullName, filename);
            }
            catch (Exception e)
            {
                // Previously an empty catch; a failed move/delete should at least be visible.
                Platform.Log(LogLevel.Warn, e, "Failed to sort file into destination: {0}", file.FullName);
            }
        }
        catch (Exception e)
        {
            // Previously an empty catch with a TODO; non-DICOM files are expected
            // during a bulk scan, so log the failure and continue.
            Platform.Log(LogLevel.Warn, e, "Skipping unreadable file: {0}", file.FullName);
        }
    }

    // Recurse into every subdirectory (the redundant trailing 'continue' was removed).
    String[] subdirectories = Directory.GetDirectories(dir.FullName);
    foreach (String subPath in subdirectories)
    {
        ScanDirectories(new DirectoryInfo(subPath));
    }
}
/// <summary>
/// Exports a single file to <c>OutputPath</c>: when an anonymizer is configured the
/// file is loaded, anonymized and re-saved under its SOP instance UID; otherwise it
/// is copied verbatim under its original name. Every exported instance is recorded
/// for auditing, and the write is skipped if cancellation was requested.
/// </summary>
/// <param name="filename">Full path of the source DICOM file.</param>
private void SaveFile(string filename)
{
    if (_anonymizer == null)
    {
        // Plain export: copy the original file unchanged under its own name.
        _exportedInstances.AddPath(filename, false);

        string destination = Path.Combine(OutputPath, Path.GetFileName(filename));
        CheckFileExists(destination);
        if (_canceled)
            return;

        File.Copy(filename, destination, true);
        return;
    }

    DicomFile dicomFile = new DicomFile(filename);
    dicomFile.Load();

    //anonymize first, then audit, since this is what gets exported.
    _anonymizer.Anonymize(dicomFile);

    _exportedInstances.AddInstance(
        dicomFile.DataSet[DicomTags.PatientId].ToString(),
        dicomFile.DataSet[DicomTags.PatientsName].ToString(),
        dicomFile.DataSet[DicomTags.StudyInstanceUid].ToString(),
        filename);

    string fileName = System.IO.Path.Combine(OutputPath, dicomFile.MediaStorageSopInstanceUid) + ".dcm";
    CheckFileExists(fileName); // this will never happen for anonymized images.
    if (_canceled)
        return;

    dicomFile.Save(fileName);
}
/// <summary>
/// Runs the study-level (and per-series) server rules for this study at the given
/// apply time, using one representative SOP instance from each series. The rules
/// engine is always marked complete in the finally block, even when an exception
/// escapes.
/// </summary>
/// <param name="applyTime">Rule apply time to evaluate (e.g. StudyProcessed).</param>
/// <param name="theProcessor">Command processor the rule actions are queued into.</param>
public void Apply(ServerRuleApplyTimeEnum applyTime, CommandProcessor theProcessor)
{
    try
    {
        // Reuse the previously loaded engine only when it was built for the same apply time.
        bool needFreshEngine = _studyRulesEngine == null || !_studyRulesEngine.RuleApplyTime.Equals(applyTime);
        if (needFreshEngine)
        {
            _studyRulesEngine = new ServerRulesEngine(applyTime, _location.ServerPartitionKey);
            _studyRulesEngine.Load();
        }

        List<string> files = GetFirstInstanceInEachStudySeries();
        if (files.Count == 0)
        {
            string message = String.Format("Unexpectedly unable to find SOP instances for rules engine in each series in study: {0}", _location.StudyInstanceUid);
            Platform.Log(LogLevel.Error, message);
            throw new ApplicationException(message);
        }

        Platform.Log(LogLevel.Info, "Processing Study Level rules for study {0} on partition {1} at {2} apply time",
                     _location.StudyInstanceUid, _partition.Description, applyTime.Description);

        foreach (string seriesFilePath in files)
        {
            var representativeFile = new DicomFile(seriesFilePath);
            representativeFile.Load(DicomReadOptions.Default);

            var actionContext = new ServerActionContext(representativeFile, _location.FilesystemKey, _partition, _location.Key, theProcessor)
                {
                    RuleEngine = _studyRulesEngine
                };
            _studyRulesEngine.Execute(actionContext);

            ProcessSeriesRules(representativeFile, theProcessor);
        }

        if (applyTime.Equals(ServerRuleApplyTimeEnum.StudyProcessed))
        {
            // This is a bit kludgy, but we had a problem with studies with only 1 image incorectlly
            // having archive requests inserted when they were scheduled for deletion. Calling
            // this command here so that if a delete is inserted at the study level, we will remove
            // the previously inserted archive request for the study. Note also this has to be done
            // after the rules engine is executed.
            theProcessor.AddCommand(new InsertArchiveQueueCommand(_location.ServerPartitionKey, _location.Key));
        }
    }
    finally
    {
        if (_studyRulesEngine != null)
            _studyRulesEngine.Complete(_studyRulesEngine.RulesApplied);
    }
}
/// <summary>
/// Work-queue entry point for reprocessing an entire study: walks the study folder,
/// re-inserts every readable .dcm file that is missing from the study XML, removes
/// unreadable/foreign files, and persists resumable progress state in the queue
/// item's Data column. Processes at most 5000 files per invocation, re-queuing
/// itself as Pending until a pass reprocesses nothing.
/// </summary>
/// <param name="item">The ReprocessStudy work-queue item being executed.</param>
protected override void ProcessItem(Model.WorkQueue item)
{
    Platform.CheckForNullReference(item, "item");
    Platform.CheckForNullReference(item.StudyStorageKey, "item.StudyStorageKey");

    var context = new StudyProcessorContext(StorageLocation, WorkQueueItem);

    // TODO: Should we enforce the patient's name rule?
    // If we do, the Study record will have the new patient's name
    // but how should we handle the name in the Patient record?
    const bool enforceNameRules = false;
    var processor = new SopInstanceProcessor(context) { EnforceNameRules = enforceNameRules};

    // Maps series uid -> sop instance uids seen so far; used afterwards to verify
    // the study XML object count is consistent with what was found on disk.
    var seriesMap = new Dictionary<string, List<string>>();

    bool successful = true;
    string failureDescription = null;

    // The processor stores its state in the Data column
    ReadQueueData(item);

    if (_queueData.State == null || !_queueData.State.ExecuteAtLeastOnce)
    {
        // First run of this entry (no saved state yet).

        // Added for ticket #9673:
        // If the study folder does not exist and the study has been archived, trigger a restore and we're done
        if (!Directory.Exists(StorageLocation.GetStudyPath()))
        {
            if (StorageLocation.ArchiveLocations.Count > 0)
            {
                Platform.Log(LogLevel.Info,
                             "Reprocessing archived study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4} without study data on the filesystem. Inserting Restore Request.",
                             Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber, ServerPartition.Description);

                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);

                // Post process had to be done first so the study is unlocked so the RestoreRequest can be inserted.
                ServerHelper.InsertRestoreRequest(StorageLocation);

                RaiseAlert(WorkQueueItem, AlertLevel.Warning,
                           string.Format("Found study {0} for Patient {1} (A#:{2})on Partition {3} without storage folder, restoring study.",
                                         Study.StudyInstanceUid, Study.PatientsName, Study.AccessionNumber, ServerPartition.Description));
                return;
            }
        }

        if (Study == null)
            Platform.Log(LogLevel.Info,
                         "Reprocessing study {0} on Partition {1}", StorageLocation.StudyInstanceUid,
                         ServerPartition.Description);
        else
            Platform.Log(LogLevel.Info,
                         "Reprocessing study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                         Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber,
                         ServerPartition.Description);

        CleanupDatabase();
    }
    else
    {
        if (_queueData.State.Completed)
        {
            #region SAFE-GUARD CODE: PREVENT INFINITE LOOP

            // The processor indicated it had completed reprocessing in previous run. The entry should have been removed and this block of code should never be called.
            // However, we have seen ReprocessStudy entries that mysterously contain rows in the WorkQueueUid table.
            // The rows prevent the entry from being removed from the database and the ReprocessStudy keeps repeating itself.

            // update the state first, increment the CompleteAttemptCount
            _queueData.State.ExecuteAtLeastOnce = true;
            _queueData.State.Completed = true;
            _queueData.State.CompleteAttemptCount++;
            SaveState(item, _queueData);

            if (_queueData.State.CompleteAttemptCount < 10)
            {
                // maybe there was db error in previous attempt to remove the entry. Let's try again.
                Platform.Log(LogLevel.Info, "Resuming Reprocessing study {0} but it was already completed!!!", StorageLocation.StudyInstanceUid);
                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
            }
            else
            {
                // we are definitely stuck.
                Platform.Log(LogLevel.Error, "ReprocessStudy {0} for study {1} appears stuck. Aborting it.", item.Key, StorageLocation.StudyInstanceUid);
                item.FailureDescription = "This entry had completed but could not be removed.";
                PostProcessingFailure(item, WorkQueueProcessorFailureType.Fatal);
            }

            return;

            #endregion
        }

        if (Study == null)
            Platform.Log(LogLevel.Info,
                         "Resuming Reprocessing study {0} on Partition {1}", StorageLocation.StudyInstanceUid,
                         ServerPartition.Description);
        else
            Platform.Log(LogLevel.Info,
                         "Resuming Reprocessing study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}",
                         Study.StudyInstanceUid, Study.PatientsName, Study.PatientId, Study.AccessionNumber,
                         ServerPartition.Description);
    }

    StudyXml studyXml = LoadStudyXml();

    int reprocessedCounter = 0;
    var removedFiles = new List<FileInfo>();
    try
    {
        // Traverse the directories, process 500 files at a time
        FileProcessor.Process(StorageLocation.GetStudyPath(), "*.*",
                              delegate(string path, out bool cancel)
                                  {
                                      #region Reprocess File

                                      var file = new FileInfo(path);

                                      // ignore all files except those ending ".dcm"
                                      // ignore "bad(0).dcm" files too
                                      if (Regex.IsMatch(file.Name.ToUpper(), "[0-9]+\\.DCM$"))
                                      {
                                          try
                                          {
                                              // Pixel data is kept as on-disk references to limit memory usage.
                                              var dicomFile = new DicomFile(path);
                                              dicomFile.Load(DicomReadOptions.StorePixelDataReferences | DicomReadOptions.Default);

                                              string seriesUid = dicomFile.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty);
                                              string instanceUid = dicomFile.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty);
                                              if (studyXml.Contains(seriesUid, instanceUid))
                                              {
                                                  // Already indexed: just record it in the series map.
                                                  if (!seriesMap.ContainsKey(seriesUid))
                                                  {
                                                      seriesMap.Add(seriesUid, new List<string>());
                                                  }
                                                  if (!seriesMap[seriesUid].Contains(instanceUid))
                                                      seriesMap[seriesUid].Add(instanceUid);
                                                  else
                                                  {
                                                      Platform.Log(LogLevel.Warn, "SOP Instance UID in {0} appears more than once in the study.", path);
                                                  }
                                              }
                                              else
                                              {
                                                  // Not in the study XML: push it through the SOP processor.
                                                  Platform.Log(ServerPlatform.InstanceLogLevel, "Reprocessing SOP {0} for study {1}", instanceUid, StorageLocation.StudyInstanceUid);
                                                  string groupId = ServerHelper.GetUidGroup(dicomFile, StorageLocation.ServerPartition, WorkQueueItem.InsertTime);
                                                  ProcessingResult result = processor.ProcessFile(groupId, dicomFile, studyXml, true, false, null, null, SopInstanceProcessorSopType.ReprocessedSop);
                                                  switch (result.Status)
                                                  {
                                                      case ProcessingStatus.Success:
                                                          reprocessedCounter++;
                                                          if (!seriesMap.ContainsKey(seriesUid))
                                                          {
                                                              seriesMap.Add(seriesUid, new List<string>());
                                                          }
                                                          if (!seriesMap[seriesUid].Contains(instanceUid))
                                                              seriesMap[seriesUid].Add(instanceUid);
                                                          else
                                                          {
                                                              Platform.Log(LogLevel.Warn, "SOP Instance UID in {0} appears more than once in the study.", path);
                                                          }
                                                          break;

                                                      case ProcessingStatus.Reconciled:
                                                          Platform.Log(LogLevel.Warn, "SOP was unexpectedly reconciled on reprocess SOP {0} for study {1}. It will be removed from the folder.", instanceUid, StorageLocation.StudyInstanceUid);
                                                          failureDescription = String.Format("SOP Was reconciled: {0}", instanceUid);

                                                          // Added for #10620 (Previously we didn't do anything here)
                                                          // Because we are reprocessing files in the study folder, when file needs to be reconciled it is copied to the reconcile folder
                                                          // Therefore, we need to delete the one in the study folder. Otherwise, there will be problem when the SIQ entry is reconciled.
                                                          // InstanceAlreadyExistsException will also be thrown by the SOpInstanceProcessor if this ReprocessStudy WQI
                                                          // resumes and reprocesses the same file again.
                                                          // Note: we are sure that the file has been copied to the Reconcile folder and there's no way back.
                                                          // We must get rid of this file in the study folder.
                                                          FileUtils.Delete(path);

                                                          // Special handling: if the file is one which we're supposed to reprocess at the end (see ProcessAdditionalFiles), we must remove the file from the list
                                                          if (_additionalFilesToProcess != null && _additionalFilesToProcess.Contains(path))
                                                          {
                                                              _additionalFilesToProcess.Remove(path);
                                                          }

                                                          break;
                                                  }
                                              }
                                          }
                                          catch (DicomException ex)
                                          {
                                              // TODO : should we fail the reprocess instead? Deleting an dicom file can lead to incomplete study.
                                              removedFiles.Add(file);
                                              Platform.Log(LogLevel.Warn, "Skip reprocessing and delete {0}: Not readable.", path);
                                              FileUtils.Delete(path);
                                              failureDescription = ex.Message;
                                          }
                                      }
                                      else if (!file.Extension.Equals(".xml") && !file.Extension.Equals(".gz"))
                                      {
                                          // not a ".dcm" or header file, delete it
                                          removedFiles.Add(file);
                                          FileUtils.Delete(path);
                                      }

                                      #endregion

                                      // Periodic progress logging every 200 reprocessed files.
                                      if (reprocessedCounter > 0 && reprocessedCounter % 200 == 0)
                                      {
                                          Platform.Log(LogLevel.Info, "Reprocessed {0} files for study {1}", reprocessedCounter, StorageLocation.StudyInstanceUid);
                                      }

                                      // Stop after 5000 files; the entry will be re-queued as Pending.
                                      cancel = reprocessedCounter >= 5000;
                                  }, true);

        if (studyXml != null)
        {
            EnsureConsistentObjectCount(studyXml, seriesMap);
            SaveStudyXml(studyXml);
        }

        // Completed if either all files have been reprocessed
        // or no more dicom files left that can be reprocessed.
        _completed = reprocessedCounter == 0;
    }
    catch (Exception e)
    {
        successful = false;
        failureDescription = e.Message;
        Platform.Log(LogLevel.Error, e, "Unexpected exception when reprocessing study: {0}", StorageLocation.StudyInstanceUid);
        Platform.Log(LogLevel.Error, "Study may be in invalid unprocessed state.  Study location: {0}", StorageLocation.GetStudyPath());
        throw;
    }
    finally
    {
        LogRemovedFiles(removedFiles);

        // Update the state
        _queueData.State.ExecuteAtLeastOnce = true;
        _queueData.State.Completed = _completed;
        _queueData.State.CompleteAttemptCount++;
        SaveState(item, _queueData);

        if (!successful)
        {
            FailQueueItem(item, failureDescription);
        }
        else
        {
            if (!_completed)
            {
                // Put it back to Pending
                PostProcessing(item, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
            }
            else
            {
                LogHistory();

                // Run Study / Series Rules Engine.
                var engine = new StudyRulesEngine(StorageLocation, ServerPartition);
                engine.Apply(ServerRuleApplyTimeEnum.StudyProcessed);

                // Log the FilesystemQueue related entries
                StorageLocation.LogFilesystemQueue();

                PostProcessing(item, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);

                Platform.Log(LogLevel.Info, "Completed reprocessing of study {0} on partition {1}", StorageLocation.StudyInstanceUid, ServerPartition.Description);
            }
        }
    }
}
/// <summary>
/// Generic routine to send the next C-STORE-RQ message in the _fileList.
/// </summary>
/// <param name="client">DICOM Client class</param>
/// <param name="association">Association Parameters</param>
/// <returns>True when the request was sent; false when the file could not be
/// loaded or no matching presentation context was negotiated.</returns>
public bool SendCStore(DicomClient client, ClientAssociationParameters association)
{
    FileToSend nextFile = _fileList[_fileListIndex];

    var storeFile = new DicomFile(nextFile.filename);
    try
    {
        storeFile.Load(DicomReadOptions.Default);
    }
    catch (DicomException e)
    {
        Logger.LogErrorException(e, "Unexpected exception when loading DICOM file {0}", nextFile.filename);
        return false;
    }

    var msg = new DicomMessage(storeFile);

    // Look for a presentation context negotiated for this SOP class with the
    // file's own transfer syntax.
    byte pcid = association.FindAbstractSyntaxWithTransferSyntax(nextFile.sopClass, storeFile.TransferSyntax);
    if (pcid == 0)
    {
        // An Implicit VR LE file may also be sent over an Explicit VR LE context.
        if (storeFile.TransferSyntax.Equals(TransferSyntax.ImplicitVrLittleEndian))
            pcid = association.FindAbstractSyntaxWithTransferSyntax(nextFile.sopClass, TransferSyntax.ExplicitVrLittleEndian);

        if (pcid == 0)
        {
            Logger.LogError(
                "Unable to find matching negotiated presentation context for sop {0} and syntax {1}",
                storeFile.SopClass.Name, storeFile.TransferSyntax.Name);
            return false;
        }
    }

    client.SendCStoreRequest(pcid, client.NextMessageID(), DicomPriority.Medium, msg);
    return true;
}