/// <summary>
/// Releases the held SOP objects and deletes the backing file from disk, if any.
/// </summary>
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        base.Dispose(disposing);
        return;
    }

    _frame = null;
    _dicomFile = null;

    if (_imageSop != null)
    {
        _imageSop.Dispose();
        _imageSop = null;
    }

    if (_sopDataSource != null)
    {
        _sopDataSource.Dispose();
        _sopDataSource = null;
    }

    // clean up the file this instance wrote to disk
    if (_filename != null)
    {
        if (File.Exists(_filename))
            File.Delete(_filename);
        _filename = null;
    }

    base.Dispose(disposing);
}
/// <summary>
/// Retrieves the pixel data for the specified frame from its streaming data source,
/// tracking the number of concurrently active retrieve threads.
/// </summary>
/// <param name="frame">The frame whose pixel data is to be retrieved.</param>
public void RetrieveFrame(Frame frame)
{
    Interlocked.Increment(ref _activeRetrieveThreads);
    try
    {
        // trace the current level of retrieve concurrency for diagnostics
        Trace.WriteLine(String.Format("Retrieving Frame (active threads: {0})", Thread.VolatileRead(ref _activeRetrieveThreads)));

        var source = (IStreamingSopDataSource) frame.ParentImageSop.DataSource;
        source.GetFrameData(frame.FrameNumber).RetrievePixelData();
    }
    catch (OutOfMemoryException)
    {
        Platform.Log(LogLevel.Error, "Out of memory trying to retrieve pixel data.");
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error, e, "Error retrieving frame pixel data.");
    }
    finally
    {
        Interlocked.Decrement(ref _activeRetrieveThreads);
    }
}
/// <summary>
/// Constructor (for use by <see cref="Frame"/> class only).
/// </summary>
/// <param name="frame">The frame whose position, orientation and spacing define the image plane.</param>
internal ImagePlaneHelper(Frame frame)
    : this(frame.ImagePositionPatient, frame.ImageOrientationPatient, frame.PixelSpacing, frame.Rows, frame.Columns)
{
    // this constructor is internal because it keeps references to the source frame's position, orientation and spacing properties
    // if the frame is changed or disposed, the calculations made by this instance would be affected
    // thus, only the Frame class should use this constructor, since it will thus have the same lifetime as the referenced objects
    // if you need to create an instance separated from a frame, use the other constructor with appropriate clones of the position, orientation and spacing
}
/// <summary>
/// Determines whether the frame's pixel data has already been retrieved from a
/// streaming data source and is therefore available for decompression.
/// </summary>
/// <param name="frame">The frame to check.</param>
/// <returns>False if the frame is not backed by a <see cref="StreamingSopDataSource"/>.</returns>
public bool CanDecompressFrame(Frame frame)
{
    var dataSource = frame.ParentImageSop.DataSource as StreamingSopDataSource;
    if (dataSource == null)
        return false;

    return dataSource.GetFrameData(frame.FrameNumber).PixelDataRetrieved;
}
/// <summary>
/// Determines whether the frame's pixel data has already been retrieved from a
/// streaming data source and is therefore available for decompression.
/// </summary>
/// <param name="frame">The frame to check.</param>
/// <returns>False if the frame is not backed by a <see cref="StreamingSopDataSource"/>.</returns>
public bool CanDecompressFrame(Frame frame)
{
    if (!(frame.ParentImageSop.DataSource is StreamingSopDataSource))
        return false;

    var streamingDataSource = (StreamingSopDataSource) frame.ParentImageSop.DataSource;
    var streamingFrameData = streamingDataSource.GetFrameData(frame.FrameNumber);
    return streamingFrameData.PixelDataRetrieved;
}
/// <summary>
/// Yields the ordered sequence of values used to sort frames by image plane
/// (study/series grouping first, then plane normal and position, then instance/frame numbers).
/// </summary>
private static IEnumerable<IComparable> GetCompareValues(Frame frame)
{
    //Group by common study level attributes
    yield return frame.StudyInstanceUid;

    //Group by common series level attributes
    //This sorts "FOR PRESENTATION" images to the beginning (except in reverse, of course).
    yield return frame.ParentImageSop.PresentationIntentType == "FOR PRESENTATION" ? 0 : 1;
    yield return frame.ParentImageSop.SeriesNumber;
    yield return frame.ParentImageSop.SeriesDescription;
    yield return frame.SeriesInstanceUid;

    yield return frame.FrameOfReferenceUid;

    double? normalX = null, normalY = null, normalZ = null;
    double? zImagePlane = null;

    Vector3D normal = frame.ImagePlaneHelper.GetNormalVector();
    if (normal != null)
    {
        // Return the 3 components of the image normal; if they are all equal
        // then the images are in the same plane. We are disregarding
        // the rare case where the 2 normals being compared are the negative
        // of each other - technically, they could be considered to be in the
        // same 'plane', but for the purposes of sorting, we won't consider it.
        normalX = Math.Round(normal.X, 3, MidpointRounding.AwayFromZero);
        normalY = Math.Round(normal.Y, 3, MidpointRounding.AwayFromZero);
        normalZ = Math.Round(normal.Z, 3, MidpointRounding.AwayFromZero);

        // project the centre of the image into patient space, then onto the image plane axes
        Vector3D positionPatient = frame.ImagePlaneHelper.ConvertToPatient(new PointF((frame.Columns - 1) / 2F, (frame.Rows - 1) / 2F));
        if (positionPatient != null)
        {
            Vector3D positionImagePlane = frame.ImagePlaneHelper.ConvertToImagePlane(positionPatient, Vector3D.Null);
            //return only the z-component of the image plane position (where the origin remains at the patient origin).
            zImagePlane = Math.Round(positionImagePlane.Z, 3, MidpointRounding.AwayFromZero);
        }
    }

    yield return normalX;
    yield return normalY;
    yield return normalZ;
    yield return zImagePlane;

    //as a last resort.
    yield return frame.ParentImageSop.InstanceNumber;
    yield return frame.FrameNumber;
    yield return frame.AcquisitionNumber;
}
/// <summary>
/// Yields the ordered sequence of values used to sort frames by instance and frame number
/// (study/series grouping first).
/// </summary>
private static IEnumerable<IComparable> GetCompareValues(Frame frame)
{
    //Group by common study level attributes
    yield return frame.StudyInstanceUid;

    //Group by common series level attributes
    //This sorts "FOR PRESENTATION" images to the beginning (except in reverse, of course).
    yield return frame.ParentImageSop.PresentationIntentType == "FOR PRESENTATION" ? 0 : 1;
    yield return frame.ParentImageSop.SeriesNumber;
    yield return frame.ParentImageSop.SeriesDescription;
    yield return frame.SeriesInstanceUid;

    yield return frame.ParentImageSop.InstanceNumber;
    yield return frame.FrameNumber;

    //as a last resort.
    yield return frame.AcquisitionNumber;
}
/// <summary>
/// Forces decompression of the frame's pixel data by requesting its normalized pixel data,
/// logging (rather than propagating) any failure.
/// </summary>
/// <param name="frame">The frame to decompress.</param>
public void DecompressFrame(Frame frame)
{
    try
    {
        //TODO: try to trigger header retrieval for data luts?
        frame.GetNormalizedPixelData();
    }
    catch (OutOfMemoryException)
    {
        Platform.Log(LogLevel.Error, "Out of memory trying to decompress pixel data.");
    }
    catch (Exception failure)
    {
        Platform.Log(LogLevel.Error, failure, "Error decompressing frame pixel data.");
    }
}
/// <summary>
/// Creates a mock DICOM presentation image backed by a real secondary-capture file written to disk.
/// </summary>
/// <param name="filename">Path for the backing DICOM file; relative paths are resolved against the current directory.</param>
public MockDicomPresentationImage(string filename)
    : base(new GrayscaleImageGraphic(10, 10))
{
    // resolve relative paths against the current working directory
    if (Path.IsPathRooted(filename))
        _filename = filename;
    else
        _filename = Path.Combine(Environment.CurrentDirectory, filename);

    // build a minimal secondary-capture data set with a freshly generated SOP instance UID
    _dicomFile = new DicomFile();
    _dicomFile.DataSet[DicomTags.SopClassUid].SetStringValue(SopClass.SecondaryCaptureImageStorageUid);
    _dicomFile.DataSet[DicomTags.SopInstanceUid].SetStringValue(DicomUid.GenerateUid().UID);
    // mirror the SOP class/instance UIDs into the file meta information
    // NOTE(review): relies on DicomAttribute.ToString() returning the attribute's string value — confirm against the DICOM toolkit
    _dicomFile.MetaInfo[DicomTags.MediaStorageSopClassUid].SetStringValue(_dicomFile.DataSet[DicomTags.SopClassUid].ToString());
    _dicomFile.MetaInfo[DicomTags.MediaStorageSopInstanceUid].SetStringValue(_dicomFile.DataSet[DicomTags.SopInstanceUid].ToString());
    _dicomFile.Save(_filename);

    // wrap the saved file in SOP/frame objects; Dispose is responsible for cleanup
    _sopDataSource = new LocalSopDataSource(_dicomFile);
    _imageSop = new ImageSop(_sopDataSource);
    _frame = new MockFrame(_imageSop, 1);
}
/// <summary>
/// Retrieves the pixel data for the specified frame from its streaming data source,
/// logging (rather than propagating) any failure.
/// </summary>
/// <param name="frame">The frame whose pixel data is to be retrieved.</param>
public void RetrieveFrame(Frame frame)
{
    try
    {
        IStreamingSopDataSource streamingSource = (IStreamingSopDataSource) frame.ParentImageSop.DataSource;
        IStreamingSopFrameData streamingFrameData = streamingSource.GetFrameData(frame.FrameNumber);
        streamingFrameData.RetrievePixelData();
    }
    catch (OutOfMemoryException)
    {
        Platform.Log(LogLevel.Error, "Out of memory trying to retrieve pixel data.");
    }
    catch (Exception failure)
    {
        Platform.Log(LogLevel.Error, failure, "Error retrieving frame pixel data.");
    }
}
/// <summary>
/// Creates the presentation image appropriate for the frame's photometric interpretation,
/// applying the default presentation state when the image has none of its own.
/// </summary>
/// <param name="frame">The frame from which to create the image.</param>
/// <exception cref="Exception">Thrown if the photometric interpretation is unknown.</exception>
protected override IPresentationImage CreateImage(Frame frame)
{
    if (frame.PhotometricInterpretation == PhotometricInterpretation.Unknown)
        throw new Exception("Photometric interpretation is unknown.");

    // TODO (CR Apr 2013): Since it's the "async" factory, it probably should only deal in AsyncFrames. Just throw NotSupportedException?
    IDicomPresentationImage image;
    if (frame.PhotometricInterpretation.IsColor)
    {
        image = new DicomColorPresentationImage(frame);
    }
    else if (frame is AsyncFrame)
    {
        image = new AsyncDicomGrayscalePresentationImage((AsyncFrame) frame);
    }
    else
    {
        image = new DicomGrayscalePresentationImage(frame);
    }

    if (image.PresentationState == null || Equals(image.PresentationState, PresentationState.DicomDefault))
        image.PresentationState = DefaultPresentationState;

    return image;
}
/// <summary>
/// Yields the ordered sequence of values used to sort frames by acquisition date and time
/// (study/series grouping first, then instance/frame numbers as tie-breakers).
/// </summary>
private static IEnumerable<IComparable> GetCompareValues(Frame frame)
{
    //Group by common study level attributes
    yield return frame.StudyInstanceUid;

    //Group by common series level attributes
    //This sorts "FOR PRESENTATION" images to the beginning (except in reverse, of course).
    yield return frame.ParentImageSop.PresentationIntentType == "FOR PRESENTATION" ? 0 : 1;
    yield return frame.ParentImageSop.SeriesNumber;
    yield return frame.ParentImageSop.SeriesDescription;
    yield return frame.SeriesInstanceUid;

    DateTime? datePart = null;
    TimeSpan? timePart = null;

    //then sort by acquisition datetime.
    // prefer the combined AcquisitionDateTime attribute; fall back to separate date/time attributes
    DateTime? acquisitionDateTime = DateTimeParser.Parse(frame.AcquisitionDateTime);
    if (acquisitionDateTime != null)
    {
        datePart = acquisitionDateTime.Value.Date;
        timePart = acquisitionDateTime.Value.TimeOfDay;
    }
    else
    {
        datePart = DateParser.Parse(frame.AcquisitionDate);
        if (datePart != null)
        {
            //only set the time part if there is a valid date part.
            DateTime? acquisitionTime = TimeParser.Parse(frame.AcquisitionTime);
            if (acquisitionTime != null)
                timePart = acquisitionTime.Value.TimeOfDay;
        }
    }

    yield return datePart;
    yield return timePart;

    //as a last resort.
    yield return frame.ParentImageSop.InstanceNumber;
    yield return frame.FrameNumber;
    yield return frame.AcquisitionNumber;
}
/// <summary>
/// Creates a dynamic TE presentation image from the PD, T2 and chi-square probability map
/// files found for the frame's slice location within the given study.
/// </summary>
/// <param name="imageSop">The SOP whose study is searched for the maps.</param>
/// <param name="frame">The frame whose slice location selects the matching maps.</param>
private static DynamicTePresentationImage CreateT2Image(ImageSop imageSop, Frame frame)
{
    DicomFile pdMap = FindMap(imageSop.StudyInstanceUID, frame.SliceLocation, "PD");
    pdMap.Load(DicomReadOptions.Default);

    DicomFile t2Map = FindMap(imageSop.StudyInstanceUID, frame.SliceLocation, "T2");
    t2Map.Load(DicomReadOptions.Default);

    DicomFile probMap = FindMap(imageSop.StudyInstanceUID, frame.SliceLocation, "CHI2PROB");
    probMap.Load(DicomReadOptions.Default);

    // the maps' raw pixel data are handed to the image for TE computation
    DynamicTePresentationImage t2Image = new DynamicTePresentationImage(
        frame,
        (byte[])pdMap.DataSet[DicomTags.PixelData].Values,
        (byte[])t2Map.DataSet[DicomTags.PixelData].Values,
        (byte[])probMap.DataSet[DicomTags.PixelData].Values);

    // initial echo time of 50 ms; presumably adjusted later through the UI — TODO confirm
    t2Image.DynamicTe.Te = 50.0f;
    return t2Image;
}
/// <summary>
/// Prefetches the pixel data for <paramref name="frame"/>, unless all prefetch activity
/// has been stopped or available memory is below the configured limit.
/// </summary>
/// <param name="frame">The frame whose pixel data should be prefetched.</param>
private void RetrieveFrame(Frame frame)
{
    if (_stopAllActivity)
        return;

    //just return if the available memory is getting low - only retrieve and decompress on-demand now.
    if (SystemResources.GetAvailableMemory(SizeUnits.Megabytes) < Prefetch.Default.AvailableMemoryLimitMegabytes)
        return;

    // BUG FIX: the increment must happen after all early returns and before the try block;
    // previously the low-memory return sat inside the try, so the finally decremented
    // _activeRetrieveThreads without a matching increment, underflowing the counter.
    Interlocked.Increment(ref _activeRetrieveThreads);
    try
    {
        //TODO (CR May 2010): do we need to do this all the time?
        string message = String.Format("Retrieving Frame (active threads: {0})", Thread.VolatileRead(ref _activeRetrieveThreads));
        Trace.WriteLine(message);
        Console.WriteLine(message);

        //TODO: try to trigger header retrieval for data luts?
        frame.GetNormalizedPixelData();
    }
    catch (OutOfMemoryException)
    {
        // stop all further prefetching; on-demand retrieval may still succeed if memory frees up
        _stopAllActivity = true;
        Platform.Log(LogLevel.Error, "Out of memory trying to retrieve pixel data. Prefetching will not resume unless memory becomes available.");
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Error, e, "Error retrieving frame pixel data.");
    }
    finally
    {
        Interlocked.Decrement(ref _activeRetrieveThreads);
    }
}
/// <summary>
/// Initializes a descriptor for a display set containing a single frame.
/// </summary>
/// <param name="sourceSeries">The series the display set is derived from.</param>
/// <param name="frame">The single frame in the display set.</param>
/// <param name="position">The ordinal position of the display set.</param>
public SingleFrameDisplaySetDescriptor(ISeriesIdentifier sourceSeries, Frame frame, int position)
    : base(sourceSeries)
{
    Platform.CheckForNullReference(sourceSeries, "sourceSeries");
    Platform.CheckForNullReference(frame, "frame");

    _seriesInstanceUid = frame.SeriesInstanceUid;
    _sopInstanceUid = frame.SopInstanceUid;
    _frameNumber = frame.FrameNumber;
    _position = position;

    // choose the suffix format based on whether the frame actually belongs to the source series
    if (sourceSeries.SeriesInstanceUid == frame.SeriesInstanceUid)
    {
        _suffix = String.Format(SR.SuffixFormatSingleFrameDisplaySet, frame.ParentImageSop.InstanceNumber, _frameNumber);
    }
    else
    {
        //this is a referenced frame (e.g. key image).
        _suffix = String.Format(SR.SuffixFormatSingleReferencedFrameDisplaySet,
                                frame.ParentImageSop.SeriesNumber, frame.ParentImageSop.InstanceNumber, _frameNumber);
    }
}
/// <summary>
/// Initializes a new <see cref="DynamicTePresentationImage"/> from the given frame and maps.
/// </summary>
/// <param name="frame">The source frame.</param>
/// <param name="protonDensityMap">Raw pixel data of the proton density (PD) map.</param>
/// <param name="t2Map">Raw pixel data of the T2 map.</param>
/// <param name="probabilityMap">Raw pixel data of the chi-square probability map.</param>
public DynamicTePresentationImage(
    Frame frame,
    byte[] protonDensityMap,
    byte[] t2Map,
    byte[] probabilityMap)
    : base(frame)
{
    // BUG FIX: the null check previously reported the wrong parameter name ("imageSop")
    Platform.CheckForNullReference(frame, "frame");
    _frame = frame;

    // TODO (Norman): DicomFilteredAnnotationLayoutProvider was made internal. Either need to derive
    // this class from DicomGrayscalePresentationImage or create a layout provider.
    //this.AnnotationLayoutProvider = new DicomFilteredAnnotationLayoutProvider(this);

    AddProbabilityOverlay();
    _dynamicTe = new DynamicTe(
        this.ImageGraphic as GrayscaleImageGraphic,
        protonDensityMap,
        t2Map,
        _probabilityOverlay,
        probabilityMap);
}
/// <summary>
/// Retrieves the pixel data for the specified frame from its streaming data source,
/// tracking the number of concurrently active retrieve threads.
/// </summary>
/// <param name="frame">The frame whose pixel data is to be retrieved.</param>
public void RetrieveFrame(Frame frame)
{
    Interlocked.Increment(ref _activeRetrieveThreads);
    try
    {
        var source = (IStreamingSopDataSource) frame.ParentImageSop.DataSource;
        source.GetFrameData(frame.FrameNumber).RetrievePixelData();
    }
    catch (OutOfMemoryException)
    {
        Platform.Log(LogLevel.Error, "Out of memory trying to retrieve pixel data.");
    }
    catch (Exception failure)
    {
        Platform.Log(LogLevel.Error, failure, "Error retrieving frame pixel data.");
    }
    finally
    {
        Interlocked.Decrement(ref _activeRetrieveThreads);
    }
}
/// <summary>
/// Forces decompression of the frame's pixel data by requesting its normalized pixel data,
/// tracking the number of concurrently active decompress threads.
/// </summary>
/// <param name="frame">The frame to decompress.</param>
public void DecompressFrame(Frame frame)
{
    Interlocked.Increment(ref _activeDecompressThreads);
    try
    {
        // trace the current level of decompress concurrency for diagnostics
        Trace.WriteLine(String.Format("Decompressing Frame (active threads: {0})", Thread.VolatileRead(ref _activeDecompressThreads)));

        //TODO: try to trigger header retrieval for data luts?
        frame.GetNormalizedPixelData();
    }
    catch (OutOfMemoryException)
    {
        Platform.Log(LogLevel.Error, "Out of memory trying to decompress pixel data.");
    }
    catch (Exception failure)
    {
        Platform.Log(LogLevel.Error, failure, "Error decompressing frame pixel data.");
    }
    finally
    {
        Interlocked.Decrement(ref _activeDecompressThreads);
    }
}
/// <summary>
/// Creates the overlay plane graphics for the given frame, using only overlays
/// embedded in the image itself.
/// </summary>
/// <param name="frame">The frame for which overlay graphics are created.</param>
public static List<OverlayPlaneGraphic> CreateOverlayPlaneGraphics(Frame frame)
{
    // delegate to the full overload, with no presentation state overlays
    return CreateOverlayPlaneGraphics(frame, null);
}
/// <summary>
/// Compares two <see cref="Frame"/>s based on acquisition date and time.
/// </summary>
public override int Compare(Frame x, Frame y)
{
    IEnumerable<IComparable> xValues = GetCompareValues(x);
    IEnumerable<IComparable> yValues = GetCompareValues(y);
    return Compare(xValues, yValues);
}
/// <summary>
/// Initializes the Pixel Measures Functional Group from the frame's pixel spacing and slice thickness.
/// </summary>
/// <param name="functionalGroupsSequenceItem">Functional Group Sequence Item that the newly initialized Pixel Measures Functional Group will belong</param>
/// <param name="imageFrame"><see cref="Frame"/> that is a source of group's values</param>
private static void InitializePixelMeasureFunctionalGroup(FunctionalGroupsSequenceItem functionalGroupsSequenceItem, Frame imageFrame)
{
    var spacing = imageFrame.PixelSpacing;

    // nothing to record if the source frame carries no pixel spacing
    if (spacing.IsNull)
        return;

    var functionalGroup = functionalGroupsSequenceItem.GetFunctionalGroup<PixelMeasuresFunctionalGroup>();
    var measuresSequence = functionalGroup.CreatePixelMeasuresSequence();
    measuresSequence.PixelSpacing = new[] {spacing.Row, spacing.Column};
    measuresSequence.SliceThickness = imageFrame.SliceThickness;
    functionalGroup.PixelMeasuresSequence = measuresSequence;
}
/// <summary>
/// Initializes a new instance of <see cref="DicomColorPresentationImage"/>.
/// </summary>
/// <param name="frame">The <see cref="Frame"/> from which to construct the image.</param>
/// <remarks>
/// This constructor provides a convenient means of associating a <see cref="Frame"/> with a <see cref="ColorPresentationImage"/>.
/// </remarks>
public DicomColorPresentationImage(Frame frame)
    : this(frame.CreateTransientReference())
{
    // NOTE(review): delegates to the reference-based constructor; presumably the transient
    // reference manages the underlying SOP's lifetime — confirm against Frame.CreateTransientReference
}
/// <summary>
/// Initializes a new instance of <see cref="NormalizedPixelSpacing"/>.
/// </summary>
/// <param name="frame">The frame whose pixel spacing this instance normalizes.</param>
internal NormalizedPixelSpacing(Frame frame)
{
    _frame = frame;
    Initialize();
}
/// <summary>
/// Returns true if the frame's modality is one of the inherently cross-sectional
/// modalities (CT, MR, PT, NM); false for everything else.
/// </summary>
private static bool IsCrossSectionalModality(Frame frame)
{
    // Imager Pixel Spacing definitely does not apply to these modalities
    string modality = frame.ParentImageSop.Modality;
    if (modality == "CT" || modality == "MR" || modality == "PT" || modality == "NM")
        return true;

    // for safety reasons, we assume everything else might be projectional
    return false;
}
/// <summary>
/// Slices the fusion volume along the plane of <paramref name="baseFrame"/> and returns the
/// resulting overlay frame's normalized pixel data, together with the parameters needed to
/// register the overlay onto the base image.
/// </summary>
/// <param name="baseFrame">The base image frame defining the slicing plane.</param>
/// <param name="overlayFrameParams">Receives the overlay frame's dimensions, pixel format and registration transform.</param>
/// <returns>The overlay frame's normalized pixel data.</returns>
protected internal byte[] GetOverlay(Frame baseFrame, out OverlayFrameParams overlayFrameParams)
{
    var volume = this.Volume;

    // compute the bounds of the target base image frame in patient coordinates
    var baseTopLeft = baseFrame.ImagePlaneHelper.ConvertToPatient(new PointF(0, 0));
    var baseTopRight = baseFrame.ImagePlaneHelper.ConvertToPatient(new PointF(baseFrame.Columns, 0));
    var baseBottomLeft = baseFrame.ImagePlaneHelper.ConvertToPatient(new PointF(0, baseFrame.Rows));
    var baseFrameCentre = (baseTopRight + baseBottomLeft)/2;

    // compute the rotated volume slicing basis axes
    var volumeXAxis = (volume.ConvertToVolume(baseTopRight) - volume.ConvertToVolume(baseTopLeft)).Normalize();
    var volumeYAxis = (volume.ConvertToVolume(baseBottomLeft) - volume.ConvertToVolume(baseTopLeft)).Normalize();
    var volumeZAxis = volumeXAxis.Cross(volumeYAxis);

    var @params = new VolumeSlicerParams(volumeXAxis, volumeYAxis, volumeZAxis);
    // slice the volume through the base frame's centre; the slice SOP and frame are transient
    using (var slice = new VolumeSliceSopDataSource(volume, @params, volume.ConvertToVolume(baseFrameCentre)))
    {
        using (var sliceSop = new ImageSop(slice))
        {
            using (var overlayFrame = sliceSop.Frames[1])
            {
                // compute the bounds of the target overlay image frame in patient coordinates
                var overlayTopLeft = overlayFrame.ImagePlaneHelper.ConvertToPatient(new PointF(0, 0));
                var overlayTopRight = overlayFrame.ImagePlaneHelper.ConvertToPatient(new PointF(overlayFrame.Columns, 0));
                var overlayBottomLeft = overlayFrame.ImagePlaneHelper.ConvertToPatient(new PointF(0, overlayFrame.Rows));
                var overlayOffset = overlayTopLeft - baseTopLeft;

                // compute the overlay and base image resolution in pixels per unit patient space (mm).
                var overlayResolutionX = overlayFrame.Columns/(overlayTopRight - overlayTopLeft).Magnitude;
                var overlayResolutionY = overlayFrame.Rows/(overlayBottomLeft - overlayTopLeft).Magnitude;
                var baseResolutionX = baseFrame.Columns/(baseTopRight - baseTopLeft).Magnitude;
                var baseResolutionY = baseFrame.Rows/(baseBottomLeft - baseTopLeft).Magnitude;

                // compute parameters to register the overlay on the base image
                var scale = new PointF(baseResolutionX/overlayResolutionX, baseResolutionY/overlayResolutionY);
                var offset = new PointF(overlayOffset.X*overlayResolutionX, overlayOffset.Y*overlayResolutionY);

                //TODO (CR Sept 2010): could this be negative?
                // validate computed transform parameters
                Platform.CheckTrue(overlayOffset.Z < 0.5f, "Compute OffsetZ != 0");

                overlayFrameParams = new OverlayFrameParams(
                    overlayFrame.Rows, overlayFrame.Columns,
                    overlayFrame.BitsAllocated, overlayFrame.BitsStored,
                    overlayFrame.HighBit, overlayFrame.PixelRepresentation != 0 ? true : false,
                    overlayFrame.PhotometricInterpretation == PhotometricInterpretation.Monochrome1 ? true : false,
                    overlayFrame.RescaleSlope, overlayFrame.RescaleIntercept,
                    scale, offset);

                return overlayFrame.GetNormalizedPixelData();
            }
        }
    }
}
/// <summary>
/// Creates a <see cref="FusionOverlayFrameData"/> pairing the given base frame with this overlay data.
/// </summary>
/// <param name="baseFrame">The base frame to fuse the overlay onto.</param>
public FusionOverlayFrameData CreateOverlaySlice(Frame baseFrame)
{
    var baseFrameReference = baseFrame.CreateTransientReference();
    var overlayDataReference = this.CreateTransientReference();
    return new FusionOverlayFrameData(baseFrameReference, overlayDataReference);
}
/// <summary>
/// Builds a <see cref="DicomImagePlane"/> from the frame's geometry; returns null when the
/// frame has no valid normal or centre position.
/// </summary>
private static DicomImagePlane CreateFromFrame(Frame frame)
{
    int bottom = frame.Rows - 1;
    int right = frame.Columns - 1;
    var helper = frame.ImagePlaneHelper;

    DicomImagePlane plane = new DicomImagePlane();
    plane.PositionPatientTopLeft = helper.ConvertToPatient(new PointF(0, 0));
    plane.PositionPatientTopRight = helper.ConvertToPatient(new PointF(right, 0));
    plane.PositionPatientBottomLeft = helper.ConvertToPatient(new PointF(0, bottom));
    plane.PositionPatientBottomRight = helper.ConvertToPatient(new PointF(right, bottom));
    plane.PositionPatientCenterOfImage = helper.ConvertToPatient(new PointF(right / 2F, bottom / 2F));
    plane.Normal = helper.GetNormalVector();

    if (plane.Normal == null || plane.PositionPatientCenterOfImage == null)
        return null;

    // here, we want the position in the coordinate system of the image plane,
    // without moving the origin (e.g. leave it at the patient origin).
    plane.PositionImagePlaneTopLeft = helper.ConvertToImagePlane(plane.PositionPatientTopLeft, Vector3D.Null);

    return plane;
}
/// <summary>
/// Gets a <see cref="DicomImagePlane"/> for the frame, using a per-SOP/frame cache of the
/// underlying plane data to avoid recomputing the geometry.
/// </summary>
/// <param name="frame">The frame whose image plane is requested.</param>
/// <returns>The plane, or null if the frame's geometry could not be computed.</returns>
private static DicomImagePlane CreateFromCache(Frame frame)
{
    string key = String.Format("{0}:{1}", frame.ParentImageSop.SopInstanceUid, frame.FrameNumber);

    // use TryGetValue to avoid a double dictionary lookup (ContainsKey + indexer)
    DicomImagePlane cachedData;
    if (!ImagePlaneDataCache.TryGetValue(key, out cachedData))
    {
        cachedData = CreateFromFrame(frame);
        // only cache successfully computed plane data
        if (cachedData != null)
            ImagePlaneDataCache[key] = cachedData;
    }

    if (cachedData == null)
        return null;

    DicomImagePlane plane = new DicomImagePlane();
    plane.InitializeWithCachedData(cachedData);
    return plane;
}
/// <summary>
/// Initializes a new instance of <see cref="DicomGrayscalePresentationImage"/>.
/// </summary>
/// <param name="frame">The <see cref="Frame"/> from which to construct the image.</param>
/// <remarks>
/// This constructor provides a convenient means of associating a <see cref="Frame"/> with a <see cref="GrayscalePresentationImage"/>.
/// </remarks>
public DicomGrayscalePresentationImage(Frame frame)
    : this(frame.CreateTransientReference())
{
    // NOTE(review): delegates to the reference-based constructor; presumably the transient
    // reference manages the underlying SOP's lifetime — confirm against Frame.CreateTransientReference
}
/// <summary>
/// Creates overlay plane graphics for the given frame from overlays embedded in the image
/// header and, optionally, from overlays defined in a softcopy presentation state.
/// </summary>
/// <param name="frame">The frame for which overlay graphics are created.</param>
/// <param name="overlaysFromPresentationState">Overlay planes from a presentation state, or null for none.</param>
/// <returns>The overlay graphics; includes an error graphic if any overlay failed to deserialize.</returns>
public static List<OverlayPlaneGraphic> CreateOverlayPlaneGraphics(Frame frame, OverlayPlaneModuleIod overlaysFromPresentationState)
{
    ISopDataSource dataSource = frame.ParentImageSop.DataSource;
    OverlayPlaneModuleIod overlaysIod = new OverlayPlaneModuleIod(dataSource);

    List<OverlayPlaneGraphic> overlayPlaneGraphics = new List<OverlayPlaneGraphic>();

    bool failedOverlays = false;

    foreach (var overlayPlane in overlaysIod)
    {
        // DICOM 2009 PS 3.3 Section C.9.3.1.1 specifies the rule: NumberOfFramesInOverlay+ImageFrameOrigin-1 must be <= NumberOfFrames
        if (!overlayPlane.IsValidMultiFrameOverlay(frame.ParentImageSop.NumberOfFrames))
        {
            failedOverlays = true;
            Platform.Log(LogLevel.Warn, new DicomOverlayDeserializationException(overlayPlane.Group, OverlayPlaneSource.Image), "Encoding error encountered while reading overlay from image headers.");
            continue;
        }

        try
        {
            byte[] overlayData = dataSource.GetFrameData(frame.FrameNumber).GetNormalizedOverlayData(overlayPlane.Index + 1);

            // if overlay data is null, the data source failed to retrieve the overlay data for some reason, so we also treat it as an encoding error
            // this is different from if the overlay data is zero-length, which indicates that the retrieval succeeded, but that the overlay data for the frame is empty
            // BUG FIX: check before constructing/adding the graphic, so a graphic with null data is never left in the list
            if (overlayData == null)
                throw new NullReferenceException();

            overlayPlaneGraphics.Add(new OverlayPlaneGraphic(overlayPlane, overlayData, OverlayPlaneSource.Image));
        }
        catch (Exception ex)
        {
            failedOverlays = true;
            Platform.Log(LogLevel.Warn, new DicomOverlayDeserializationException(overlayPlane.Group, OverlayPlaneSource.Image, ex), "Failed to load overlay from the image header.");
        }
    }

    if (overlaysFromPresentationState != null)
    {
        foreach (var overlayPlane in overlaysFromPresentationState)
        {
            // if overlay data is missing, treat as an encoding error
            if (!overlayPlane.HasOverlayData)
            {
                failedOverlays = true;
                Platform.Log(LogLevel.Warn, new DicomOverlayDeserializationException(overlayPlane.Group, OverlayPlaneSource.PresentationState), "Encoding error encountered while reading overlay from softcopy presentation state.");
                continue;
            }

            try
            {
                byte[] overlayData;

                // try to compute the offset in the OverlayData bit stream where we can find the overlay frame that applies to this image frame
                int overlayFrame, bitOffset;
                if (overlayPlane.TryGetRelevantOverlayFrame(frame.FrameNumber, frame.ParentImageSop.NumberOfFrames, out overlayFrame)
                    && overlayPlane.TryComputeOverlayDataBitOffset(overlayFrame, out bitOffset))
                {
                    // offset found - unpack only that overlay frame
                    var od = new OverlayData(bitOffset,
                                             overlayPlane.OverlayRows,
                                             overlayPlane.OverlayColumns,
                                             overlayPlane.IsBigEndianOW,
                                             overlayPlane.OverlayData);
                    overlayData = od.Unpack();
                }
                else
                {
                    // no relevant overlay frame found - i.e. the overlay for this image frame is blank
                    overlayData = new byte[0];
                }

                overlayPlaneGraphics.Add(new OverlayPlaneGraphic(overlayPlane, overlayData, OverlayPlaneSource.PresentationState));
            }
            catch (Exception ex)
            {
                failedOverlays = true;
                Platform.Log(LogLevel.Warn, new DicomOverlayDeserializationException(overlayPlane.Group, OverlayPlaneSource.PresentationState, ex), "Failed to load overlay from softcopy presentation state.");
            }
        }
    }

    if (failedOverlays)
    {
        // add an error graphic if any overlays are not being displayed due to deserialization errors.
        overlayPlaneGraphics.Add(new ErrorOverlayPlaneGraphic(SR.MessageErrorDisplayingOverlays));
    }

    return overlayPlaneGraphics;
}