/// <summary>
/// Transfers each uncompressed frame into <paramref name="newPixelData"/> as one fragment,
/// appending a single zero pad byte when the frame size is odd (fragments must be even length).
/// </summary>
/// <param name="oldPixelData">Source of the uncompressed frames.</param>
/// <param name="newPixelData">Destination that receives one fragment per frame.</param>
/// <param name="parameters">Codec parameters (unused by this codec).</param>
public void Encode(DicomUncompressedPixelData oldPixelData, DicomCompressedPixelData newPixelData, DicomCodecParameters parameters)
{
	var needsPad = oldPixelData.UncompressedFrameSize % 2 == 1;
	for (var frame = 0; frame < oldPixelData.NumberOfFrames; ++frame)
	{
		var frameData = oldPixelData.GetFrame(frame);
		if (needsPad)
		{
			// DICOM fragments must have even length; append one zero pad byte.
			var padded = new byte[frameData.Length + 1];
			Array.Copy(frameData, padded, frameData.Length);
			newPixelData.AddFrameFragment(padded);
		}
		else
		{
			newPixelData.AddFrameFragment(frameData);
		}
	}
}
/// <summary>
/// Called by the base class to create a new byte buffer containing normalized pixel data
/// for this frame (8 or 16-bit grayscale, or 32-bit ARGB).
/// </summary>
/// <returns>A new byte buffer containing the normalized pixel data.</returns>
protected override byte[] CreateNormalizedPixelData()
{
	var message = this.Parent.SourceMessage;

	var clock = new CodeClock();
	clock.Start();

	PhotometricInterpretation photometricInterpretation;
	byte[] frameData = null;

	if (!message.TransferSyntax.Encapsulated)
	{
		var uncompressed = new DicomUncompressedPixelData(message);
		// DICOM library uses zero-based frame numbers
		MemoryManager.Execute(() => frameData = uncompressed.GetFrame(_frameIndex));

		ExtractOverlayFrames(frameData, uncompressed.BitsAllocated);

		photometricInterpretation = PhotometricInterpretation.FromCodeString(message.DataSet[DicomTags.PhotometricInterpretation]);
	}
	else if (DicomCodecRegistry.GetCodec(message.TransferSyntax) != null)
	{
		var compressed = new DicomCompressedPixelData(message);
		string codeString = null;
		MemoryManager.Execute(() => frameData = compressed.GetFrame(_frameIndex, out codeString));

		photometricInterpretation = PhotometricInterpretation.FromCodeString(codeString);
	}
	else
	{
		throw new DicomCodecException("Unsupported transfer syntax");
	}

	if (photometricInterpretation.IsColor)
		frameData = ToArgb(message.DataSet, frameData, photometricInterpretation);
	else
		NormalizeGrayscalePixels(message.DataSet, frameData);

	clock.Stop();
	PerformanceReportBroker.PublishReport("DicomMessageSopDataSource", "CreateFrameNormalizedPixelData", clock.Seconds);

	return frameData;
}
/// <summary>
/// Copies each uncompressed frame into <paramref name="newPixelData"/> as a single fragment,
/// padding odd-length frames with one trailing zero byte so every fragment has even length.
/// </summary>
/// <param name="oldPixelData">Source of the uncompressed frames.</param>
/// <param name="newPixelData">Destination that receives one fragment per frame.</param>
/// <param name="parameters">Codec parameters (unused by this codec).</param>
public void Encode(DicomUncompressedPixelData oldPixelData, DicomCompressedPixelData newPixelData, DicomCodecParameters parameters)
{
	var frameCount = oldPixelData.NumberOfFrames;
	if (oldPixelData.UncompressedFrameSize % 2 == 1)
	{
		// Odd frame size: copy the frame into a buffer and append one pad byte,
		// since DICOM fragments must be even length.
		for (var frame = 0; frame < frameCount; ++frame)
		{
			using (var buffer = new MemoryStream())
			{
				var frameData = oldPixelData.GetFrame(frame);
				buffer.Write(frameData, 0, frameData.Length);
				buffer.WriteByte(0);
				newPixelData.AddFrameFragment(buffer.ToArray());
			}
		}
	}
	else
	{
		// Even frame size: frames can be handed over directly as fragments.
		for (var frame = 0; frame < frameCount; ++frame)
			newPixelData.AddFrameFragment(oldPixelData.GetFrame(frame));
	}
}
/// <summary>
/// Verifies that a frame of a 16-bit OW image loaded from disk (with pixel data references)
/// can be replaced via <c>SetFrame</c> and that the change round-trips through the attribute collection.
/// </summary>
public void TestAdvanced_FileOnDisk_16Bits_OW()
{
	var tempFile = Path.GetTempFileName();
	CreateDicomImage(rows: 20, columns: 30, numberOfFrames: 5).Save(tempFile);
	var file = new DicomFile(tempFile);
	file.Load(DicomReadOptions.StorePixelDataReferences);
	try
	{
		var pixelData = new DicomUncompressedPixelData(file);

		// Sanity-check the image attributes first.
		Assert.AreEqual(16, pixelData.BitsAllocated, "BitsAllocated");
		Assert.AreEqual(16, pixelData.BitsStored, "BitsStored");
		Assert.AreEqual(15, pixelData.HighBit, "HighBit");
		Assert.AreEqual(20, pixelData.ImageHeight, "ImageHeight");
		Assert.AreEqual(30, pixelData.ImageWidth, "ImageWidth");
		Assert.AreEqual(5, pixelData.NumberOfFrames, "NumberOfFrames");
		Assert.AreEqual("MONOCHROME2", pixelData.PhotometricInterpretation, "PhotometricInterpretation");
		Assert.AreEqual(0, pixelData.PixelRepresentation, "PixelRepresentation");
		Assert.AreEqual(1, pixelData.SamplesPerPixel, "SamplesPerPixel");
		Assert.AreEqual(20 * 30 * 2, pixelData.UncompressedFrameSize, "UncompressedFrameSize");

		// Overwrite frame 1 with a constant fill value.
		var replacement = new byte[pixelData.UncompressedFrameSize];
		for (var i = 0; i < replacement.Length; ++i)
			replacement[i] = 0x7F;
		pixelData.SetFrame(1, replacement);
		pixelData.UpdateAttributeCollection(file.DataSet);

		// Every frame except frame 1 keeps the original pattern (0x80 + frame).
		var rawValues = file.DataSet[DicomTags.PixelData].Values as byte[];
		for (var frame = 0; frame < 5; ++frame)
		{
			var frameData = pixelData.GetFrame(frame);
			var expected = frame == 1 ? (byte) 0x7F : (byte) (0x80 + frame);
			Assert.AreEqual(pixelData.UncompressedFrameSize, frameData.Length, "PixelData(frame={0}).Length", frame);
			AssertBytesEqual(expected, frameData, "PixelData(frame={0})", frame);
			AssertBytesEqual(expected, rawValues, frame * pixelData.UncompressedFrameSize, pixelData.UncompressedFrameSize, "AttributeValues(frame={0})", frame);
		}
	}
	finally
	{
		File.Delete(tempFile);
	}
}
/// <summary>
/// Loads the image file (with pixel data references) and returns its row/column counts
/// and the raw pixel data of the first frame.
/// </summary>
/// <param name="imageBox">The print image box requesting the data (not used by this implementation).</param>
/// <param name="colorMode">The requested color mode (not used by this implementation).</param>
/// <param name="rows">Receives the value of the Rows attribute.</param>
/// <param name="columns">Receives the value of the Columns attribute.</param>
/// <param name="pixelData">Receives the uncompressed pixel data of frame 0.</param>
public void GetPixelData(PrintScu.ImageBox imageBox, ColorMode colorMode, out ushort rows, out ushort columns, out byte[] pixelData)
{
	try
	{
		// The original code dereferenced the result of the "as" cast without checking it;
		// fail with an explicit message instead of a NullReferenceException.
		var dataSource = _file.DataSource as LocalSopDataSource;
		if (dataSource == null)
			throw new InvalidOperationException("The image data source is not a LocalSopDataSource.");

		dataSource.File.Load(DicomReadOptions.Default | DicomReadOptions.StorePixelDataReferences);
		rows = dataSource.File.DataSet[DicomTags.Rows].GetUInt16(0, 0);
		columns = dataSource.File.DataSet[DicomTags.Columns].GetUInt16(0, 0);

		var uncompressedPixelData = new DicomUncompressedPixelData(dataSource.File);
		pixelData = uncompressedPixelData.GetFrame(0);
	}
	catch (Exception e)
	{
		Platform.Log(LogLevel.Error, string.Format("获得像素数据失败:{0}", e.Message));
		// "throw;" (not "throw e;") preserves the original stack trace.
		throw;
	}
}
/// <summary>
/// Uploads the pixel data of a single frame to the given storage location.
/// </summary>
/// <param name="dicomObject">The DICOM file whose frame is being uploaded.</param>
/// <param name="frame">One-based frame number; converted to a zero-based index internally.</param>
/// <param name="storeLocation">The destination storage location.</param>
protected override void Upload(DicomFile dicomObject, int frame, IStorageLocation storeLocation)
{
	// Callers use 1-based frame numbers; the pixel data API is 0-based.
	var frameIndex = frame - 1;

	if (dicomObject.TransferSyntax == TransferSyntax.JpegBaselineProcess1)
	{
		// Already JPEG baseline: upload the frame's compressed fragment data as-is.
		DicomCompressedPixelData pd = DicomPixelData.CreateFrom(dicomObject) as DicomCompressedPixelData;
		byte[] buffer = pd.GetFrameFragmentData(frameIndex);

		storeLocation.Upload(buffer);
	}
	else if (false) //TODO: handle compressed images properly!
	{
		// NOTE(review): this branch is deliberately disabled ("if (false)"). It is an
		// unfinished attempt to convert uncompressed pixel data to JPEG baseline: as written
		// it only relabels the attributes/transfer syntax without actually JPEG-encoding the
		// frame bytes. Also note that for any transfer syntax other than JPEG baseline this
		// method currently uploads nothing at all — confirm whether that is intended.
		DicomFile dcmJpeg = new DicomFile( );

		DicomUncompressedPixelData unCompressed = DicomPixelData.CreateFrom(dicomObject) as DicomUncompressedPixelData;
		DicomCompressedPixelData compressed = new DicomCompressedPixelData(unCompressed);

		//compressed.ImageWidth = unCompressed.ImageWidth;
		//compressed.ImageHeight = unCompressed.HighBit;
		compressed.BitsStored = 8;
		compressed.BitsAllocated = 8;
		//compressed.HighBit = 7;
		compressed.SamplesPerPixel = 3;
		//compressed.PlanarConfiguration = 0;
		compressed.PhotometricInterpretation = "YBR_FULL_422";
		compressed.TransferSyntax = TransferSyntax.JpegBaselineProcess1;

		byte[] imageBuffer = unCompressed.GetFrame(frameIndex);
		compressed.AddFrameFragment(imageBuffer);

		compressed.UpdateMessage(dcmJpeg);

		storeLocation.Upload(compressed.GetFrame(frameIndex));

		//ClearCanvas.Dicom.Codec.Jpeg.Jpeg8Codec codec = new ClearCanvas.Dicom.Codec.Jpeg.Jpeg8Codec (ClearCanvas.Dicom.Codec.Jpeg.JpegMode.Baseline, 0, 0 ) ;
		//ClearCanvas.Dicom.Codec.Jpeg.DicomJpegParameters jparam = new ClearCanvas.Dicom.Codec.Jpeg.DicomJpegParameters ( ) ;
		//jparam.
		//codec.
		//codec.Encode ( )
	}
}
/// <summary>
/// Gzip-compresses each uncompressed frame into one fragment of <paramref name="newPixelData"/>,
/// padding odd-length output with a single zero byte (gzip ignores trailing padding on decompression).
/// </summary>
/// <param name="oldPixelData">Source of the uncompressed frames.</param>
/// <param name="newPixelData">Destination that receives one gzip fragment per frame.</param>
/// <param name="parameters">Codec parameters (unused by this codec).</param>
public void Encode(DicomUncompressedPixelData oldPixelData, DicomCompressedPixelData newPixelData, DicomCodecParameters parameters)
{
	var frameCount = oldPixelData.NumberOfFrames;
	for (var frame = 0; frame < frameCount; ++frame)
	{
		using (var compressedFrame = new MemoryStream())
		{
			var frameData = oldPixelData.GetFrame(frame);

			// leaveOpen: true so the MemoryStream survives the GZipStream's disposal
			// (disposal is what flushes the final gzip trailer).
			using (var gzip = new GZipStream(compressedFrame, CompressionMode.Compress, true))
			{
				gzip.Write(frameData, 0, frameData.Length);
			}

			// Fragments must be even length; append a pad byte if needed.
			if (compressedFrame.Length % 2 == 1)
				compressedFrame.WriteByte(0);

			newPixelData.AddFrameFragment(compressedFrame.ToArray());
		}
	}
}
/// <summary>
/// Verifies attributes and frame contents of an 8-bit big-endian image with an odd frame length,
/// loaded from disk with pixel data references.
/// </summary>
public void TestBasic_FileOnDisk_8Bits_BigEndian_OddFrameLength()
{
	var tempFile = Path.GetTempFileName();
	CreateDicomImage(rows: 19, columns: 29, numberOfFrames: 5, bitsAllocated16: false, endian: Endian.Big).Save(tempFile);
	var file = new DicomFile(tempFile);
	file.Load(DicomReadOptions.StorePixelDataReferences);
	try
	{
		var pixelData = new DicomUncompressedPixelData(file);
		Assert.AreEqual(8, pixelData.BitsAllocated, "BitsAllocated");
		Assert.AreEqual(8, pixelData.BitsStored, "BitsStored");
		Assert.AreEqual(7, pixelData.HighBit, "HighBit");
		Assert.AreEqual(19, pixelData.ImageHeight, "ImageHeight");
		Assert.AreEqual(29, pixelData.ImageWidth, "ImageWidth");
		Assert.AreEqual(5, pixelData.NumberOfFrames, "NumberOfFrames");
		Assert.AreEqual("MONOCHROME2", pixelData.PhotometricInterpretation, "PhotometricInterpretation");
		Assert.AreEqual(0, pixelData.PixelRepresentation, "PixelRepresentation");
		Assert.AreEqual(1, pixelData.SamplesPerPixel, "SamplesPerPixel");
		Assert.AreEqual(19 * 29, pixelData.UncompressedFrameSize, "UncompressedFrameSize");

		for (var frame = 0; frame < 5; ++frame)
		{
			// The last frame cannot currently be validated; see bug #10749.
			if (frame == 4)
				Assert.Ignore("Skipping last frame validation due to bug #10749");

			var frameData = pixelData.GetFrame(frame);
			Assert.AreEqual(pixelData.UncompressedFrameSize, frameData.Length, "PixelData(frame={0}).Length", frame);
			AssertBytesEqual((byte) (0x80 + frame), frameData, "PixelData(frame={0})", frame);
		}
	}
	finally
	{
		File.Delete(tempFile);
	}
}
/// <summary>
/// Compresses each frame with gzip and adds it to <paramref name="newPixelData"/> as one fragment,
/// appending a zero pad byte when the compressed output has odd length.
/// </summary>
/// <param name="oldPixelData">Source of the uncompressed frames.</param>
/// <param name="newPixelData">Destination that receives one gzip fragment per frame.</param>
/// <param name="parameters">Codec parameters (unused by this codec).</param>
public void Encode(DicomUncompressedPixelData oldPixelData, DicomCompressedPixelData newPixelData, DicomCodecParameters parameters)
{
	for (var frameNumber = 0; frameNumber < oldPixelData.NumberOfFrames; ++frameNumber)
	{
		var uncompressed = oldPixelData.GetFrame(frameNumber);
		using (var fragment = new MemoryStream())
		{
			// leaveOpen: true so the fragment stream remains usable after the gzip
			// stream is disposed (disposal flushes the gzip trailer).
			using (var deflater = new GZipStream(fragment, CompressionMode.Compress, true))
			{
				deflater.Write(uncompressed, 0, uncompressed.Length);
			}

			// If the compressed stream is odd length, append an extra byte — gzip
			// will know that it's padding during decompression.
			var isOddLength = (fragment.Length % 2) == 1;
			if (isOddLength)
			{
				fragment.WriteByte(0);
			}

			newPixelData.AddFrameFragment(fragment.ToArray());
		}
	}
}
/// <summary>
/// Verifies attributes and frame contents of an 8-bit OB image created entirely in memory.
/// </summary>
public void TestBasic_MessageInMemory_8Bits_OB()
{
	var image = CreateDicomImage(rows: 20, columns: 30, numberOfFrames: 3, bitsAllocated16: false, useOB: true);
	var pixelData = new DicomUncompressedPixelData(image);

	Assert.AreEqual(8, pixelData.BitsAllocated, "BitsAllocated");
	Assert.AreEqual(8, pixelData.BitsStored, "BitsStored");
	Assert.AreEqual(7, pixelData.HighBit, "HighBit");
	Assert.AreEqual(20, pixelData.ImageHeight, "ImageHeight");
	Assert.AreEqual(30, pixelData.ImageWidth, "ImageWidth");
	Assert.AreEqual(3, pixelData.NumberOfFrames, "NumberOfFrames");
	Assert.AreEqual("MONOCHROME2", pixelData.PhotometricInterpretation, "PhotometricInterpretation");
	Assert.AreEqual(0, pixelData.PixelRepresentation, "PixelRepresentation");
	Assert.AreEqual(1, pixelData.SamplesPerPixel, "SamplesPerPixel");
	Assert.AreEqual(20 * 30, pixelData.UncompressedFrameSize, "UncompressedFrameSize");

	// Each frame is filled with (0x80 + frame) by CreateDicomImage.
	for (var frame = 0; frame < 3; ++frame)
	{
		var frameData = pixelData.GetFrame(frame);
		Assert.AreEqual(pixelData.UncompressedFrameSize, frameData.Length, "PixelData(frame={0}).Length", frame);
		AssertBytesEqual((byte) (0x80 + frame), frameData, "PixelData(frame={0})", frame);
	}
}
/// <summary>
/// Loads <paramref name="filename"/> with pixel data references and verifies that every pixel
/// of every frame matches the expected test pattern: a counter seeded at (frame + 1) and
/// incremented per pixel, taken modulo 255.
/// </summary>
/// <param name="filename">Path of the DICOM file to verify.</param>
private void CheckPixels(string filename)
{
	DicomReadOptions readOptions = DicomReadOptions.StorePixelDataReferences;
	DicomFile newFile = new DicomFile(filename);

	newFile.Load(readOptions);

	DicomUncompressedPixelData pd = new DicomUncompressedPixelData(newFile.DataSet);
	for (int frame = 0; frame < pd.NumberOfFrames; frame++)
	{
		byte[] data = pd.GetFrame(frame);
		uint pdVal = (uint)frame + 1;
		for (int i = 0; i < pd.UncompressedFrameSize; i++, pdVal++)
		{
			if (data[i] != pdVal % 255)
			{
				string val = String.Format("Value bad: frame: {0}, pixel: {1}, val1: {2}, val2: {3}", frame, i, data[i], pdVal % 255);
				Console.Write(val);
			}
			// NUnit's signature is AreEqual(expected, actual); the original had the
			// arguments reversed, which produces misleading failure messages.
			Assert.AreEqual(pdVal % 255, data[i]);
		}
	}
}
/// <summary>
/// Verifies attributes and frame contents of an 8-bit signed image, constructing the pixel data
/// from the in-memory dataset rather than the message.
/// </summary>
public void TestBasic_DatasetInMemory_8Bits()
{
	var image = CreateDicomImage(rows: 20, columns: 30, numberOfFrames: 3, bitsAllocated16: false, signed: true);
	var pixelData = new DicomUncompressedPixelData(image.DataSet);

	Assert.AreEqual(8, pixelData.BitsAllocated, "BitsAllocated");
	Assert.AreEqual(8, pixelData.BitsStored, "BitsStored");
	Assert.AreEqual(7, pixelData.HighBit, "HighBit");
	Assert.AreEqual(20, pixelData.ImageHeight, "ImageHeight");
	Assert.AreEqual(30, pixelData.ImageWidth, "ImageWidth");
	Assert.AreEqual(3, pixelData.NumberOfFrames, "NumberOfFrames");
	Assert.AreEqual("MONOCHROME2", pixelData.PhotometricInterpretation, "PhotometricInterpretation");
	// Signed data: pixel representation is 1 (two's complement).
	Assert.AreEqual(1, pixelData.PixelRepresentation, "PixelRepresentation");
	Assert.AreEqual(1, pixelData.SamplesPerPixel, "SamplesPerPixel");
	Assert.AreEqual(20 * 30, pixelData.UncompressedFrameSize, "UncompressedFrameSize");

	for (var frame = 0; frame < 3; ++frame)
	{
		var frameData = pixelData.GetFrame(frame);
		Assert.AreEqual(pixelData.UncompressedFrameSize, frameData.Length, "PixelData(frame={0}).Length", frame);
		AssertBytesEqual((byte) (0x80 + frame), frameData, "PixelData(frame={0})", frame);
	}
}
/// <summary>
/// Verifies attributes and frame contents of a 16-bit big-endian image loaded from disk
/// with pixel data references.
/// </summary>
public void TestBasic_FileOnDisk_16Bits_BigEndian()
{
	var tempFile = Path.GetTempFileName();
	CreateDicomImage(rows: 20, columns: 30, numberOfFrames: 3, endian: Endian.Big).Save(tempFile);
	var file = new DicomFile(tempFile);
	file.Load(DicomReadOptions.StorePixelDataReferences);
	try
	{
		var pixelData = new DicomUncompressedPixelData(file);
		Assert.AreEqual(16, pixelData.BitsAllocated, "BitsAllocated");
		Assert.AreEqual(16, pixelData.BitsStored, "BitsStored");
		Assert.AreEqual(15, pixelData.HighBit, "HighBit");
		Assert.AreEqual(20, pixelData.ImageHeight, "ImageHeight");
		Assert.AreEqual(30, pixelData.ImageWidth, "ImageWidth");
		Assert.AreEqual(3, pixelData.NumberOfFrames, "NumberOfFrames");
		Assert.AreEqual("MONOCHROME2", pixelData.PhotometricInterpretation, "PhotometricInterpretation");
		Assert.AreEqual(0, pixelData.PixelRepresentation, "PixelRepresentation");
		Assert.AreEqual(1, pixelData.SamplesPerPixel, "SamplesPerPixel");
		Assert.AreEqual(20 * 30 * 2, pixelData.UncompressedFrameSize, "UncompressedFrameSize");

		for (var frame = 0; frame < 3; ++frame)
		{
			var frameData = pixelData.GetFrame(frame);
			Assert.AreEqual(pixelData.UncompressedFrameSize, frameData.Length, "PixelData(frame={0}).Length", frame);
			AssertBytesEqual((byte) (0x80 + frame), frameData, "PixelData(frame={0})", frame);
		}
	}
	finally
	{
		File.Delete(tempFile);
	}
}
/// <summary>
/// Encodes the uncompressed pixel data as DICOM RLE, one fragment per frame
/// (one RLE segment per byte plane per sample, per DICOM PS3.5 Annex G).
/// </summary>
/// <param name="oldPixelData">The source uncompressed pixel data.</param>
/// <param name="newPixelData">The destination compressed pixel data, receiving one fragment per frame.</param>
/// <param name="parameters">Must be a <see cref="DicomRleCodecParameters"/> instance.</param>
/// <exception cref="DicomCodecException">Thrown if the parameters are of the wrong type, or the frame buffer is shorter than the image attributes imply.</exception>
public void Encode(DicomUncompressedPixelData oldPixelData, DicomCompressedPixelData newPixelData, DicomCodecParameters parameters)
{
	DicomRleCodecParameters rleParams = parameters as DicomRleCodecParameters;
	if (rleParams == null)
		throw new DicomCodecException("Unexpected RLE Codec parameters");

	// Convert to RGB
	if (oldPixelData.HasPaletteColorLut && parameters.ConvertPaletteToRGB)
	{
		oldPixelData.ConvertPaletteColorToRgb();
		newPixelData.HasPaletteColorLut = false;
		newPixelData.SamplesPerPixel = oldPixelData.SamplesPerPixel;
		newPixelData.PlanarConfiguration = oldPixelData.PlanarConfiguration;
		newPixelData.PhotometricInterpretation = oldPixelData.PhotometricInterpretation;
	}

	int pixelCount = oldPixelData.ImageWidth * oldPixelData.ImageHeight;
	int numberOfSegments = oldPixelData.BytesAllocated * oldPixelData.SamplesPerPixel;

	for (int i = 0; i < oldPixelData.NumberOfFrames; i++)
	{
		RLEEncoder encoder = new RLEEncoder();
		byte[] frameData = oldPixelData.GetFrame(i);

		for (int s = 0; s < numberOfSegments; s++)
		{
			encoder.NextSegment();

			int sample = s / oldPixelData.BytesAllocated;
			int sabyte = s % oldPixelData.BytesAllocated;

			int pos;
			int offset;
			if (newPixelData.PlanarConfiguration == 0)
			{
				// Interleaved samples: advance over every sample's bytes per pixel.
				pos = sample * oldPixelData.BytesAllocated;
				offset = numberOfSegments;
			}
			else
			{
				// Planar configuration: each sample plane is contiguous.
				pos = sample * oldPixelData.BytesAllocated * pixelCount;
				offset = oldPixelData.BytesAllocated;
			}

			// Segments run most-significant byte first unless the byte order is reversed.
			if (rleParams.ReverseByteOrder)
				pos += sabyte;
			else
				pos += oldPixelData.BytesAllocated - sabyte - 1;

			for (int p = 0; p < pixelCount; p++)
			{
				if (pos >= frameData.Length)
					// Originally threw with an empty message, which made failures undiagnosable.
					throw new DicomCodecException(string.Format(
						"RLE encode error: offset {0} is beyond the end of the frame buffer ({1} bytes); the pixel data attributes are inconsistent with the frame length.",
						pos, frameData.Length));
				encoder.Encode(frameData[pos]);
				pos += offset;
			}
			encoder.Flush();
		}

		encoder.MakeEvenLength();
		newPixelData.AddFrameFragment(encoder.GetBuffer());
	}
}
/// <summary>
/// Verifies that replacing a frame via <c>SetFrame</c> on a 16-bit OW image loaded from disk
/// is reflected both by <c>GetFrame</c> and in the updated pixel data attribute values.
/// </summary>
public void TestAdvanced_FileOnDisk_16Bits_OW()
{
	var path = Path.GetTempFileName();
	CreateDicomImage(rows: 20, columns: 30, numberOfFrames: 5).Save(path);
	var loaded = new DicomFile(path);
	loaded.Load(DicomReadOptions.StorePixelDataReferences);
	try
	{
		var uncompressed = new DicomUncompressedPixelData(loaded);
		Assert.AreEqual(16, uncompressed.BitsAllocated, "BitsAllocated");
		Assert.AreEqual(16, uncompressed.BitsStored, "BitsStored");
		Assert.AreEqual(15, uncompressed.HighBit, "HighBit");
		Assert.AreEqual(20, uncompressed.ImageHeight, "ImageHeight");
		Assert.AreEqual(30, uncompressed.ImageWidth, "ImageWidth");
		Assert.AreEqual(5, uncompressed.NumberOfFrames, "NumberOfFrames");
		Assert.AreEqual("MONOCHROME2", uncompressed.PhotometricInterpretation, "PhotometricInterpretation");
		Assert.AreEqual(0, uncompressed.PixelRepresentation, "PixelRepresentation");
		Assert.AreEqual(1, uncompressed.SamplesPerPixel, "SamplesPerPixel");
		Assert.AreEqual(20 * 30 * 2, uncompressed.UncompressedFrameSize, "UncompressedFrameSize");

		// Replace frame 1 with a buffer filled with 0x7F and push the change
		// back into the dataset.
		var fill = new byte[uncompressed.UncompressedFrameSize];
		for (var index = 0; index < fill.Length; ++index)
		{
			fill[index] = 0x7F;
		}
		uncompressed.SetFrame(1, fill);
		uncompressed.UpdateAttributeCollection(loaded.DataSet);

		var attributeBytes = loaded.DataSet[DicomTags.PixelData].Values as byte[];
		for (var frame = 0; frame < 5; ++frame)
		{
			var frameBytes = uncompressed.GetFrame(frame);
			var expectedFill = frame == 1 ? (byte) 0x7F : (byte) (0x80 + frame);
			Assert.AreEqual(uncompressed.UncompressedFrameSize, frameBytes.Length, "PixelData(frame={0}).Length", frame);
			AssertBytesEqual(expectedFill, frameBytes, "PixelData(frame={0})", frame);
			AssertBytesEqual(expectedFill, attributeBytes, frame * uncompressed.UncompressedFrameSize, uncompressed.UncompressedFrameSize, "AttributeValues(frame={0})", frame);
		}
	}
	finally
	{
		File.Delete(path);
	}
}
/// <summary>
/// Verifies attributes and frame contents of a 16-bit big-endian image whose rows/columns
/// give an odd pixel count, loaded from disk with pixel data references.
/// </summary>
public void TestBasic_FileOnDisk_16Bits_BigEndian_OddFrameLength()
{
	var tempFile = Path.GetTempFileName();
	CreateDicomImage(rows: 19, columns: 29, numberOfFrames: 5, endian: Endian.Big).Save(tempFile);
	var file = new DicomFile(tempFile);
	file.Load(DicomReadOptions.StorePixelDataReferences);
	try
	{
		var pixelData = new DicomUncompressedPixelData(file);
		Assert.AreEqual(16, pixelData.BitsAllocated, "BitsAllocated");
		Assert.AreEqual(16, pixelData.BitsStored, "BitsStored");
		Assert.AreEqual(15, pixelData.HighBit, "HighBit");
		Assert.AreEqual(19, pixelData.ImageHeight, "ImageHeight");
		Assert.AreEqual(29, pixelData.ImageWidth, "ImageWidth");
		Assert.AreEqual(5, pixelData.NumberOfFrames, "NumberOfFrames");
		Assert.AreEqual("MONOCHROME2", pixelData.PhotometricInterpretation, "PhotometricInterpretation");
		Assert.AreEqual(0, pixelData.PixelRepresentation, "PixelRepresentation");
		Assert.AreEqual(1, pixelData.SamplesPerPixel, "SamplesPerPixel");
		Assert.AreEqual(19 * 29 * 2, pixelData.UncompressedFrameSize, "UncompressedFrameSize");

		for (var frame = 0; frame < 5; ++frame)
		{
			var frameData = pixelData.GetFrame(frame);
			Assert.AreEqual(pixelData.UncompressedFrameSize, frameData.Length, "PixelData(frame={0}).Length", frame);
			AssertBytesEqual((byte) (0x80 + frame), frameData, "PixelData(frame={0})", frame);
		}
	}
	finally
	{
		File.Delete(tempFile);
	}
}
/// <summary>
/// Verifies attributes and frame contents of an 8-bit OB image loaded from disk
/// with pixel data references.
/// </summary>
public void TestBasic_FileOnDisk_8Bits_OB()
{
	var tempFile = Path.GetTempFileName();
	CreateDicomImage(rows: 20, columns: 30, numberOfFrames: 3, bitsAllocated16: false, useOB: true).Save(tempFile);
	var file = new DicomFile(tempFile);
	file.Load(DicomReadOptions.StorePixelDataReferences);
	try
	{
		var pixelData = new DicomUncompressedPixelData(file);
		Assert.AreEqual(8, pixelData.BitsAllocated, "BitsAllocated");
		Assert.AreEqual(8, pixelData.BitsStored, "BitsStored");
		Assert.AreEqual(7, pixelData.HighBit, "HighBit");
		Assert.AreEqual(20, pixelData.ImageHeight, "ImageHeight");
		Assert.AreEqual(30, pixelData.ImageWidth, "ImageWidth");
		Assert.AreEqual(3, pixelData.NumberOfFrames, "NumberOfFrames");
		Assert.AreEqual("MONOCHROME2", pixelData.PhotometricInterpretation, "PhotometricInterpretation");
		Assert.AreEqual(0, pixelData.PixelRepresentation, "PixelRepresentation");
		Assert.AreEqual(1, pixelData.SamplesPerPixel, "SamplesPerPixel");
		Assert.AreEqual(20 * 30, pixelData.UncompressedFrameSize, "UncompressedFrameSize");

		for (var frame = 0; frame < 3; ++frame)
		{
			var frameData = pixelData.GetFrame(frame);
			Assert.AreEqual(pixelData.UncompressedFrameSize, frameData.Length, "PixelData(frame={0}).Length", frame);
			AssertBytesEqual((byte) (0x80 + frame), frameData, "PixelData(frame={0})", frame);
		}
	}
	finally
	{
		File.Delete(tempFile);
	}
}
/// <summary>
/// Fills the <see cref="OverlayData"/> property with the overlay(s) that had been encoded
/// in the <see cref="DicomTags.PixelData"/> of the SOP Instance. If the image is a
/// multi-frame, overlay data is extracted from all the frames.
/// </summary>
/// <param name="pd">The pixel data that contains the encoded overlay(s).</param>
/// <exception cref="DicomException">Thrown if <paramref name="pd"/> is not a valid source of embedded overlay data.</exception>
/// <returns>True if the <see cref="OverlayData"/> was populated with data encoded in the pixel data; False if <see cref="OverlayData"/> is not empty.</returns>
public unsafe bool ExtractEmbeddedOverlay(DicomUncompressedPixelData pd)
{
	byte[] overlayData = this.OverlayData;
	if (overlayData != null && overlayData.Length > 0)
	{
		// Overlay data already present; nothing to extract.
		return(false);
	}

	// General sanity checks
	if (pd.SamplesPerPixel > 1)
	{
		throw new DicomException("Unable to convert embedded overlays when Samples Per Pixel > 1");
	}
	if (pd.BitsStored == 8 && pd.BitsAllocated == 8)
	{
		throw new DicomException("Unable to remove overlay with 8 Bits Stored and 8 Bits Allocated");
	}
	if (pd.BitsStored == 16 && pd.BitsAllocated == 16)
	{
		throw new DicomException("Unable to remove overlay with 16 Bits Stored and 16 Bits Allocated");
	}
	// The overlay bit must live outside the stored pixel value's bit range.
	if (OverlayBitPosition <= pd.HighBit && OverlayBitPosition >= pd.LowBit)
	{
		throw new DicomException(String.Format("Invalid overlay bit position ({0}); overlay would be in the middle of the pixel data.", OverlayBitPosition));
	}

	// One overlay bit is harvested per stored pixel value across ALL frames, packed 8 bits
	// per byte: frameSize * NumberOfFrames bytes of pixel data divided by BitsAllocated
	// gives the packed byte count, rounded up.
	int frameSize = pd.UncompressedFrameSize;
	int overlayDataLength = (int)Math.Ceiling((frameSize * pd.NumberOfFrames) / (pd.BitsAllocated * 1d));
	int frameLength = frameSize / pd.BytesAllocated; // pixel values per frame

	// Ensure even length overlay
	if (overlayDataLength % 2 == 1)
	{
		overlayDataLength++;
	}

	overlayData = new byte[overlayDataLength];
	// overlayOffset/overlayMask form a packed-bit cursor threaded (by ref) across frames.
	int overlayOffset = 0;
	byte overlayMask = 0x01;
	if (pd.BitsAllocated <= 8)
	{
		var embeddedOverlayMask = ((byte)(0x1 << OverlayBitPosition));
		// Embedded overlays must exist for all frames; they can't be for a subset.
		for (int i = 0; i < pd.NumberOfFrames; i++)
		{
			byte[] frameData = pd.GetFrame(i);
			ExtractEmbeddedOverlay(frameData, frameLength, embeddedOverlayMask, overlayData, ref overlayOffset, ref overlayMask);
			pd.SetFrame(i, frameData); // write the frame back with the overlay bit cleared
		}
	}
	else
	{
		var embeddedOverlayMask = ((ushort)(0x1 << OverlayBitPosition));
		// Embedded overlays must exist for all frames; they can't be for a subset.
		for (int i = 0; i < pd.NumberOfFrames; i++)
		{
			byte[] frameData = pd.GetFrame(i);
			ExtractEmbeddedOverlay(frameData, frameLength, embeddedOverlayMask, overlayData, ref overlayOffset, ref overlayMask);
			pd.SetFrame(i, frameData); // write the frame back with the overlay bit cleared
		}
	}
	pd.UpdatePixelDataAttribute();

	// Assign the new overlay tags
	this.OverlayBitPosition = 0;
	this.OverlayBitsAllocated = 1;
	if (this.IsBigEndianOW)
	{
		// Just do a bulk swap, performance isn't much of an issue.
		ByteBuffer buffer = new ByteBuffer(overlayData, Endian.Little);
		buffer.Swap2();
		this.OverlayData = buffer.ToBytes();
	}
	else
	{
		this.OverlayData = overlayData;
	}

	// Cleanup Rows/Columns if necessary
	if (this.OverlayColumns == 0)
	{
		this.OverlayColumns = pd.ImageWidth;
	}
	if (this.OverlayRows == 0)
	{
		this.OverlayRows = pd.ImageHeight;
	}

	return(true);
}
/// <summary>
/// Encodes the uncompressed pixel data as DICOM RLE, one fragment per frame
/// (one RLE segment per byte plane per sample, per DICOM PS3.5 Annex G).
/// Falls back to default RLE parameters if none are supplied.
/// </summary>
/// <param name="oldPixelData">The source uncompressed pixel data.</param>
/// <param name="newPixelData">The destination compressed pixel data, receiving one fragment per frame.</param>
/// <param name="parameters">Optional <see cref="DicomRleCodecParameters"/>; defaults are used otherwise.</param>
/// <exception cref="DicomCodecException">Thrown if the frame buffer is shorter than the image attributes imply.</exception>
public void Encode(DicomUncompressedPixelData oldPixelData, DicomCompressedPixelData newPixelData, DicomCodecParameters parameters)
{
	DicomRleCodecParameters rleParams = parameters as DicomRleCodecParameters ?? new DicomRleCodecParameters();

	// Convert to RGB
	if (oldPixelData.HasPaletteColorLut && parameters.ConvertPaletteToRGB)
	{
		oldPixelData.ConvertPaletteColorToRgb();
		newPixelData.HasPaletteColorLut = false;
		newPixelData.SamplesPerPixel = oldPixelData.SamplesPerPixel;
		newPixelData.PlanarConfiguration = oldPixelData.PlanarConfiguration;
		newPixelData.PhotometricInterpretation = oldPixelData.PhotometricInterpretation;
	}

	int pixelCount = oldPixelData.ImageWidth * oldPixelData.ImageHeight;
	int numberOfSegments = oldPixelData.BytesAllocated * oldPixelData.SamplesPerPixel;

	for (int i = 0; i < oldPixelData.NumberOfFrames; i++)
	{
		RLEEncoder encoder = new RLEEncoder();
		byte[] frameData = oldPixelData.GetFrame(i);

		for (int s = 0; s < numberOfSegments; s++)
		{
			encoder.NextSegment();

			int sample = s / oldPixelData.BytesAllocated;
			int sabyte = s % oldPixelData.BytesAllocated;

			int pos;
			int offset;
			if (newPixelData.PlanarConfiguration == 0)
			{
				// Interleaved samples: advance over every sample's bytes per pixel.
				pos = sample * oldPixelData.BytesAllocated;
				offset = numberOfSegments;
			}
			else
			{
				// Planar configuration: each sample plane is contiguous.
				pos = sample * oldPixelData.BytesAllocated * pixelCount;
				offset = oldPixelData.BytesAllocated;
			}

			// Segments run most-significant byte first unless the byte order is reversed.
			if (rleParams.ReverseByteOrder)
			{
				pos += sabyte;
			}
			else
			{
				pos += oldPixelData.BytesAllocated - sabyte - 1;
			}

			for (int p = 0; p < pixelCount; p++)
			{
				if (pos >= frameData.Length)
				{
					// Originally threw with an empty message, which made failures undiagnosable.
					throw new DicomCodecException(string.Format(
						"RLE encode error: offset {0} is beyond the end of the frame buffer ({1} bytes); the pixel data attributes are inconsistent with the frame length.",
						pos, frameData.Length));
				}
				encoder.Encode(frameData[pos]);
				pos += offset;
			}
			encoder.Flush();
		}

		encoder.MakeEvenLength();
		newPixelData.AddFrameFragment(encoder.GetBuffer());
	}
}
/// <summary>
/// Loads <paramref name="filename"/> with pixel data references and verifies that every pixel
/// of every frame matches the expected test pattern: a counter seeded at (frame + 1) and
/// incremented per pixel, taken modulo 255.
/// </summary>
/// <param name="filename">Path of the DICOM file to verify.</param>
private void CheckPixels(string filename)
{
	DicomReadOptions readOptions = DicomReadOptions.StorePixelDataReferences;
	DicomFile newFile = new DicomFile(filename);

	newFile.Load(readOptions);

	DicomUncompressedPixelData pd = new DicomUncompressedPixelData(newFile.DataSet);
	for (int frame = 0; frame < pd.NumberOfFrames; frame++)
	{
		byte[] data = pd.GetFrame(frame);
		uint pdVal = (uint)frame + 1;
		for (int i = 0; i < pd.UncompressedFrameSize; i++, pdVal++)
		{
			if (data[i] != pdVal%255)
			{
				string val = String.Format("Value bad: frame: {0}, pixel: {1}, val1: {2}, val2: {3}", frame, i, data[i], pdVal%255);
				Console.Write(val);
			}
			// NUnit's signature is AreEqual(expected, actual); the original had the
			// arguments reversed, which produces misleading failure messages.
			Assert.AreEqual(pdVal%255, data[i]);
		}
	}
}
/// <summary>
/// Fills the <see cref="OverlayData"/> property with the overlay that had been encoded
/// in the <see cref="DicomTags.PixelData"/> of the SOP Instance.
/// </summary>
/// <param name="pd">The pixel data that contains the encoded overlay.</param>
/// <exception cref="DicomException">Thrown if <paramref name="pd"/> is not a valid source of embedded overlay data.</exception>
/// <returns>True if the <see cref="OverlayData"/> was populated with data encoded in the pixel data; False if <see cref="OverlayData"/> is not empty.</returns>
public unsafe bool ConvertEmbeddedOverlay(DicomUncompressedPixelData pd)
{
	byte[] oldOverlayData = this.OverlayData;
	if (oldOverlayData != null && oldOverlayData.Length > 0)
	{
		// Overlay data already present; nothing to convert.
		return(false);
	}

	// General sanity checks
	if (pd.SamplesPerPixel > 1)
	{
		throw new DicomException("Unable to convert embedded overlays when Samples Per Pixel > 1");
	}
	if (pd.BitsStored == 8 && pd.BitsAllocated == 8)
	{
		throw new DicomException("Unable to remove overlay with 8 Bits Stored and 8 Bits Allocated");
	}
	if (pd.BitsStored == 16 && pd.BitsAllocated == 16)
	{
		throw new DicomException("Unable to remove overlay with 16 Bits Stored and 16 Bits Allocated");
	}

	// One overlay bit is harvested per stored pixel value; a frame of frameSize bytes
	// therefore yields frameSize / BitsAllocated bytes of packed overlay bits, rounded up.
	// NOTE(review): the overlay buffer is sized from a SINGLE frame, yet the loop below
	// accumulates bits from ALL frames — for a multi-frame image overlayOffset would run
	// past the end of `overlay`. Compare ExtractEmbeddedOverlay, which sizes by
	// frameSize * NumberOfFrames; confirm intended behavior for multi-frame input.
	int frameSize = pd.UncompressedFrameSize;
	int overlaySize = frameSize / pd.BitsAllocated;
	if (frameSize % pd.BitsAllocated > 0)
	{
		overlaySize++;
	}
	int numValues = frameSize / pd.BytesAllocated; // pixel values per frame

	byte[] overlay = new byte[overlaySize];
	int overlayOffset = 0;

	// Embededded overlays must exist for all frames, they can't be for a subset
	for (int i = 0; i < pd.NumberOfFrames; i++)
	{
		byte[] frameData = pd.GetFrame(i);

		if (pd.BitsAllocated <= 8)
		{
			byte pixelMask = ((byte)(0x1 << this.OverlayBitPosition));
			byte overlayMask = 0x01;

			fixed(byte *pFrameData = frameData)
			{
				byte *pixelData = pFrameData;
				for (int p = 0; p < numValues; p++, pixelData++)
				{
					// Harvest the overlay bit and clear it from the pixel value.
					if ((*pixelData & pixelMask) != 0)
					{
						overlay[overlayOffset] |= overlayMask;
						*pixelData &= (byte)~pixelMask;
					}

					// Advance the packed-bit cursor.
					if (overlayMask == 0x80)
					{
						overlayMask = 0x01;
						overlayOffset++;
					}
					else
					{
						overlayMask <<= 1;
					}
				}
			}
		}
		else
		{
			fixed(byte *pFrameData = frameData)
			{
				ushort pixelMask = ((ushort)(0x1 << OverlayBitPosition));
				byte overlayMask = 0x01;
				// 16-bit pixels: walk the frame as ushort values.
				ushort *pixelData = (ushort *)pFrameData;
				for (int p = 0; p < numValues; p++, pixelData++)
				{
					// Harvest the overlay bit and clear it from the pixel value.
					if ((*pixelData & pixelMask) != 0)
					{
						overlay[overlayOffset] |= overlayMask;
						*pixelData &= (ushort)~pixelMask;
					}

					// Advance the packed-bit cursor.
					if (overlayMask == 0x80)
					{
						overlayMask = 0x01;
						overlayOffset++;
					}
					else
					{
						overlayMask <<= 1;
					}
				}
			}
		}
	}

	// Assign the new overlay tags
	this.OverlayBitPosition = 0;
	this.OverlayBitsAllocated = 1;
	if (this.IsBigEndianOW)
	{
		// Just do a bulk swap, performance isn't much of an issue.
		ByteBuffer buffer = new ByteBuffer(overlay, Endian.Little);
		buffer.Swap2();
		this.OverlayData = buffer.ToBytes();
	}
	else
	{
		this.OverlayData = overlay;
	}

	// Cleanup Rows/Columns if necessary
	if (this.OverlayColumns == 0)
	{
		this.OverlayColumns = pd.ImageWidth;
	}
	if (this.OverlayRows == 0)
	{
		this.OverlayRows = pd.ImageHeight;
	}

	return(true);
}