예제 #1
0
 /// <summary>
 /// Validates that the photometric interpretation is not unknown.
 /// </summary>
 /// <param name="photometricInterpretation">The value to validate.</param>
 /// <exception cref="DicomDataException">Thrown when the value is <see cref="PhotometricInterpretation.Unknown"/>.</exception>
 public static void ValidatePhotometricInterpretation(PhotometricInterpretation photometricInterpretation)
 {
     if (photometricInterpretation != PhotometricInterpretation.Unknown)
     {
         return;
     }

     throw new DicomDataException(String.Format(SR.ExceptionInvalidPhotometricInterpretation, photometricInterpretation));
 }
예제 #2
0
        /// <summary>
        /// Builds an <see cref="IPixelData"/> accessor for a single frame of the given pixel data.
        /// </summary>
        /// <param name="pixelData">Source DICOM pixel data.</param>
        /// <param name="frame">Zero-based frame index.</param>
        /// <returns>A grayscale or 24-bit color pixel data wrapper for the frame.</returns>
        /// <exception cref="DicomImagingException">
        /// Thrown for unsupported bits-stored values or photometric interpretations.
        /// </exception>
        public static IPixelData Create(DcmPixelData pixelData, int frame)
        {
            PhotometricInterpretation pi = PhotometricInterpretation.Lookup(pixelData.PhotometricInterpretation);

            bool grayscaleOrPalette = pi == PhotometricInterpretation.Monochrome1 ||
                                      pi == PhotometricInterpretation.Monochrome2 ||
                                      pi == PhotometricInterpretation.PaletteColor;

            if (grayscaleOrPalette)
            {
                int width  = pixelData.ImageWidth;
                int height = pixelData.ImageHeight;

                if (pixelData.BitsStored <= 8)
                {
                    return new GrayscalePixelDataU8(width, height, pixelData.GetFrameDataU8(frame));
                }

                if (pixelData.BitsStored <= 16)
                {
                    return pixelData.IsSigned
                        ? (IPixelData)new GrayscalePixelDataS16(width, height, pixelData.GetFrameDataS16(frame))
                        : new GrayscalePixelDataU16(width, height, pixelData.GetFrameDataU16(frame));
                }

                throw new DicomImagingException("Unsupported pixel data value for bits stored: {0}", pixelData.BitsStored);
            }

            if (pi == PhotometricInterpretation.Rgb || pi == PhotometricInterpretation.YbrFull)
            {
                return new ColorPixelData24(pixelData.ImageWidth, pixelData.ImageHeight, pixelData.GetFrameDataU8(frame));
            }

            throw new DicomImagingException("Unsupported pixel data photometric interpretation: {0}", pi.Value);
        }
        /// <summary>
        /// Converts PALETTE COLOR frames to interleaved RGB in place on this instance.
        /// Rebuilds every frame through the palette color LUT, replaces the frame
        /// collection and the Pixel Data attribute, and updates the image-pixel
        /// attributes to describe the new RGB representation.
        /// </summary>
        public void ConvertPaletteColorToRgb()
        {
            // Precondition: only meaningful for PALETTE COLOR data.
            Platform.CheckTrue(PhotometricInterpretation.Equals("PALETTE COLOR"), "Photometric Interpretation Palette Color Check");

            List <FrameData> frames = new List <FrameData>();

            for (int i = 0; i < NumberOfFrames; i++)
            {
                byte[] currentFrame = GetFrame(i);
                // RGB output needs three bytes per pixel (one per channel).
                byte[] newFrame     = new byte[UncompressedFrameSize * 3];

                PaletteColorToRgb(BitsAllocated, IsSigned, currentFrame, newFrame, PaletteColorLut);
                frames.Add(new FrameDataBytes(this, newFrame, false));
            }

            // change the Pixel Data element so we don't affect the original
            _pd = _pd.Tag.VR.CreateDicomAttribute(_pd.Tag);

            _fd.Clear();
            _fd.AddRange(frames);

            // Describe the converted data: 3 samples/pixel, RGB, interleaved, no LUT.
            SamplesPerPixel           = 3;
            PhotometricInterpretation = "RGB";
            PlanarConfiguration       = 0;
            HasPaletteColorLut        = false;
        }
 /// <summary>
 /// Copies the metadata of a <see cref="DicomPixelData"/> into a new
 /// <see cref="NativePixelData"/> transfer object, wiring delegates back to the
 /// source object for frame access and attribute updates.
 /// </summary>
 /// <param name="dicomPixelData">Source pixel data to wrap.</param>
 /// <returns>A populated <see cref="NativePixelData"/> bound to <paramref name="dicomPixelData"/>.</returns>
 public static NativePixelData ToNativePixelData(this DicomPixelData dicomPixelData)
 {
     return(new NativePixelData
     {
         NumberOfFrames = dicomPixelData.NumberOfFrames,
         Width = dicomPixelData.Width,
         Height = dicomPixelData.Height,
         SamplesPerPixel = dicomPixelData.SamplesPerPixel,
         HighBit = dicomPixelData.HighBit,
         BitsStored = dicomPixelData.BitsStored,
         BitsAllocated = dicomPixelData.BitsAllocated,
         BytesAllocated = dicomPixelData.BytesAllocated,
         UncompressedFrameSize = dicomPixelData.UncompressedFrameSize,
         PlanarConfiguration = (int)dicomPixelData.PlanarConfiguration,
         PixelRepresentation = (int)dicomPixelData.PixelRepresentation,
         TransferSyntaxIsLossy = dicomPixelData.Syntax.IsLossy,
         PhotometricInterpretation = dicomPixelData.PhotometricInterpretation.Value,
         // Delegates keep the native view live against the source object rather
         // than copying pixel buffers eagerly.
         GetFrameImpl = index => dicomPixelData.GetFrame(index).Data,
         AddFrameImpl = buffer => dicomPixelData.AddFrame(new MemoryByteBuffer(buffer)),
         SetPlanarConfigurationImpl =
             value => dicomPixelData.PlanarConfiguration = (PlanarConfiguration)value,
         SetPhotometricInterpretationImpl =
             value =>
             dicomPixelData.PhotometricInterpretation =
                 PhotometricInterpretation.Parse(value)
     });
 }
예제 #5
0
    // Create a 512 x 512 x 2 Multi-frame Grayscale Byte Secondary Capture Image
    // Storage file (zero-filled pixels) and write it to fileName.
    static private void CreateSmallDICOM(string fileName)
    {
        using (var writer = new gdcm.PixmapWriter())
        {
            gdcm.Pixmap img = writer.GetImage();
            img.SetNumberOfDimensions(3);
            img.SetDimension(0, 512);
            img.SetDimension(1, 512);
            img.SetDimension(2, 2); // fake a 3d volume
            PhotometricInterpretation pi = new PhotometricInterpretation(PhotometricInterpretation.PIType.MONOCHROME2);
            img.SetPhotometricInterpretation(pi);
            // Pixel Data (7FE0,0010): 1 byte per pixel, 2 frames, zero-filled.
            gdcm.DataElement pixeldata = new gdcm.DataElement(new gdcm.Tag(0x7fe0, 0x0010));
            byte[]           buffer    = new byte[512 * 512 * 2];
            pixeldata.SetByteValue(buffer, new gdcm.VL((uint)buffer.Length));
            img.SetDataElement(pixeldata);

            // Stamp the SOP Class UID (0008,0016) on the dataset.
            gdcm.File        file         = writer.GetFile();
            gdcm.DataSet     ds           = file.GetDataSet();
            gdcm.DataElement ms           = new gdcm.DataElement(new gdcm.Tag(0x0008, 0x0016));
            string           mediastorage = "1.2.840.10008.5.1.4.1.1.7.2"; // Multi-frame Grayscale Byte Secondary Capture Image Storage
            byte[]           val          = StrToByteArray(mediastorage);
            ms.SetByteValue(val, new gdcm.VL((uint)val.Length));
            ds.Insert(ms);

            writer.SetFileName(fileName);
            writer.Write();
        }
    }
예제 #6
0
  // Create a 512 x 512 x 2 Multi-frame Grayscale Byte Secondary Capture Image
  // Storage file (zero-filled pixels) and write it to fileName.
  private static void CreateSmallDICOM(string fileName)
    {
    using (var writer = new gdcm.PixmapWriter())
      {
      gdcm.Pixmap image = writer.GetImage();
      image.SetNumberOfDimensions(3);
      image.SetDimension(0, 512);
      image.SetDimension(1, 512);
      image.SetDimension(2, 2); // fake a 3d volume

      var interpretation = new PhotometricInterpretation(PhotometricInterpretation.PIType.MONOCHROME2);
      image.SetPhotometricInterpretation(interpretation);

      // Pixel Data (7FE0,0010): zero-filled buffer sized to the declared dimensions.
      var pixelElement = new gdcm.DataElement(new gdcm.Tag(0x7fe0, 0x0010));
      byte[] pixels = new byte[512 * 512 * 2];
      pixelElement.SetByteValue(pixels, new gdcm.VL((uint)pixels.Length));
      image.SetDataElement(pixelElement);

      // Stamp the SOP Class UID (0008,0016) on the dataset.
      gdcm.File file = writer.GetFile();
      gdcm.DataSet dataSet = file.GetDataSet();
      var sopClassElement = new gdcm.DataElement(new gdcm.Tag(0x0008, 0x0016));
      string mediastorage = "1.2.840.10008.5.1.4.1.1.7.2"; // Multi-frame Grayscale Byte Secondary Capture Image Storage
      byte[] uidBytes = StrToByteArray(mediastorage);
      sopClassElement.SetByteValue(uidBytes, new gdcm.VL((uint)uidBytes.Length));
      dataSet.Insert(sopClassElement);

      writer.SetFileName(fileName);
      writer.Write();
      }
    }
예제 #7
0
 /// <summary>
 /// Maps a DICOM photometric interpretation onto the JPEG color space used for
 /// encoding: RGB to sRGB, the monochrome and palette-color interpretations to
 /// greyscale, YBR variants to sYCC, and anything else to Unknown.
 /// </summary>
 /// <param name="photometricInterpretation">The interpretation of the source pixel data.</param>
 /// <returns>The corresponding <see cref="JpegColorSpace"/>.</returns>
 private static JpegColorSpace GetJpegColorSpace(PhotometricInterpretation photometricInterpretation)
 {
     if (photometricInterpretation == PhotometricInterpretation.Rgb)
     {
         return JpegColorSpace.sRGB;
     }

     if (photometricInterpretation == PhotometricInterpretation.Monochrome1 ||
         photometricInterpretation == PhotometricInterpretation.Monochrome2 ||
         photometricInterpretation == PhotometricInterpretation.PaletteColor)
     {
         return JpegColorSpace.GreyScale;
     }

     if (photometricInterpretation == PhotometricInterpretation.YbrFull ||
         photometricInterpretation == PhotometricInterpretation.YbrFull422 ||
         photometricInterpretation == PhotometricInterpretation.YbrPartial422)
     {
         return JpegColorSpace.sYCC;
     }

     return JpegColorSpace.Unknown;
 }
예제 #8
0
        /// <summary>
        /// Converts interleaved YBR triplet pixel data to 32-bit ARGB, written as
        /// B, G, R, A byte order with the alpha channel forced to 0xFF.
        /// </summary>
        /// <param name="ybrPixelData">Source buffer: three bytes per pixel, interleaved.</param>
        /// <param name="argbPixelData">Destination buffer: four bytes per pixel.</param>
        /// <param name="sizeInPixels">Number of pixels to convert.</param>
        /// <param name="photometricInterpretation">Selects the YBR-to-RGB conversion formula.</param>
        private static void YbrTripletToArgb(
            byte[] ybrPixelData,
            byte[] argbPixelData,
            int sizeInPixels,
            PhotometricInterpretation photometricInterpretation)
        {
            fixed(byte *pYbrPixelData = ybrPixelData)
            {
                fixed(byte *pArgbPixelData = argbPixelData)
                {
                    int src = 0;
                    int dst = 0;

                    // Resolve the conversion delegate once, outside the per-pixel loop.
                    YbrToRgb converter = GetYbrToRgbConverter(photometricInterpretation);

                    for (int i = 0; i < sizeInPixels; i++)
                    {
                        int rgb = converter(
                            pYbrPixelData[src],
                            pYbrPixelData[src + 1],
                            pYbrPixelData[src + 2]);

                        // Decode the packed value once per pixel instead of constructing
                        // three Color structs per pixel as the original did.
                        Color color = Color.FromArgb(rgb);

                        pArgbPixelData[dst]     = color.B;
                        pArgbPixelData[dst + 1] = color.G;
                        pArgbPixelData[dst + 2] = color.R;
                        pArgbPixelData[dst + 3] = 0xff;

                        src += 3;
                        dst += 4;
                    }
                }
            }
        }
예제 #9
0
        /// <summary>
        /// Selects the YBR-to-RGB conversion delegate matching the given photometric
        /// interpretation (FULL, FULL 4:2:2, ICT, PARTIAL 4:2:2, or RCT).
        /// </summary>
        /// <param name="photometricInterpretation">YBR variant of the source pixel data.</param>
        /// <returns>The matching converter delegate.</returns>
        private static YbrToRgb GetYbrToRgbConverter(PhotometricInterpretation photometricInterpretation)
        {
            YbrToRgb converter;

            if (photometricInterpretation == PhotometricInterpretation.YbrFull)
            {
                converter = new YbrToRgb(YbrFullToRgb);
            }
            else if (photometricInterpretation == PhotometricInterpretation.YbrFull422)
            {
                converter = new YbrToRgb(YbrFull422ToRgb);
            }
            else if (photometricInterpretation == PhotometricInterpretation.YbrIct)
            {
                converter = new YbrToRgb(YbrIctToRgb);
            }
            else if (photometricInterpretation == PhotometricInterpretation.YbrPartial422)
            {
                converter = new YbrToRgb(YbrPartial422ToRgb);
            }
            else
            {
                // NOTE(review): any other interpretation silently falls back to the RCT
                // converter -- presumably callers only pass YBR variants; confirm at call sites.
                converter = new YbrToRgb(YbrRctToRgb);
            }

            return(converter);
        }
예제 #10
0
파일: IPipeline.cs 프로젝트: xiaotie/mdcm
        /// <summary>
        /// Creates the rendering pipeline matching the pixel data's photometric
        /// interpretation: a windowed grayscale pipeline for MONOCHROME1/2, or an
        /// RGB color pipeline for RGB data.
        /// </summary>
        /// <param name="dataset">Dataset used to look up window/level settings.</param>
        /// <param name="pixelData">Pixel data whose interpretation selects the pipeline.</param>
        /// <returns>The configured pipeline.</returns>
        /// <exception cref="DicomImagingException">Thrown for any other interpretation.</exception>
        public static IPipeline Create(DcmDataset dataset, DcmPixelData pixelData)
        {
            PhotometricInterpretation pi = PhotometricInterpretation.Lookup(pixelData.PhotometricInterpretation);

            if (pi == PhotometricInterpretation.Rgb)
            {
                return new RgbColorPipeline();
            }

            if (pi != PhotometricInterpretation.Monochrome1 && pi != PhotometricInterpretation.Monochrome2)
            {
                throw new DicomImagingException("Unsupported pipeline photometric interpretation: {0}", pi.Value);
            }

            GenericGrayscalePipeline pipeline = new GenericGrayscalePipeline(pixelData.RescaleSlope, pixelData.RescaleIntercept, pixelData.BitsStored, pixelData.IsSigned);

            // Choose the color table matching MONOCHROME1 vs MONOCHROME2.
            pipeline.ColorMap = pi == PhotometricInterpretation.Monochrome1
                ? ColorTable.Monochrome1
                : ColorTable.Monochrome2;

            // Apply the first window/level found in the dataset, if any.
            WindowLevel[] wl = WindowLevel.FromDataset(dataset);
            if (wl.Length > 0)
            {
                pipeline.WindowLevel = wl[0];
            }

            return pipeline;
        }
예제 #11
0
            /// <summary>
            /// Produces the normalized pixel data for this frame: converted to 32-bit
            /// ARGB for color images, otherwise normalized grayscale with any embedded
            /// overlay planes extracted and cached first.
            /// </summary>
            /// <returns>The normalized pixel data buffer.</returns>
            protected override byte[] CreateNormalizedPixelData()
            {
                string photometricInterpretation;

                byte[] pixelData = _framePixelData.GetUncompressedPixelData(out photometricInterpretation);

                // Prefer the interpretation reported by the decompressor; fall back to the
                // dataset's Photometric Interpretation attribute when it is null.
                string photometricInterpretationCode = photometricInterpretation ?? Parent[DicomTags.PhotometricInterpretation].ToString();
                PhotometricInterpretation pi         = PhotometricInterpretation.FromCodeString(photometricInterpretationCode);

                if (pi.IsColor)
                {
                    pixelData = ToArgb(this.Parent, pixelData, pi);
                }
                else
                {
                    var overlayPlaneModuleIod = new OverlayPlaneModuleIod(Parent);
                    foreach (var overlayPlane in overlayPlaneModuleIod)
                    {
                        if (!overlayPlane.HasOverlayData && _overlayData[overlayPlane.Index] == null)
                        {
                            // if the overlay is embedded in pixel data and we haven't cached it yet, extract it now before we normalize the frame pixel data
                            var overlayData = OverlayData.UnpackFromPixelData(overlayPlane.OverlayBitPosition, Parent[DicomTags.BitsAllocated].GetInt32(0, 0), false, pixelData);
                            _overlayData[overlayPlane.Index] = overlayData;
                        }
                    }

                    NormalizeGrayscalePixels(this.Parent, pixelData);
                }

                return(pixelData);
            }
예제 #12
0
        /// <summary>
        /// Loads the DICOM file, checks that the printer supports its color mode, and
        /// adds its pixel data to the given image box as a Basic Color or Basic
        /// Grayscale image sequence item.  Returns without modifying the image box
        /// when the file cannot be loaded or the color mode is unsupported.
        /// </summary>
        /// <param name="imageBox">Image box to populate.</param>
        /// <param name="filename">Path of the DICOM file to load.</param>
        /// <param name="index">Image box position value to assign.</param>
        private void UpdateImageBox(DcmImageBox imageBox, String filename, int index)
        {
            // Removed the commented-out empty try/catch shell that used to wrap this
            // method: a silent catch-all would have hidden load/transcode failures.
            var ff = new DicomFileFormat();

            ff.Load(filename, DicomReadOptions.DefaultWithoutDeferredLoading);
            if (ff.Dataset == null)
            {
                return;
            }

            // Normalize to Implicit VR Little Endian before extracting pixel data.
            ff.Dataset.ChangeTransferSyntax(DicomTransferSyntax.ImplicitVRLittleEndian, null);

            var pixelData = new DcmPixelData(ff.Dataset);
            var pi        = PhotometricInterpretation.Lookup(pixelData.PhotometricInterpretation);

            // Grayscale only printer?
            if (pi.IsColor && _supportsColorPrinting == false)
            {
                pixelData.Unload();
                return;
            }

            // Color only printer?
            if (pi.IsColor == false && _supportsGrayscalePrinting == false)
            {
                pixelData.Unload();
                return;
            }

            DicomUID        imageBoxSOPClassUID;
            DcmItemSequence seq;
            var             item = new DcmItemSequenceItem();

            pixelData.UpdateDataset(item.Dataset);

            // Select the SOP class and sequence tag matching the image's color mode.
            if (pi.IsColor)
            {
                imageBoxSOPClassUID = DicomUID.BasicColorImageBoxSOPClass;
                seq = new DcmItemSequence(DicomTags.BasicColorImageSequence);
            }
            else
            {
                imageBoxSOPClassUID = DicomUID.BasicGrayscaleImageBoxSOPClass;
                seq = new DcmItemSequence(DicomTags.BasicGrayscaleImageSequence);
            }
            seq.AddSequenceItem(item);
            imageBox.Dataset.AddItem(seq);

            pixelData.Unload();

            imageBox.UpdateImageBox(imageBoxSOPClassUID);
            imageBox.ImageBoxPosition = (ushort)index;
        }
예제 #13
0
        /// <summary>
        /// Initializes pixel-data properties from the attributes in the given collection:
        /// SOP class, frame/compression attributes, rescale values, and modality/VOI LUT
        /// and palette color LUT presence.
        /// </summary>
        /// <param name="collection">Attribute collection to read the image pixel,
        /// compression, rescale and LUT related tags from.</param>
        protected DicomPixelData(DicomAttributeCollection collection)
        {
            // NOTE(review): presumably populates fields tagged with DicomField
            // attributes -- confirm against LoadDicomFields' implementation.
            collection.LoadDicomFields(this);

            SopClass = SopClass.GetSopClass(collection[DicomTags.SopClassUid].GetString(0, string.Empty));

            // Optional attributes: only read when present so defaults are preserved.
            if (collection.Contains(DicomTags.NumberOfFrames))
            {
                NumberOfFrames = collection[DicomTags.NumberOfFrames].GetInt32(0, 1);
            }
            if (collection.Contains(DicomTags.PlanarConfiguration))
            {
                PlanarConfiguration = collection[DicomTags.PlanarConfiguration].GetUInt16(0, 1);
            }
            if (collection.Contains(DicomTags.LossyImageCompression))
            {
                LossyImageCompression = collection[DicomTags.LossyImageCompression].GetString(0, string.Empty);
            }
            if (collection.Contains(DicomTags.LossyImageCompressionRatio))
            {
                LossyImageCompressionRatio = collection[DicomTags.LossyImageCompressionRatio].GetFloat32(0, 1.0f);
            }
            if (collection.Contains(DicomTags.LossyImageCompressionMethod))
            {
                LossyImageCompressionMethod = collection[DicomTags.LossyImageCompressionMethod].GetString(0, string.Empty);
            }
            if (collection.Contains(DicomTags.DerivationDescription))
            {
                DerivationDescription = collection[DicomTags.DerivationDescription].GetString(0, string.Empty);
            }
            if (collection.Contains(DicomTags.RescaleSlope))
            {
                RescaleSlope = collection[DicomTags.RescaleSlope].ToString();
            }
            if (collection.Contains(DicomTags.RescaleIntercept))
            {
                RescaleIntercept = collection[DicomTags.RescaleIntercept].ToString();
            }
            if (collection.Contains(DicomTags.ModalityLutSequence))
            {
                // Record only whether a non-empty modality LUT sequence exists.
                DicomAttribute attrib = collection[DicomTags.ModalityLutSequence];
                _hasDataModalityLut = !attrib.IsEmpty && !attrib.IsNull;
            }

            _linearVoiLuts = Window.GetWindowCenterAndWidth(collection);

            if (collection.Contains(DicomTags.VoiLutSequence))
            {
                // Record only whether a non-empty VOI LUT sequence exists.
                DicomAttribute attrib = collection[DicomTags.VoiLutSequence];
                _hasDataVoiLuts = !attrib.IsEmpty && !attrib.IsNull;
            }

            // Load the palette color LUT only for PALETTE COLOR images that carry
            // a lookup table descriptor.
            if (PhotometricInterpretation.Equals(Iod.PhotometricInterpretation.PaletteColor.Code) && collection.Contains(DicomTags.RedPaletteColorLookupTableDescriptor))
            {
                _paletteColorLut    = PaletteColorLut.Create(collection);
                _hasPaletteColorLut = true;
            }
        }
예제 #14
0
        /// <summary>
        /// A newly constructed Page reports the default PhotometricInterpretation.
        /// </summary>
        public void PhotometricInterpretationTest1()
        {
            // Arrange
            var target   = new Page();
            var expected = new PhotometricInterpretation();

            // Act
            PhotometricInterpretation actual = target.PhotometricInterpretation;

            // Assert
            Assert.Equal(expected, actual);
        }
예제 #15
0
        /// <summary>
        /// Transcoding frame 1 of a stored 16-bit file between the given transfer
        /// syntaxes should complete without throwing.
        /// </summary>
        public void GivenSupported16bitTransferSyntax_WhenRetrievingFrameAndAskingForConversion_ReturnedFileHasExpectedTransferSyntax(
            DicomTransferSyntax tsFrom,
            DicomTransferSyntax tsTo,
            PhotometricInterpretation photometricInterpretation)
        {
            // NOTE(review): blocks on .Result inside a synchronous test; consider making
            // the test async Task and awaiting the setup call instead.
            DicomFile dicomFile = StreamAndStoredFileFromDataset(photometricInterpretation, false, tsFrom).Result.dicomFile;

            // NOTE(review): return value discarded -- presumably called for side effects
            // or as a smoke check; confirm intent.
            dicomFile.Dataset.ToInstanceIdentifier();

            _transcoder.TranscodeFrame(dicomFile, 1, tsTo.UID.UID);
        }
예제 #16
0
        /// <summary>
        /// Transcoding a stored 16-bit file to the requested transfer syntax returns
        /// a file carrying that transfer syntax.
        /// </summary>
        public async Task GivenSupported16bitTransferSyntax_WhenRetrievingFileAndAskingForConversion_ReturnedFileHasExpectedTransferSyntax(
            DicomTransferSyntax tsFrom,
            DicomTransferSyntax tsTo,
            PhotometricInterpretation photometricInterpretation)
        {
            // Arrange: build and store a file using the source transfer syntax.
            (DicomFile dicomFile, Stream stream) = await StreamAndStoredFileFromDataset(photometricInterpretation, false, tsFrom);
            dicomFile.Dataset.ToInstanceIdentifier();

            // Act: transcode the stored stream to the target syntax.
            Stream transcoded = await _transcoder.TranscodeFileAsync(stream, tsTo.UID.UID);

            // Assert
            ValidateTransferSyntax(tsTo, transcoded);
        }
예제 #17
0
            /// <summary>
            /// Called by the base class to create a new byte buffer containing normalized pixel data
            /// for this frame (8 or 16-bit grayscale, or 32-bit ARGB).
            /// </summary>
            /// <returns>A new byte buffer containing the normalized pixel data.</returns>
            /// <exception cref="DicomCodecException">Thrown when the transfer syntax is
            /// encapsulated and no codec is registered for it.</exception>
            protected override byte[] CreateNormalizedPixelData()
            {
                DicomMessageBase message = this.Parent.SourceMessage;

                // Timing instrumentation for the performance report published below.
                CodeClock clock = new CodeClock();

                clock.Start();

                PhotometricInterpretation photometricInterpretation;

                byte[] rawPixelData = null;

                if (!message.TransferSyntax.Encapsulated)
                {
                    // Uncompressed data: read the frame directly and take the photometric
                    // interpretation from the dataset.
                    DicomUncompressedPixelData pixelData = new DicomUncompressedPixelData(message);
                    // DICOM library uses zero-based frame numbers
                    MemoryManager.Execute(delegate { rawPixelData = pixelData.GetFrame(_frameIndex); });

                    ExtractOverlayFrames(rawPixelData, pixelData.BitsAllocated);

                    photometricInterpretation = PhotometricInterpretation.FromCodeString(message.DataSet[DicomTags.PhotometricInterpretation]);
                }
                else if (DicomCodecRegistry.GetCodec(message.TransferSyntax) != null)
                {
                    // Compressed data: decompress via the registered codec, which also
                    // reports the decoded frame's photometric interpretation.
                    DicomCompressedPixelData pixelData = new DicomCompressedPixelData(message);
                    string pi = null;

                    MemoryManager.Execute(delegate { rawPixelData = pixelData.GetFrame(_frameIndex, out pi); });

                    photometricInterpretation = PhotometricInterpretation.FromCodeString(pi);
                }
                else
                {
                    throw new DicomCodecException("Unsupported transfer syntax");
                }

                if (photometricInterpretation.IsColor)
                {
                    rawPixelData = ToArgb(message.DataSet, rawPixelData, photometricInterpretation);
                }
                else
                {
                    NormalizeGrayscalePixels(message.DataSet, rawPixelData);
                }

                clock.Stop();
                PerformanceReportBroker.PublishReport("DicomMessageSopDataSource", "CreateFrameNormalizedPixelData", clock.Seconds);

                return(rawPixelData);
            }
예제 #18
0
        /// <summary>
        /// Stores the given pixel data on this instance, converting ARGB input to
        /// interleaved RGB (and updating the photometric interpretation) when the
        /// image is color; grayscale data is stored untouched.
        /// </summary>
        /// <param name="pixeldata">Source pixel data.</param>
        public void SetPixelData(PixelData pixeldata)
        {
            if (!IsColor)
            {
                PixelData = pixeldata.Raw;
                return;
            }

            _internalPhotometricInterpretation = null;
            PhotometricInterpretation          = "RGB";

            // Drop the alpha channel: three bytes per pixel instead of four.
            byte[] rgb = new byte[3 * Rows * Columns];
            ArgbToRgb(pixeldata.Raw, rgb);
            PixelData = rgb;
        }
예제 #19
0
            /// <summary>
            /// Produces the normalized pixel data for this frame: converted to 32-bit
            /// ARGB for color images, otherwise normalized grayscale with embedded
            /// overlay planes extracted and cached first.
            /// </summary>
            /// <returns>The normalized pixel data buffer.</returns>
            protected override byte[] CreateNormalizedPixelData()
            {
                byte[] pixelData = _framePixelData.GetUncompressedPixelData();

                string photometricInterpretationCode = this.Parent[DicomTags.PhotometricInterpretation].ToString();
                PhotometricInterpretation pi         = PhotometricInterpretation.FromCodeString(photometricInterpretationCode);

                TransferSyntax ts = TransferSyntax.GetTransferSyntax(this.Parent.TransferSyntaxUid);

                if (pi.IsColor)
                {
                    // NOTE(review): for JPEG/JPEG 2000 syntaxes the decoded data is treated
                    // as RGB regardless of the dataset's recorded interpretation --
                    // presumably the codecs always emit RGB; confirm against them.
                    if (ts == TransferSyntax.Jpeg2000ImageCompression ||
                        ts == TransferSyntax.Jpeg2000ImageCompressionLosslessOnly ||
                        ts == TransferSyntax.JpegExtendedProcess24 ||
                        ts == TransferSyntax.JpegBaselineProcess1)
                    {
                        pi = PhotometricInterpretation.Rgb;
                    }

                    pixelData = ToArgb(this.Parent, pixelData, pi);
                }
                else
                {
                    OverlayPlaneModuleIod opmi = new OverlayPlaneModuleIod(this.Parent);
                    foreach (OverlayPlane overlayPlane in opmi)
                    {
                        // Extract and cache overlays embedded in the pixel data before the
                        // frame data is normalized.
                        if (IsOverlayEmbedded(overlayPlane) && _overlayData[overlayPlane.Index] == null)
                        {
                            byte[] overlayData = OverlayData.UnpackFromPixelData(overlayPlane.OverlayBitPosition, this.Parent[DicomTags.BitsAllocated].GetInt32(0, 0), false, pixelData);
                            _overlayData[overlayPlane.Index] = overlayData;
                        }
                        else if (!overlayPlane.HasOverlayData)
                        {
                            Platform.Log(LogLevel.Warn, "The image {0} appears to be missing OverlayData for group 0x{1:X4}.", this.Parent.SopInstanceUid, overlayPlane.Group);
                        }
                    }

                    NormalizeGrayscalePixels(this.Parent, pixelData);
                }

                return(pixelData);
            }
예제 #20
0
        /// <summary>
        /// Builds an <see cref="IPixelData"/> accessor for a single frame of the given pixel data.
        /// </summary>
        /// <param name="pixelData">Source DICOM pixel data.</param>
        /// <param name="frame">Zero-based frame index.</param>
        /// <returns>A grayscale or 24-bit color pixel data wrapper for the frame.</returns>
        /// <exception cref="DicomImagingException">
        /// Thrown for unsupported bits-stored values or photometric interpretations.
        /// </exception>
        public static IPixelData Create(DicomPixelData pixelData, int frame)
        {
            PhotometricInterpretation pi = pixelData.PhotometricInterpretation;

            bool grayscaleOrPalette =
                pi == PhotometricInterpretation.Monochrome1 ||
                pi == PhotometricInterpretation.Monochrome2 ||
                pi == PhotometricInterpretation.PaletteColor;

            if (grayscaleOrPalette)
            {
                if (pixelData.BitsStored <= 8)
                {
                    return new GrayscalePixelDataU8(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame));
                }

                if (pixelData.BitsStored <= 16)
                {
                    if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
                    {
                        return new GrayscalePixelDataS16(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame));
                    }

                    return new GrayscalePixelDataU16(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame));
                }

                throw new DicomImagingException("Unsupported pixel data value for bits stored: {0}", pixelData.BitsStored);
            }

            if (pi == PhotometricInterpretation.Rgb || pi == PhotometricInterpretation.YbrFull)
            {
                var frameBuffer = pixelData.GetFrame(frame);

                // Planar (channel-separated) data is reordered to interleaved 24-bit samples.
                if (pixelData.PlanarConfiguration == PlanarConfiguration.Planar)
                {
                    frameBuffer = PixelDataConverter.PlanarToInterleaved24(frameBuffer);
                }

                return new ColorPixelData24(pixelData.Width, pixelData.Height, frameBuffer);
            }

            throw new DicomImagingException("Unsupported pixel data photometric interpretation: {0}", pi.Value);
        }
예제 #21
0
            /// <summary>
            /// Produces the normalized pixel data for this frame: converted to 32-bit
            /// ARGB for color images, otherwise normalized grayscale with any embedded
            /// overlay planes extracted and cached first.
            /// </summary>
            /// <returns>The normalized pixel data buffer.</returns>
            protected override byte[] CreateNormalizedPixelData()
            {
                byte[] pixelData = _framePixelData.GetUncompressedPixelData();

                string photometricInterpretationCode = this.Parent[DicomTags.PhotometricInterpretation].ToString();
                PhotometricInterpretation pi         = PhotometricInterpretation.FromCodeString(photometricInterpretationCode);

                TransferSyntax ts = TransferSyntax.GetTransferSyntax(this.Parent.TransferSyntaxUid);

                if (pi.IsColor)
                {
                    // NOTE(review): for JPEG/JPEG 2000 syntaxes the decoded data is treated
                    // as RGB regardless of the dataset's recorded interpretation --
                    // presumably the codecs always emit RGB; confirm against them.
                    if (ts == TransferSyntax.Jpeg2000ImageCompression ||
                        ts == TransferSyntax.Jpeg2000ImageCompressionLosslessOnly ||
                        ts == TransferSyntax.JpegExtendedProcess24 ||
                        ts == TransferSyntax.JpegBaselineProcess1)
                    {
                        pi = PhotometricInterpretation.Rgb;
                    }

                    pixelData = ToArgb(this.Parent, pixelData, pi);
                }
                else
                {
                    var overlayPlaneModuleIod = new OverlayPlaneModuleIod(Parent);
                    foreach (var overlayPlane in overlayPlaneModuleIod)
                    {
                        if (!overlayPlane.HasOverlayData && _overlayData[overlayPlane.Index] == null)
                        {
                            // if the overlay is embedded in pixel data and we haven't cached it yet, extract it now before we normalize the frame pixel data
                            var overlayData = OverlayData.UnpackFromPixelData(overlayPlane.OverlayBitPosition, Parent[DicomTags.BitsAllocated].GetInt32(0, 0), false, pixelData);
                            _overlayData[overlayPlane.Index] = overlayData;
                        }
                    }

                    NormalizeGrayscalePixels(this.Parent, pixelData);
                }

                return(pixelData);
            }
예제 #22
0
        /// <summary>
        /// Converts pixel data of a particular photometric interpretation
        /// to ARGB.
        /// </summary>
        /// <param name="photometricInterpretation">The <see cref="PhotometricInterpretation"/> of <paramref name="srcPixelData"/>.</param>
        /// <param name="planarConfiguration">The planar configuration of <paramref name="srcPixelData"/>; 0 selects the interleaved (triplet) path, any other value the planar path.</param>
        /// <param name="srcPixelData">The input pixel data to be converted.</param>
        /// <param name="argbPixelData">The converted output pixel data in ARGB format (4 bytes per pixel).</param>
        /// <exception cref="ArgumentException">Thrown when <paramref name="photometricInterpretation"/> is monochrome, palette color, or unknown.</exception>
        /// <remarks>
        /// Only RGB and YBR variants can be converted.  For PALETTE COLOR, use <see cref="ToArgb(int,bool,byte[],byte[],IDataLut)"/>.
        /// </remarks>
        public static void ToArgb(
            PhotometricInterpretation photometricInterpretation,
            int planarConfiguration,
            byte[] srcPixelData,
            byte[] argbPixelData)
        {
            // ARGB output uses four bytes per pixel.
            int sizeInPixels = argbPixelData.Length / 4;

            if (photometricInterpretation == PhotometricInterpretation.Monochrome1 ||
                photometricInterpretation == PhotometricInterpretation.Monochrome2 ||
                photometricInterpretation == PhotometricInterpretation.PaletteColor ||
                photometricInterpretation == PhotometricInterpretation.Unknown)
            {
                // Use a specific exception type instead of the base Exception so callers
                // can distinguish argument misuse; existing catch (Exception) handlers
                // continue to match.
                throw new ArgumentException("Invalid photometric interpretation.  Must be either RGB or a YBR variant.", nameof(photometricInterpretation));
            }

            if (photometricInterpretation == PhotometricInterpretation.Rgb)
            {
                if (planarConfiguration == 0)
                {
                    RgbTripletToArgb(srcPixelData, argbPixelData, sizeInPixels);
                }
                else
                {
                    RgbPlanarToArgb(srcPixelData, argbPixelData, sizeInPixels);
                }
            }
            else
            {
                // Remaining interpretations are the YBR variants.
                if (planarConfiguration == 0)
                {
                    YbrTripletToArgb(srcPixelData, argbPixelData, sizeInPixels, photometricInterpretation);
                }
                else
                {
                    YbrPlanarToArgb(srcPixelData, argbPixelData, sizeInPixels, photometricInterpretation);
                }
            }
        }
예제 #23
0
        /// <summary>
        /// Maps a DICOM photometric interpretation onto the JPEG library's colorspace
        /// value: RGB to RGB, the monochrome and palette-color interpretations to
        /// Grayscale, YBR variants to YCbCr, and anything else to Unknown.
        /// </summary>
        /// <param name="photometricInterpretation">The interpretation of the source pixel data.</param>
        /// <returns>The corresponding <see cref="Colorspace"/>.</returns>
        private static Colorspace GetColorSpace(PhotometricInterpretation photometricInterpretation)
        {
            if (photometricInterpretation == PhotometricInterpretation.Rgb)
            {
                return Colorspace.RGB;
            }

            if (photometricInterpretation == PhotometricInterpretation.Monochrome1 ||
                photometricInterpretation == PhotometricInterpretation.Monochrome2 ||
                photometricInterpretation == PhotometricInterpretation.PaletteColor)
            {
                return Colorspace.Grayscale;
            }

            if (photometricInterpretation == PhotometricInterpretation.YbrFull ||
                photometricInterpretation == PhotometricInterpretation.YbrFull422 ||
                photometricInterpretation == PhotometricInterpretation.YbrPartial422)
            {
                return Colorspace.YCbCr;
            }

            return Colorspace.Unknown;
        }
예제 #24
0
    /// <summary>
    /// Example entry point: streams each 2D frame of a DICOM file with
    /// gdcm.ImageRegionReader and dumps the decompressed raw pixels to /tmp/frame.raw.
    /// </summary>
    /// <param name="args">args[0] is the path of the DICOM file to read.</param>
    /// <returns>0 on success, 1 when the DICOM meta information cannot be read.</returns>
    public static int Main(string[] args)
    {
        string filename = args[0];
        uint fileSize = gdcm.PosixEmulation.FileSize(filename);

        // Set up the streaming reader and parse only the DICOM meta information.
        gdcm.ImageRegionReader reader = new gdcm.ImageRegionReader();
        reader.SetFileName(filename);
        if (!reader.ReadInformation())
        {
            return 1;
        }

        // Everything past the current stream offset is Pixel Data.
        uint currentPosition = reader.GetStreamCurrentPosition();
        uint remaining = fileSize - currentPosition;
        Console.WriteLine("Remaining bytes to read (Pixel Data): " + remaining.ToString());

        gdcm.File f = reader.GetFile();

        // Gather image geometry and pixel layout from the parsed header.
        UIntArrayType dims = ImageHelper.GetDimensionsValue(f);
        PixelFormat pf = ImageHelper.GetPixelFormatValue(f);
        int pixelSize = pf.GetPixelSize();
        PhotometricInterpretation pi = ImageHelper.GetPhotometricInterpretationValue(f);

        Console.WriteLine(pi.toString());

        // Buffer sized for exactly one full 2D frame of pixels.
        byte[] buffer = new byte[dims[0] * dims[1] * pixelSize];

        BoxRegion box = new BoxRegion();
        for (uint z = 0; z < dims[2]; z++)
        {
            // Request the full-size slice (dimx x dimy pixels) at depth z.
            box.SetDomain(0, dims[0] - 1, 0, dims[1] - 1, z, z);
            reader.SetRegion(box);

            // ReadIntoBuffer fails when buffer.Length is too small; see
            // reader.ComputeBufferLength() for the exact minimum size.
            if (!reader.ReadIntoBuffer(buffer, (uint)buffer.Length))
            {
                throw new Exception("can't read pixels error");
            }

            // NOTE: FileMode.Create re-creates the file every iteration, so only the
            // last frame survives on disk — same behavior as the original example.
            using (System.IO.Stream stream =
                       System.IO.File.Open(@"/tmp/frame.raw",
                                           System.IO.FileMode.Create))
            {
                System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream);
                writer.Write(buffer);
            }
        }

        return 0;
    }
예제 #25
0
    /// <summary>
    /// Example entry point: wraps a complete JPEG bitstream in a gdcm.Image as a single
    /// encapsulated fragment, decompresses it, and writes the raw bytes to /tmp/dd.raw.
    /// </summary>
    /// <param name="args">args[0] is the path of the JPEG file to decompress.</param>
    /// <returns>0 on success.</returns>
    public static int Main(string[] args)
    {
        string file1 = args[0];
        uint fsize = gdcm.PosixEmulation.FileSize(file1);

        byte[] jstream = new byte[fsize];

        // FIX: dispose the input stream and loop until the buffer is full — the
        // original leaked the FileStream and ignored Read's return value, and
        // Stream.Read may legally return fewer bytes than requested.
        using (System.IO.FileStream infile =
                   new System.IO.FileStream(file1, System.IO.FileMode.Open, System.IO.FileAccess.Read))
        {
            int total = 0;
            while (total < jstream.Length)
            {
                int read = infile.Read(jstream, total, jstream.Length - total);
                if (read <= 0)
                {
                    break;
                }
                total += read;
            }
        }

        Trace.DebugOn();
        Image image = new Image();

        image.SetNumberOfDimensions(2); // important for now
        DataElement pixeldata = new DataElement(new gdcm.Tag(0x7fe0, 0x0010));

        // DO NOT set a ByteValue here, JPEG is a particular kind of encapsulated syntax
        // in which one cannot use a simple byte array for storage. Instead, see
        // gdcm.SequenceOfFragments.
        //pixeldata.SetByteValue( jstream, new gdcm.VL( (uint)jstream.Length ) );

        // Create a new SequenceOfFragments C++ object, store it as a SmartPointer :
        SmartPtrFrag sq   = SequenceOfFragments.New();
        Fragment     frag = new Fragment();

        frag.SetByteValue(jstream, new gdcm.VL((uint)jstream.Length));
        // Single file => single fragment
        sq.AddFragment(frag);
        // Pass by reference:
        pixeldata.SetValue(sq.__ref__());

        // insert:
        image.SetDataElement(pixeldata);

        // JPEG use YBR to achieve better compression ratio by default (not RGB)
        // FIXME hardcoded:
        PhotometricInterpretation pi = new PhotometricInterpretation(PhotometricInterpretation.PIType.YBR_FULL);

        image.SetPhotometricInterpretation(pi);
        // FIXME hardcoded: 3 samples/pixel, 8 bits stored/allocated, high bit 7.
        PixelFormat pixeltype = new PixelFormat(3, 8, 8, 7);

        image.SetPixelFormat(pixeltype);

        // FIXME hardcoded transfer syntax and dimensions:
        image.SetTransferSyntax(new TransferSyntax(TransferSyntax.TSType.JPEGLosslessProcess14_1));
        image.SetDimension(0, 692);
        image.SetDimension(1, 721);

        // Decompress !
        byte[] decompressedData = new byte[(int)image.GetBufferLength()];
        image.GetBuffer(decompressedData);

        // Write out the decompressed bytes
        System.Console.WriteLine(image.toString());
        using (System.IO.Stream stream =
                   System.IO.File.Open(@"/tmp/dd.raw",
                                       System.IO.FileMode.Create))
        {
            System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream);
            writer.Write(decompressedData);
        }

        return 0;
    }
예제 #26
0
        /// <summary>
        /// Builds a random two-frame DICOM file (8- or 16-bit pixel data) with the given
        /// photometric interpretation and transfer syntax, saves it into a recyclable
        /// memory stream, and returns both — with the stream rewound to position 0.
        /// </summary>
        /// <param name="photometricInterpretation">Photometric interpretation to stamp on the generated file.</param>
        /// <param name="is8BitPixelData">True to generate 8-bit pixel data; false for 16-bit.</param>
        /// <param name="transferSyntax">Transfer syntax for the generated file.</param>
        /// <returns>The generated file and a rewound stream containing its serialized bytes.</returns>
        private async Task <(DicomFile dicomFile, Stream stream)> StreamAndStoredFileFromDataset(PhotometricInterpretation photometricInterpretation, bool is8BitPixelData, DicomTransferSyntax transferSyntax)
        {
            DicomFile dicomFile;
            if (is8BitPixelData)
            {
                dicomFile = Samples.CreateRandomDicomFileWith8BitPixelData(transferSyntax: transferSyntax.UID.UID, photometricInterpretation: photometricInterpretation.Value, frames: 2);
            }
            else
            {
                dicomFile = Samples.CreateRandomDicomFileWith16BitPixelData(transferSyntax: transferSyntax.UID.UID, photometricInterpretation: photometricInterpretation.Value, frames: 2);
            }

            MemoryStream stream = _recyclableMemoryStreamManager.GetStream();
            await dicomFile.SaveAsync(stream);

            // Rewind so callers can immediately read the serialized bytes back.
            stream.Position = 0;

            return (dicomFile, stream);
        }
예제 #27
0
        /// <summary>
        /// Create <see cref="IPixelData"/> from <see cref="DicomPixelData"/>
        /// according to the input <paramref name="pixelData"/> <seealso cref="PhotometricInterpretation"/>
        /// </summary>
        /// <param name="pixelData">Input pixel data</param>
        /// <param name="frame">Frame number (0 based)</param>
        /// <returns>Implementation of <seealso cref="IPixelData"/> according to <seealso cref="PhotometricInterpretation"/></returns>
        public static IPixelData Create(DicomPixelData pixelData, int frame)
        {
            PhotometricInterpretation pi = pixelData.PhotometricInterpretation;

            if (pi == null)
            {
                // generally ACR-NEMA: the tag is absent, so infer an interpretation.
                var samples = pixelData.SamplesPerPixel;
                if (samples == 0 || samples == 1)
                {
                    pi = pixelData.Dataset.Contains(DicomTag.RedPaletteColorLookupTableData)
                        ? PhotometricInterpretation.PaletteColor
                        : PhotometricInterpretation.Monochrome2;
                }
                else
                {
                    // assume, probably incorrectly, that the image is RGB
                    pi = PhotometricInterpretation.Rgb;
                }
            }

            if (pixelData.BitsStored == 1)
            {
                if (pixelData.Dataset.GetSingleValue<DicomUID>(DicomTag.SOPClassUID)
                    == DicomUID.MultiFrameSingleBitSecondaryCaptureImageStorage)
                    // Multi-frame Single Bit Secondary Capture is stored LSB -> MSB
                    return new SingleBitPixelData(
                        pixelData.Width,
                        pixelData.Height,
                        PixelDataConverter.ReverseBits(pixelData.GetFrame(frame)));
                else
                // Need sample images to verify that this is correct
                    return new SingleBitPixelData(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame));
            }
            else if (pi == PhotometricInterpretation.Monochrome1 || pi == PhotometricInterpretation.Monochrome2
                     || pi == PhotometricInterpretation.PaletteColor)
            {
                if (pixelData.BitsAllocated == 8 && pixelData.HighBit == 7 && pixelData.BitsStored == 8)
                    return new GrayscalePixelDataU8(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame));
                else if (pixelData.BitsAllocated <= 16)
                {
                    if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
                        return new GrayscalePixelDataS16(
                            pixelData.Width,
                            pixelData.Height,
                            pixelData.BitDepth,
                            pixelData.GetFrame(frame));
                    else
                        return new GrayscalePixelDataU16(
                            pixelData.Width,
                            pixelData.Height,
                            pixelData.BitDepth,
                            pixelData.GetFrame(frame));
                }
                else if (pixelData.BitsAllocated <= 32)
                {
                    if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
                        return new GrayscalePixelDataS32(
                            pixelData.Width,
                            pixelData.Height,
                            pixelData.BitDepth,
                            pixelData.GetFrame(frame));
                    else
                        return new GrayscalePixelDataU32(
                            pixelData.Width,
                            pixelData.Height,
                            pixelData.BitDepth,
                            pixelData.GetFrame(frame));
                }
                else
                    throw new DicomImagingException(
                        "Unsupported pixel data value for bits stored: {0}",
                        pixelData.BitsStored);
            }
            else if (pi == PhotometricInterpretation.Rgb || pi == PhotometricInterpretation.YbrFull
                     || pi == PhotometricInterpretation.YbrFull422 || pi == PhotometricInterpretation.YbrPartial422)
            {
                var buffer = pixelData.GetFrame(frame);

                // Normalize planar (RRR...GGG...BBB) storage to interleaved triplets.
                if (pixelData.PlanarConfiguration == PlanarConfiguration.Planar) buffer = PixelDataConverter.PlanarToInterleaved24(buffer);

                // Convert YBR variants to RGB so the result is always RGB24.
                if (pi == PhotometricInterpretation.YbrFull) buffer = PixelDataConverter.YbrFullToRgb(buffer);
                else if (pi == PhotometricInterpretation.YbrFull422) buffer = PixelDataConverter.YbrFull422ToRgb(buffer, pixelData.Width);
                else if (pi == PhotometricInterpretation.YbrPartial422) buffer = PixelDataConverter.YbrPartial422ToRgb(buffer, pixelData.Width);

                return new ColorPixelData24(pixelData.Width, pixelData.Height, buffer);
            }
            // FIX: removed an unreachable "else if (pi == PhotometricInterpretation.YbrFull422)"
            // branch — YBR_FULL_422 is already fully handled by the color branch above.
            else
            {
                throw new DicomImagingException(
                    "Unsupported pixel data photometric interpretation: {0}",
                    pi.Value);
            }
        }
예제 #28
0
  /// <summary>
  /// Example entry point: probes an MPEG-2 file with Mpeg2VideoInfo, wraps its raw
  /// bitstream as a single encapsulated fragment in a gdcm.Image, and writes a
  /// VideoEndoscopicImageStorage DICOM file named "dummy.dcm".
  /// </summary>
  /// <param name="args">args[0] is the path of the MPEG-2 file to wrap.</param>
  /// <returns>0 on success, 1 when the writer fails.</returns>
  public static int Main(string[] args)
    {
    string file1 = args[0];
    // Probe the MPEG-2 stream for timing and geometry metadata.
    Mpeg2VideoInfo info = new Mpeg2VideoInfo(file1);
    System.Console.WriteLine( info.StartTime );
    System.Console.WriteLine( info.EndTime );
    System.Console.WriteLine( info.Duration );
    System.Console.WriteLine( info.AspectRatio );
    System.Console.WriteLine( info.FrameRate );
    System.Console.WriteLine( info.PictureWidth );
    System.Console.WriteLine( info.PictureHeight );

    // NOTE(review): the reader is never given a filename and Read() is never called,
    // so GetImage() presumably returns an empty image that is populated manually
    // below — confirm this is the intended gdcm usage.
    ImageReader r = new ImageReader();
    //Image image = new Image();
    Image image = r.GetImage();
    image.SetNumberOfDimensions( 3 );
    DataElement pixeldata = new DataElement( new gdcm.Tag(0x7fe0,0x0010) );

    // Slurp the whole MPEG-2 file into memory.
    // NOTE(review): the FileStream is never disposed and Read's return value is
    // ignored — a partial read would silently truncate the stored bitstream.
    System.IO.FileStream infile =
      new System.IO.FileStream(file1, System.IO.FileMode.Open, System.IO.FileAccess.Read);
    uint fsize = gdcm.PosixEmulation.FileSize(file1);

    byte[] jstream  = new byte[fsize];
    infile.Read(jstream, 0 , jstream.Length);

    // Encapsulated syntax: store the bitstream as a single fragment in a
    // SequenceOfFragments (held through a SmartPointer), passed by reference.
    SmartPtrFrag sq = SequenceOfFragments.New();
    Fragment frag = new Fragment();
    frag.SetByteValue( jstream, new gdcm.VL( (uint)jstream.Length) );
    sq.AddFragment( frag );
    pixeldata.SetValue( sq.__ref__() );

    // insert:
    image.SetDataElement( pixeldata );

    PhotometricInterpretation pi = new PhotometricInterpretation( PhotometricInterpretation.PIType.YBR_PARTIAL_420 );
    image.SetPhotometricInterpretation( pi );
    // FIXME hardcoded: 3 samples/pixel, 8 bits stored/allocated, high bit 7.
    PixelFormat pixeltype = new PixelFormat(3,8,8,7);
    image.SetPixelFormat( pixeltype );

    // FIXME hardcoded:
    TransferSyntax ts = new TransferSyntax( TransferSyntax.TSType.MPEG2MainProfile);
    image.SetTransferSyntax( ts );

    image.SetDimension(0, (uint)info.PictureWidth);
    image.SetDimension(1, (uint)info.PictureHeight);
    // NOTE(review): the frame count (dimension 2) is hardcoded to 721 rather than
    // derived from the probed video info — verify against the input file.
    image.SetDimension(2, 721);

    ImageWriter writer = new ImageWriter();
    gdcm.File file = writer.GetFile();
    file.GetHeader().SetDataSetTransferSyntax( ts );
    // Anonymizer is used here only as a convenient way to set dataset attributes.
    Anonymizer anon = new Anonymizer();
    anon.SetFile( file );

    MediaStorage ms = new MediaStorage( MediaStorage.MSType.VideoEndoscopicImageStorage);

    UIDGenerator gen = new UIDGenerator();
    // SOP Class UID, Cine Rate, Frame Time, Aspect Ratio, Lossy Image Compression.
    anon.Replace( new Tag(0x0008,0x16), ms.GetString() );
    anon.Replace( new Tag(0x0018,0x40), "25" );
    anon.Replace( new Tag(0x0018,0x1063), "40.000000" );
    anon.Replace( new Tag(0x0028,0x34), "4\\3" );
    anon.Replace( new Tag(0x0028,0x2110), "01" );

    writer.SetImage( image );
    writer.SetFileName( "dummy.dcm" );
    if( !writer.Write() )
      {
      System.Console.WriteLine( "Could not write" );
      return 1;
      }

    return 0;
    }
예제 #29
0
		/// <summary>
		/// Reads the core image-description tags (size, bits per sample, compression,
		/// photometric interpretation and strip layout) from the given directory.
		/// The tag names (StripOffsets, RowsPerStrip, ...) suggest a TIFF-style IFD —
		/// TODO confirm against the ImageDirectory implementation.
		/// </summary>
		/// <param name="directory">Directory of tagged values to read from.</param>
		public ImageLoader (ImageDirectory directory) 
		{
			// Basic geometry and per-sample bit depths.
			width = directory.Lookup (TagId.ImageWidth).ValueAsLong [0];
			length = directory.Lookup (TagId.ImageLength).ValueAsLong [0];
			bps = directory.Lookup (TagId.BitsPerSample).ValueAsLong;
			
			compression = (Compression) directory.Lookup (TagId.Compression).ValueAsLong [0];
			interpretation = (PhotometricInterpretation) directory.Lookup (TagId.PhotometricInterpretation).ValueAsLong [0];
			
			// Strip layout used to locate the pixel data on disk.
			offsets = directory.Lookup (TagId.StripOffsets).ValueAsLong;
			strip_byte_counts = directory.Lookup (TagId.StripByteCounts).ValueAsLong;
			rows_per_strip = directory.Lookup (TagId.RowsPerStrip).ValueAsLong [0];

			// NOTE(review): the line below is truncated in the source — the condition
			// and body of this "if" are missing, so this snippet does not compile as-is.
			if (interpretation != 
		}
  /// <summary>
  /// Example entry point: loads every file in a directory as one JPEG fragment per
  /// slice of a 3D volume, decompresses the volume, and writes it to /tmp/dd.raw.
  /// </summary>
  /// <param name="args">args[0] is the directory containing the per-slice JPEG files.</param>
  /// <returns>0 on success.</returns>
  public static int Main(string[] args)
    {
    string directory = args[0];
    gdcm.Directory dir = new gdcm.Directory();
    uint nfiles = dir.Load(directory);
    gdcm.FilenamesType filenames = dir.GetFilenames();

    Image image = new Image();
    image.SetNumberOfDimensions( 3 ); // important for now
    DataElement pixeldata = new DataElement( new gdcm.Tag(0x7fe0,0x0010) );

    // Create a new SequenceOfFragments C++ object, store it as a SmartPointer :
    SmartPtrFrag sq = SequenceOfFragments.New();

    // Yeah, the file are not garantee to be in order, please adapt...
    for(uint i = 0; i < nfiles; ++i)
      {
      System.Console.WriteLine( filenames[(int)i] );
      string file = filenames[(int)i];
      uint fsize = gdcm.PosixEmulation.FileSize(file);
      byte[] jstream  = new byte[fsize];

      // FIX: dispose each input stream (the original leaked one FileStream per
      // iteration) and loop until the buffer is full — Stream.Read may legally
      // return fewer bytes than requested.
      using (System.IO.FileStream infile =
        new System.IO.FileStream(file, System.IO.FileMode.Open, System.IO.FileAccess.Read))
        {
        int total = 0;
        while (total < jstream.Length)
          {
          int read = infile.Read(jstream, total, jstream.Length - total);
          if (read <= 0)
            {
            break;
            }
          total += read;
          }
        }

      Fragment frag = new Fragment();
      frag.SetByteValue( jstream, new gdcm.VL( (uint)jstream.Length) );
      sq.AddFragment( frag );
      }

    // Pass by reference:
    pixeldata.SetValue( sq.__ref__() );

    // insert:
    image.SetDataElement( pixeldata );

    // JPEG use YBR to achieve better compression ratio by default (not RGB)
    // FIXME hardcoded:
    PhotometricInterpretation pi = new PhotometricInterpretation( PhotometricInterpretation.PIType.MONOCHROME2 );
    image.SetPhotometricInterpretation( pi );
    // FIXME hardcoded: 1 sample/pixel, 8 bits stored/allocated, high bit 7.
    PixelFormat pixeltype = new PixelFormat(1,8,8,7);
    image.SetPixelFormat( pixeltype );

    // FIXME hardcoded transfer syntax and volume dimensions:
    image.SetTransferSyntax( new TransferSyntax( TransferSyntax.TSType.JPEGLosslessProcess14_1 ) );
    image.SetDimension(0, 512);
    image.SetDimension(1, 512);
    image.SetDimension(2, 355);

    // Decompress !
    byte[] decompressedData = new byte[(int)image.GetBufferLength()];
    image.GetBuffer(decompressedData);

    // Write out the decompressed bytes
    System.Console.WriteLine(image.toString());
    using (System.IO.Stream stream =
      System.IO.File.Open(@"/tmp/dd.raw",
        System.IO.FileMode.Create))
      {
      System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream);
      writer.Write(decompressedData);
      }

    return 0;
    }
예제 #31
0
        /// <summary>
        /// Validates that the input image property relationships are compatible.
        /// </summary>
        /// <param name="bitsStored">Bits Stored (0028,0101); must not exceed <paramref name="bitsAllocated"/>.</param>
        /// <param name="bitsAllocated">Bits Allocated (0028,0100).</param>
        /// <param name="highBit">High Bit (0028,0102); must fit within the allocated bits.</param>
        /// <param name="photometricInterpretation">Photometric Interpretation (0028,0004).</param>
        /// <param name="planarConfiguration">Planar Configuration (0028,0006); only checked for multi-sample data.</param>
        /// <param name="samplesPerPixel">Samples per Pixel (0028,0002).</param>
        /// <exception cref="DicomDataException">Thrown when any relationship is invalid.</exception>
        public static void ValidateImagePropertyRelationships(int bitsStored, int bitsAllocated, int highBit, PhotometricInterpretation photometricInterpretation, int planarConfiguration, int samplesPerPixel)
        {
            if (bitsStored > bitsAllocated)
            {
                throw new DicomDataException(String.Format(SR.ExceptionInvalidBitsStoredBitsAllocated, bitsStored, bitsAllocated));
            }

            if (highBit > bitsAllocated - 1)
            {
                throw new DicomDataException(String.Format(SR.ExceptionInvalidHighBitBitsAllocated, highBit, bitsAllocated));
            }

            // Monochrome images must carry exactly one sample per pixel.
            bool isMonochrome =
                photometricInterpretation == PhotometricInterpretation.Monochrome1 ||
                photometricInterpretation == PhotometricInterpretation.Monochrome2;
            if (isMonochrome && samplesPerPixel != 1)
            {
                throw new DicomDataException(String.Format(SR.ExceptionInvalidPhotometricInterpretationSamplesPerPixel, photometricInterpretation, samplesPerPixel));
            }

            // Planar configuration is only meaningful for multi-sample data; 0 and 1
            // are the only legal values.
            if (samplesPerPixel != 1 && planarConfiguration != 0 && planarConfiguration != 1)
            {
                throw new DicomDataException(String.Format(SR.ExceptionInvalidPlanarConfiguration));
            }

            // Color interpretations (RGB and all YBR variants) require three samples.
            bool isColor =
                photometricInterpretation == PhotometricInterpretation.Rgb ||
                photometricInterpretation == PhotometricInterpretation.YbrFull ||
                photometricInterpretation == PhotometricInterpretation.YbrFull422 ||
                photometricInterpretation == PhotometricInterpretation.YbrPartial422 ||
                photometricInterpretation == PhotometricInterpretation.YbrIct ||
                photometricInterpretation == PhotometricInterpretation.YbrRct;
            if (isColor && samplesPerPixel != 3)
            {
                throw new DicomDataException(String.Format(SR.ExceptionInvalidPhotometricInterpretationSamplesPerPixel, photometricInterpretation, samplesPerPixel));
            }
        }
예제 #32
0
        /// <summary>
        /// Converts colour pixel data to ARGB.
        /// </summary>
        /// <param name="dicomAttributeProvider">Source of the DICOM attributes (rows, columns, bits allocated, ...) describing <paramref name="pixelData"/>.</param>
        /// <param name="pixelData">The raw colour pixel data to convert.</param>
        /// <param name="photometricInterpretation">Photometric interpretation of <paramref name="pixelData"/>.</param>
        /// <returns>A newly allocated buffer of ARGB pixels (4 bytes per pixel).</returns>
        protected static byte[] ToArgb(IDicomAttributeProvider dicomAttributeProvider, byte[] pixelData, PhotometricInterpretation photometricInterpretation)
        {
            CodeClock clock = new CodeClock();

            clock.Start();

            int rows        = dicomAttributeProvider[DicomTags.Rows].GetInt32(0, 0);
            int columns     = dicomAttributeProvider[DicomTags.Columns].GetInt32(0, 0);
            int sizeInBytes = rows * columns * 4; // 4 bytes per ARGB pixel

            byte[] argbPixelData = MemoryManager.Allocate <byte>(sizeInBytes);

            // Convert palette colour images to ARGB so we don't get interpolation artifacts
            // when rendering.
            if (photometricInterpretation == PhotometricInterpretation.PaletteColor)
            {
                int bitsAllocated       = dicomAttributeProvider[DicomTags.BitsAllocated].GetInt32(0, 0);
                int pixelRepresentation = dicomAttributeProvider[DicomTags.PixelRepresentation].GetInt32(0, 0);

                ColorSpaceConverter.ToArgb(
                    bitsAllocated,
                    pixelRepresentation != 0, // FIX: was the redundant "!= 0 ? true : false"
                    pixelData,
                    argbPixelData,
                    PaletteColorMap.Create(dicomAttributeProvider));
            }
            // Convert RGB and YBR variants to ARGB
            else
            {
                int planarConfiguration = dicomAttributeProvider[DicomTags.PlanarConfiguration].GetInt32(0, 0);

                ColorSpaceConverter.ToArgb(
                    photometricInterpretation,
                    planarConfiguration,
                    pixelData,
                    argbPixelData);
            }

            clock.Stop();
            PerformanceReportBroker.PublishReport("DicomMessageSopDataSource", "ToArgb", clock.Seconds);

            return(argbPixelData);
        }
예제 #33
0
    /// <summary>
    /// Example entry point: wraps a complete JPEG bitstream in a gdcm.Image as a single
    /// encapsulated fragment, decompresses it, and writes the raw bytes to /tmp/dd.raw.
    /// </summary>
    /// <param name="args">args[0] is the path of the JPEG file to decompress.</param>
    /// <returns>0 on success.</returns>
    public static int Main(string[] args)
    {
        string file1 = args[0];
        uint fsize = gdcm.PosixEmulation.FileSize(file1);

        byte[] jstream  = new byte[fsize];

        // FIX: dispose the input stream and loop until the buffer is full — the
        // original leaked the FileStream and ignored Read's return value, and
        // Stream.Read may legally return fewer bytes than requested.
        using (System.IO.FileStream infile =
          new System.IO.FileStream(file1, System.IO.FileMode.Open, System.IO.FileAccess.Read))
        {
            int total = 0;
            while (total < jstream.Length)
            {
                int read = infile.Read(jstream, total, jstream.Length - total);
                if (read <= 0)
                {
                    break;
                }
                total += read;
            }
        }

        Trace.DebugOn();
        Image image = new Image();
        image.SetNumberOfDimensions( 2 ); // important for now
        DataElement pixeldata = new DataElement( new gdcm.Tag(0x7fe0,0x0010) );

        // DO NOT set a ByteValue here, JPEG is a particular kind of encapsulated syntax
        // in which one cannot use a simple byte array for storage. Instead, see
        // gdcm.SequenceOfFragments.
        //pixeldata.SetByteValue( jstream, new gdcm.VL( (uint)jstream.Length ) );

        // Create a new SequenceOfFragments C++ object, store it as a SmartPointer :
        SmartPtrFrag sq = SequenceOfFragments.New();
        Fragment frag = new Fragment();
        frag.SetByteValue( jstream, new gdcm.VL( (uint)jstream.Length) );
        // Single file => single fragment
        sq.AddFragment( frag );
        // Pass by reference:
        pixeldata.SetValue( sq.__ref__() );

        // insert:
        image.SetDataElement( pixeldata );

        // JPEG use YBR to achieve better compression ratio by default (not RGB)
        // FIXME hardcoded:
        PhotometricInterpretation pi = new PhotometricInterpretation( PhotometricInterpretation.PIType.YBR_FULL );
        image.SetPhotometricInterpretation( pi );
        // FIXME hardcoded: 3 samples/pixel, 8 bits stored/allocated, high bit 7.
        PixelFormat pixeltype = new PixelFormat(3,8,8,7);
        image.SetPixelFormat( pixeltype );

        // FIXME hardcoded transfer syntax and dimensions:
        image.SetTransferSyntax( new TransferSyntax( TransferSyntax.TSType.JPEGLosslessProcess14_1 ) );
        image.SetDimension(0, 692);
        image.SetDimension(1, 721);

        // Decompress !
        byte[] decompressedData = new byte[(int)image.GetBufferLength()];
        image.GetBuffer(decompressedData);

        // Write out the decompressed bytes
        System.Console.WriteLine(image.toString());
        using (System.IO.Stream stream =
          System.IO.File.Open(@"/tmp/dd.raw",
        System.IO.FileMode.Create))
          {
          System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream);
          writer.Write(decompressedData);
          }

        return 0;
    }
    /// <summary>
    /// Example entry point: loads every file in a directory as one JPEG fragment per
    /// slice of a 3D volume, decompresses the volume, and writes it to /tmp/dd.raw.
    /// </summary>
    /// <param name="args">args[0] is the directory containing the per-slice JPEG files.</param>
    /// <returns>0 on success.</returns>
    public static int Main(string[] args)
    {
        string directory = args[0];

        gdcm.Directory dir    = new gdcm.Directory();
        uint           nfiles = dir.Load(directory);

        gdcm.FilenamesType filenames = dir.GetFilenames();

        Image image = new Image();

        image.SetNumberOfDimensions(3); // important for now
        DataElement pixeldata = new DataElement(new gdcm.Tag(0x7fe0, 0x0010));

        // Create a new SequenceOfFragments C++ object, store it as a SmartPointer :
        SmartPtrFrag sq = SequenceOfFragments.New();

        // Yeah, the file are not garantee to be in order, please adapt...
        for (uint i = 0; i < nfiles; ++i)
        {
            System.Console.WriteLine(filenames[(int)i]);
            string file = filenames[(int)i];
            uint fsize = gdcm.PosixEmulation.FileSize(file);

            byte[] jstream = new byte[fsize];

            // FIX: dispose each input stream (the original leaked one FileStream per
            // iteration of this loop) and read until the buffer is full — Stream.Read
            // may legally return fewer bytes than requested.
            using (System.IO.FileStream infile =
                       new System.IO.FileStream(file, System.IO.FileMode.Open, System.IO.FileAccess.Read))
            {
                int total = 0;
                while (total < jstream.Length)
                {
                    int read = infile.Read(jstream, total, jstream.Length - total);
                    if (read <= 0)
                    {
                        break;
                    }
                    total += read;
                }
            }

            Fragment frag = new Fragment();
            frag.SetByteValue(jstream, new gdcm.VL((uint)jstream.Length));
            sq.AddFragment(frag);
        }

        // Pass by reference:
        pixeldata.SetValue(sq.__ref__());

        // insert:
        image.SetDataElement(pixeldata);

        // JPEG use YBR to achieve better compression ratio by default (not RGB)
        // FIXME hardcoded:
        PhotometricInterpretation pi = new PhotometricInterpretation(PhotometricInterpretation.PIType.MONOCHROME2);

        image.SetPhotometricInterpretation(pi);
        // FIXME hardcoded: 1 sample/pixel, 8 bits stored/allocated, high bit 7.
        PixelFormat pixeltype = new PixelFormat(1, 8, 8, 7);

        image.SetPixelFormat(pixeltype);

        // FIXME hardcoded transfer syntax and volume dimensions:
        image.SetTransferSyntax(new TransferSyntax(TransferSyntax.TSType.JPEGLosslessProcess14_1));
        image.SetDimension(0, 512);
        image.SetDimension(1, 512);
        image.SetDimension(2, 355);

        // Decompress !
        byte[] decompressedData = new byte[(int)image.GetBufferLength()];
        image.GetBuffer(decompressedData);

        // Write out the decompressed bytes
        System.Console.WriteLine(image.toString());
        using (System.IO.Stream stream =
                   System.IO.File.Open(@"/tmp/dd.raw",
                                       System.IO.FileMode.Create))
        {
            System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream);
            writer.Write(decompressedData);
        }

        return 0;
    }
예제 #35
0
    /// <summary>
    /// Example entry point: reads a DICOM image from args[0], repacks its pixel buffer
    /// into a fresh DataElement, and writes the result to args[1].
    /// </summary>
    /// <param name="args">args[0] is the input file; args[1] is the output file.</param>
    /// <returns>0 on success, 1 when reading or writing fails.</returns>
    public static int Main(string[] args)
    {
        string      file1  = args[0];
        string      file2  = args[1];
        ImageReader reader = new ImageReader();

        reader.SetFileName(file1);
        bool ret = reader.Read();

        if (!ret)
        {
            return(1);
        }

        // NOTE(review): `image` receives the dimension count and (further below) the
        // pixel format and photometric interpretation, but it is never written out —
        // only `ir` is passed to the writer. Confirm `image` is intentionally unused.
        Image image = new Image();
        Image ir    = reader.GetImage();

        image.SetNumberOfDimensions(ir.GetNumberOfDimensions());

        //Just for fun:
        //int dircos =  ir.GetDirectionCosines();
        //t = gdcm.Orientation.GetType(dircos);
        //int l = gdcm.Orientation.GetLabel(t);
        //System.Console.WriteLine( "Orientation label:" + l );

        // Set the dimensions,
        // 1. either one at a time
        //image.SetDimension(0, ir.GetDimension(0) );
        //image.SetDimension(1, ir.GetDimension(1) );

        // 2. the array at once
        uint[] dims = { 0, 0 };
        // Just for fun let's invert the dimensions:
        // NOTE(review): this deliberately swaps width and height on the image that is
        // written out — intended as a demo, not something to copy into real code.
        dims[0] = ir.GetDimension(1);
        dims[1] = ir.GetDimension(0);
        ir.SetDimensions(dims);

        PixelFormat pixeltype = ir.GetPixelFormat();

        image.SetPixelFormat(pixeltype);

        PhotometricInterpretation pi = ir.GetPhotometricInterpretation();

        image.SetPhotometricInterpretation(pi);

        // Copy the decoded pixel buffer into a fresh Pixel Data element and put it
        // back on the image that will be written.
        DataElement pixeldata = new DataElement(new Tag(0x7fe0, 0x0010));

        byte[] str1 = new byte[ir.GetBufferLength()];
        ir.GetBuffer(str1);
        //System.Console.WriteLine( ir.GetBufferLength() );
        pixeldata.SetByteValue(str1, new VL((uint)str1.Length));
        //image.SetDataElement( pixeldata );
        ir.SetDataElement(pixeldata);


        ImageWriter writer = new ImageWriter();

        writer.SetFileName(file2);
        writer.SetFile(reader.GetFile());
        writer.SetImage(ir);
        ret = writer.Write();
        if (!ret)
        {
            return(1);
        }

        return(0);
    }