/// <summary>
/// Decodes JPEG-LS compressed pixel data frame-by-frame using the native CharLS library.
/// YBR_FULL / YBR_FULL_422 input is converted to RGB before decoding.
/// </summary>
/// <param name="oldPixelData">Source (compressed) pixel data.</param>
/// <param name="newPixelData">Destination pixel data receiving the decoded frames.</param>
/// <param name="parameters">Codec parameters (unused by the decoder).</param>
/// <exception cref="InvalidOperationException">Thrown when the OS is neither Linux nor Windows.</exception>
/// <exception cref="DicomCodecException">Thrown when the native decoder reports an error.</exception>
public override void Decode(DicomPixelData oldPixelData, DicomPixelData newPixelData, DicomCodecParams parameters)
{
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux) && !RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        throw new InvalidOperationException("Unsupported OS Platform");
    }

    for (int frame = 0; frame < oldPixelData.NumberOfFrames; frame++)
    {
        IByteBuffer jpegData = oldPixelData.GetFrame(frame);

        // Converting photometric interpretation YbrFull or YbrFull422 to RGB
        if (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrFull)
        {
            jpegData = PixelDataConverter.YbrFullToRgb(jpegData);
            oldPixelData.PhotometricInterpretation = PhotometricInterpretation.Rgb;
        }
        else if (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrFull422)
        {
            jpegData = PixelDataConverter.YbrFull422ToRgb(jpegData, oldPixelData.Width);
            oldPixelData.PhotometricInterpretation = PhotometricInterpretation.Rgb;
        }

        // FIX: pin the buffers inside using blocks so the GC pin handles are
        // released even when the native call throws (original leaked both).
        using (PinnedByteArray jpegArray = new PinnedByteArray(jpegData.Data))
        {
            byte[] frameData = new byte[newPixelData.UncompressedFrameSize];

            using (PinnedByteArray frameArray = new PinnedByteArray(frameData))
            {
                JlsParameters jls = new JlsParameters();
                char[] errorMessage = new char[256];

                // IMPORT JpegLsDecode
                CharlsApiResultType err;
                unsafe
                {
                    if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
                    {
                        err = JpegLSDecode_Linux64((void*)frameArray.Pointer, frameData.Length, (void*)jpegArray.Pointer, Convert.ToUInt32(jpegData.Size), ref jls, errorMessage);
                    }
                    else
                    {
                        // OS already validated above, so this is the Windows path.
                        err = JpegLSDecode_Windows64((void*)frameArray.Pointer, frameData.Length, (void*)jpegArray.Pointer, Convert.ToUInt32(jpegData.Size), ref jls, errorMessage);
                    }
                }

                // FIX: the native result code was previously ignored, silently
                // emitting a corrupt frame on failure. Fail loudly instead.
                if (err != CharlsApiResultType.OK)
                {
                    throw new DicomCodecException("JPEG-LS decoding of frame {0} failed: {1}", frame, err);
                }

                // Large or multi-frame results go to a temp file to limit memory use.
                IByteBuffer buffer;
                if (frameData.Length >= (1 * 1024 * 1024) || oldPixelData.NumberOfFrames > 1)
                {
                    buffer = new TempFileBuffer(frameData);
                }
                else
                {
                    buffer = new MemoryByteBuffer(frameData);
                }

                buffer = EvenLengthBuffer.Create(buffer);
                newPixelData.AddFrame(buffer);
            }
        }
    }
}
/// <summary>
/// Create <see cref="IPixelData"/> from <see cref="DicomPixelData"/>
/// according to the input <paramref name="pixelData"/> <seealso cref="PhotometricInterpretation"/>
/// </summary>
/// <param name="pixelData">Input pixel data</param>
/// <param name="frame">Frame number (0 based)</param>
/// <returns>Implementation of <seealso cref="IPixelData"/> according to <seealso cref="PhotometricInterpretation"/></returns>
/// <exception cref="DicomImagingException">
/// Thrown for unsupported bit depths or photometric interpretations.
/// </exception>
public static IPixelData Create(DicomPixelData pixelData, int frame)
{
    PhotometricInterpretation pi = pixelData.PhotometricInterpretation;

    if (pi == null)
    {
        // generally ACR-NEMA
        var samples = pixelData.SamplesPerPixel;
        if (samples == 0 || samples == 1)
        {
            pi = pixelData.Dataset.Contains(DicomTag.RedPaletteColorLookupTableData)
                ? PhotometricInterpretation.PaletteColor
                : PhotometricInterpretation.Monochrome2;
        }
        else
        {
            // assume, probably incorrectly, that the image is RGB
            pi = PhotometricInterpretation.Rgb;
        }
    }

    if (pixelData.BitsStored == 1)
    {
        if (pixelData.Dataset.GetSingleValue<DicomUID>(DicomTag.SOPClassUID)
            == DicomUID.MultiFrameSingleBitSecondaryCaptureImageStorage)
        {
            // Multi-frame Single Bit Secondary Capture is stored LSB -> MSB
            return new SingleBitPixelData(
                pixelData.Width,
                pixelData.Height,
                PixelDataConverter.ReverseBits(pixelData.GetFrame(frame)));
        }

        // Need sample images to verify that this is correct
        return new SingleBitPixelData(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame));
    }

    if (pi == PhotometricInterpretation.Monochrome1
        || pi == PhotometricInterpretation.Monochrome2
        || pi == PhotometricInterpretation.PaletteColor)
    {
        if (pixelData.BitsAllocated == 8 && pixelData.HighBit == 7 && pixelData.BitsStored == 8)
        {
            return new GrayscalePixelDataU8(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame));
        }

        if (pixelData.BitsAllocated <= 16)
        {
            if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
            {
                return new GrayscalePixelDataS16(
                    pixelData.Width,
                    pixelData.Height,
                    pixelData.BitDepth,
                    pixelData.GetFrame(frame));
            }

            return new GrayscalePixelDataU16(
                pixelData.Width,
                pixelData.Height,
                pixelData.BitDepth,
                pixelData.GetFrame(frame));
        }

        if (pixelData.BitsAllocated <= 32)
        {
            if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
            {
                return new GrayscalePixelDataS32(
                    pixelData.Width,
                    pixelData.Height,
                    pixelData.BitDepth,
                    pixelData.GetFrame(frame));
            }

            return new GrayscalePixelDataU32(
                pixelData.Width,
                pixelData.Height,
                pixelData.BitDepth,
                pixelData.GetFrame(frame));
        }

        throw new DicomImagingException(
            "Unsupported pixel data value for bits stored: {0}",
            pixelData.BitsStored);
    }

    if (pi == PhotometricInterpretation.Rgb
        || pi == PhotometricInterpretation.YbrFull
        || pi == PhotometricInterpretation.YbrFull422
        || pi == PhotometricInterpretation.YbrPartial422)
    {
        var buffer = pixelData.GetFrame(frame);

        if (pixelData.PlanarConfiguration == PlanarConfiguration.Planar)
        {
            buffer = PixelDataConverter.PlanarToInterleaved24(buffer);
        }

        // Normalize every YBR variant to interleaved RGB.
        if (pi == PhotometricInterpretation.YbrFull)
        {
            buffer = PixelDataConverter.YbrFullToRgb(buffer);
        }
        else if (pi == PhotometricInterpretation.YbrFull422)
        {
            buffer = PixelDataConverter.YbrFull422ToRgb(buffer, pixelData.Width);
        }
        else if (pi == PhotometricInterpretation.YbrPartial422)
        {
            buffer = PixelDataConverter.YbrPartial422ToRgb(buffer, pixelData.Width);
        }

        return new ColorPixelData24(pixelData.Width, pixelData.Height, buffer);
    }

    // FIX: a second `pi == YbrFull422` branch that existed here was unreachable
    // (YbrFull422 is already matched by the color branch above) and was removed.
    throw new DicomImagingException(
        "Unsupported pixel data photometric interpretation: {0}",
        pi.Value);
}
/// <summary>
/// Encodes uncompressed pixel data to JPEG-LS frame-by-frame using the native CharLS library.
/// YBR_FULL / YBR_FULL_422 input is converted to RGB before encoding; YBR_PARTIAL variants
/// are rejected because the encoder does not support them.
/// </summary>
/// <param name="oldPixelData">Source (uncompressed) pixel data.</param>
/// <param name="newPixelData">Destination pixel data receiving the compressed frames.</param>
/// <param name="parameters">Optional <see cref="DicomJpegLsParams"/>; defaults are used when null.</param>
/// <exception cref="InvalidOperationException">Thrown when the OS is neither Linux nor Windows.</exception>
/// <exception cref="DicomCodecException">Thrown for unsupported photometric interpretations or native encode errors.</exception>
public override unsafe void Encode(DicomPixelData oldPixelData, DicomPixelData newPixelData, DicomCodecParams parameters)
{
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux) && !RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        throw new InvalidOperationException("Unsupported OS Platform");
    }

    if ((oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrPartial422)
        || (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrPartial420))
    {
        throw new DicomCodecException("Photometric Interpretation '{0}' not supported by JPEG-LS encoder", oldPixelData.PhotometricInterpretation);
    }

    DicomJpegLsParams jparams = (DicomJpegLsParams)parameters;
    if (jparams == null)
    {
        jparams = (DicomJpegLsParams)GetDefaultParameters();
    }

    //IMPORT JLSPARAMETERS (DLLIMPORT)
    JlsParameters jls = new JlsParameters
    {
        width = oldPixelData.Width,
        height = oldPixelData.Height,
        bitsPerSample = oldPixelData.BitsStored,
        stride = oldPixelData.BytesAllocated * oldPixelData.Width * oldPixelData.SamplesPerPixel,
        components = oldPixelData.SamplesPerPixel,
        interleaveMode = oldPixelData.SamplesPerPixel == 1
            ? CharlsInterleaveModeType.None
            : oldPixelData.PlanarConfiguration == PlanarConfiguration.Interleaved
                ? CharlsInterleaveModeType.Sample
                : CharlsInterleaveModeType.Line,
        colorTransformation = CharlsColorTransformationType.None
    };

    // Near-lossless mode carries an allowed error tolerance; lossless leaves it at 0.
    if (TransferSyntax == DicomTransferSyntax.JPEGLSNearLossless)
    {
        jls.allowedLossyError = jparams.AllowedError;
    }

    for (int frame = 0; frame < oldPixelData.NumberOfFrames; frame++)
    {
        IByteBuffer frameData = oldPixelData.GetFrame(frame);

        // Converting photometric interpretation YbrFull or YbrFull422 to RGB
        if (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrFull)
        {
            frameData = PixelDataConverter.YbrFullToRgb(frameData);
            oldPixelData.PhotometricInterpretation = PhotometricInterpretation.Rgb;
        }
        else if (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrFull422)
        {
            frameData = PixelDataConverter.YbrFull422ToRgb(frameData, oldPixelData.Width);
            oldPixelData.PhotometricInterpretation = PhotometricInterpretation.Rgb;
        }

        // FIX: pin the buffers inside using blocks so the GC pin handles are
        // released even when the native call throws (original leaked both).
        using (PinnedByteArray frameArray = new PinnedByteArray(frameData.Data))
        {
            // Output buffer sized to the uncompressed frame; lossless JPEG-LS
            // output is expected to fit within this bound.
            byte[] jpegData = new byte[frameData.Size];

            using (PinnedByteArray jpegArray = new PinnedByteArray(jpegData))
            {
                uint jpegDataSize = 0;
                char[] errorMessage = new char[256];

                // IMPORT JpegLsEncode
                CharlsApiResultType err;
                unsafe
                {
                    if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
                    {
                        err = JpegLSEncode_Linux64((void*)jpegArray.Pointer, checked((uint)jpegArray.Count), &jpegDataSize, (void*)frameArray.Pointer, checked((uint)frameArray.Count), ref jls, errorMessage);
                    }
                    else
                    {
                        // OS already validated above, so this is the Windows path.
                        err = JpegLSEncode_Windows64((void*)jpegArray.Pointer, checked((uint)jpegArray.Count), &jpegDataSize, (void*)frameArray.Pointer, checked((uint)frameArray.Count), ref jls, errorMessage);
                    }
                }

                // FIX: the native result code was previously ignored, silently
                // emitting a corrupt frame on failure. Fail loudly instead.
                if (err != CharlsApiResultType.OK)
                {
                    throw new DicomCodecException("JPEG-LS encoding of frame {0} failed: {1}", frame, err);
                }

                // Trim the buffer to the actual compressed length reported by CharLS.
                Array.Resize(ref jpegData, (int)jpegDataSize);

                // Large or multi-frame results go to a temp file to limit memory use.
                IByteBuffer buffer;
                if (jpegDataSize >= (1 * 1024 * 1024) || oldPixelData.NumberOfFrames > 1)
                {
                    buffer = new TempFileBuffer(jpegData);
                }
                else
                {
                    buffer = new MemoryByteBuffer(jpegData);
                }

                buffer = EvenLengthBuffer.Create(buffer);
                newPixelData.AddFrame(buffer);
            }
        }
    }
}