/// <summary>
/// Converts the pixel data of a grab result into a raw byte array.
/// Mono8 is copied as-is; the Mono12 variants are widened to Mono16
/// (two bytes per pixel) by the converter.
/// </summary>
/// <param name="converter">The Pylon pixel data converter performing the conversion.</param>
/// <param name="grabResult">The grab result whose pixel data is converted.</param>
/// <returns>The converted pixel buffer, or an empty array for a zero-sized image.</returns>
/// <exception cref="NotSupportedException">Thrown for pixel types other than Mono8/Mono12*.</exception>
public static byte[] ConvertToBytes(this PixelDataConverter converter, IGrabResult grabResult)
{
    if (grabResult.Width == 0 || grabResult.Height == 0)
    {
        // Idiomatic empty result: avoids allocating a fresh zero-length array per call.
        return Array.Empty<byte>();
    }

    var bufferSize = grabResult.Width * grabResult.Height;
    switch (grabResult.PixelTypeValue)
    {
        case PixelType.Mono8:
            converter.OutputPixelFormat = PixelType.Mono8;
            break;

        case PixelType.Mono12:
        case PixelType.Mono12p:
        case PixelType.Mono12packed:
            // 12-bit data is expanded to 16-bit output, so two bytes per pixel.
            converter.OutputPixelFormat = PixelType.Mono16;
            bufferSize *= 2;
            break;

        default:
            throw new NotSupportedException($"Pixel type {grabResult.PixelTypeValue} not supported");
    }

    var bytes = new byte[bufferSize];
    converter.Convert(bytes, grabResult);
    return bytes;
}
/// <summary>
/// Decodes JPEG-LS compressed pixel data frame by frame using the native CharLS
/// decoder (x64 P/Invoke; Linux and Windows only).
/// </summary>
/// <param name="oldPixelData">Compressed source pixel data.</param>
/// <param name="newPixelData">Destination pixel data that receives the decoded frames.</param>
/// <param name="parameters">Codec parameters (not used by this decoder).</param>
/// <exception cref="InvalidOperationException">Thrown on any OS other than Linux or Windows.</exception>
public override void Decode(DicomPixelData oldPixelData, DicomPixelData newPixelData, DicomCodecParams parameters)
{
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux) && !RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        throw new InvalidOperationException("Unsupported OS Platform");
    }

    for (int frame = 0; frame < oldPixelData.NumberOfFrames; frame++)
    {
        IByteBuffer jpegData = oldPixelData.GetFrame(frame);

        // Converting photometric interpretation YbrFull or YbrFull422 to RGB.
        // NOTE(review): this conversion runs on the still-compressed JPEG-LS byte
        // stream, not the decoded frame — confirm this is intentional.
        if (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrFull)
        {
            jpegData = PixelDataConverter.YbrFullToRgb(jpegData);
            oldPixelData.PhotometricInterpretation = PhotometricInterpretation.Rgb;
        }
        else if (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrFull422)
        {
            jpegData = PixelDataConverter.YbrFull422ToRgb(jpegData, oldPixelData.Width);
            oldPixelData.PhotometricInterpretation = PhotometricInterpretation.Rgb;
        }

        // Pin the compressed source and an uncompressed destination buffer for the native call.
        // NOTE(review): neither PinnedByteArray is disposed/unpinned in this method —
        // if PinnedByteArray wraps a GCHandle this pins memory until finalization; verify.
        PinnedByteArray jpegArray = new PinnedByteArray(jpegData.Data);
        byte[] frameData = new byte[newPixelData.UncompressedFrameSize];
        PinnedByteArray frameArray = new PinnedByteArray(frameData);
        JlsParameters jls = new JlsParameters();
        char[] errorMessage = new char[256];

        // IMPORT JpegLsDecode
        unsafe
        {
            // NOTE(review): the returned CharlsApiResultType 'err' is never checked,
            // so native decode failures are silently ignored here.
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            {
                CharlsApiResultType err = JpegLSDecode_Linux64((void *)frameArray.Pointer, frameData.Length, (void *)jpegArray.Pointer, Convert.ToUInt32(jpegData.Size), ref jls, errorMessage);
            }
            else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                CharlsApiResultType err = JpegLSDecode_Windows64((void *)frameArray.Pointer, frameData.Length, (void *)jpegArray.Pointer, Convert.ToUInt32(jpegData.Size), ref jls, errorMessage);
            }

            // Large frames (>= 1 MB) or multi-frame images are buffered on disk
            // instead of in memory.
            IByteBuffer buffer;
            if (frameData.Length >= (1 * 1024 * 1024) || oldPixelData.NumberOfFrames > 1)
            {
                buffer = new TempFileBuffer(frameData);
            }
            else
            {
                buffer = new MemoryByteBuffer(frameData);
            }
            buffer = EvenLengthBuffer.Create(buffer);
            newPixelData.AddFrame(buffer);
        }
    }
}
/// <summary>
/// Basler camera grab callback. Usage:
///   Camera camera = new Camera();
///   camera.CameraOpened += Configuration.AcquireContinuous;
///   camera.StreamGrabber.ImageGrabbed += HalconCameraConverter.OnImageGrabbed;
///   HalconCameraConverter.ImageReceived += YourImageProcessFunction;
///   camera.Open();
///   camera.StreamGrabber.UserData = deviceName;
/// </summary>
/// <param name="sender">The camera raising the event.</param>
/// <param name="e">Grab result event data.</param>
public static void OnImageGrabbed(object sender, ImageGrabbedEventArgs e)
{
    IGrabResult grabResult = e.GrabResult;
    if (!grabResult.GrabSucceeded)
    {
        // Surface the grab failure without killing the grab loop.
        // NOTE: this exception becomes an unobserved task exception unless a
        // TaskScheduler.UnobservedTaskException handler is installed.
        Task.Run(() =>
        {
            throw new InvalidOperationException($"Balser camera error {grabResult.ErrorCode}: {grabResult.ErrorDescription}");
        });
        return;
    }

    HImage ho_Image;
    using (var bitmap = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb))
    {
        // Lock the bits of the bitmap so we can write converted pixels directly.
        BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
        try
        {
            var converter = new PixelDataConverter { OutputPixelFormat = PixelType.Mono8 };
            IntPtr ptrBmp = bmpData.Scan0;
            converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult);

            // FIX: build the Halcon image while the bitmap bits are still locked.
            // The original called UnlockBits first and then read from ptrBmp, a
            // pointer that is no longer guaranteed to be valid after unlocking.
            ho_Image = new HImage("byte", grabResult.Width, grabResult.Height, ptrBmp);
        }
        finally
        {
            bitmap.UnlockBits(bmpData);
        }
    }

    OnImageReceived.BeginInvoke(sender, ho_Image, grabResult.StreamGrabberUserData.ToString(), EndingImageReceived, ho_Image);
}
/// <summary>
/// Opens the specified Basler camera, hooks the grab callback, and prepares
/// a pixel converter producing RGB8packed output.
/// </summary>
/// <param name="cameraInfo">Identifies the physical camera device to open.</param>
public Control_BaslerCamera(ICameraInfo cameraInfo)
{
    // Reset grab counters before the stream starts.
    _ncount = 0;
    _ngrabCount = -1;

    _camera = new Camera(cameraInfo);
    _camera.StreamGrabber.ImageGrabbed += StreamGrabber_ImageGrabbed;
    _camera.Open();

    _converter = new Basler.Pylon.PixelDataConverter
    {
        OutputPixelFormat = Basler.Pylon.PixelType.RGB8packed
    };
}
/// <summary>
/// Converts a grab result into a 32-bit RGB <see cref="Bitmap"/> via a
/// BGRA8packed conversion. Returns null for a zero-sized grab result.
/// </summary>
/// <param name="converter">The Pylon pixel data converter to use.</param>
/// <param name="grabResult">The grabbed image to convert.</param>
/// <returns>A new bitmap containing the converted pixels, or null.</returns>
public static Bitmap Convert(this PixelDataConverter converter, IGrabResult grabResult)
{
    var isEmpty = grabResult.Width == 0 || grabResult.Height == 0;
    if (isEmpty)
    {
        return null;
    }

    var result = new Bitmap(grabResult.Width, grabResult.Height, PixelFormat.Format32bppRgb);
    var lockArea = new Rectangle(0, 0, result.Width, result.Height);
    var lockedBits = result.LockBits(lockArea, ImageLockMode.ReadWrite, result.PixelFormat);

    // Write the converted pixels straight into the locked bitmap buffer.
    converter.OutputPixelFormat = PixelType.BGRA8packed;
    converter.Convert(lockedBits.Scan0, lockedBits.Stride * result.Height, grabResult);

    result.UnlockBits(lockedBits);
    return result;
}
// Closes the camera object and handles exceptions.
/// <summary>
/// Detaches all parameter controls, closes and disposes the camera, and
/// releases the pixel converter. Any failure is reported via ShowException.
/// </summary>
private void DestroyCamera()
{
    // Step 1: disconnect every parameter control from the device.
    try
    {
        if (camera != null)
        {
            testImageControl.Parameter = null;
            pixelFormatControl.Parameter = null;
            widthSliderControl.Parameter = null;
            heightSliderControl.Parameter = null;
            gainSliderControl.Parameter = null;
            exposureTimeSliderControl.Parameter = null;
        }
    }
    catch (Exception ex)
    {
        ShowException(ex);
    }

    // Step 2: close and dispose the camera device itself.
    try
    {
        if (camera != null)
        {
            camera.Close();
            camera.Dispose();
            camera = null;
        }
    }
    catch (Exception ex)
    {
        ShowException(ex);
    }

    // Step 3: finally drop the pixel converter.
    if (converter != null)
    {
        converter.Dispose();
        converter = null;
    }
}
/// <summary>
/// Creates the camera (first device found), the stream controller and the
/// pixel converter, wires the grab callback, opens the camera, logs the
/// device model and starts grabbing.
/// </summary>
public BaslerCameraControl()
{
    camera = new BaslerCamera(CameraSelectionStrategy.FirstFound);
    streamController = new StreamController();

    converter = new PixelDataConverter();
    converter.OutputPixelFormat = cameraOutputPixelFormat;

    camera.StreamGrabber.ImageGrabbed += OnImageGrabbed;

    OpenCamera();
    LOG(string.Format("Model: {0}", camera.Parameters[PLCamera.DeviceModelName].GetValue()));
    StartGrabbing();
}
/// <summary>
/// Creates an <see cref="IPixelData"/> wrapper for one frame of the given
/// <see cref="DicomPixelData"/>, choosing the concrete container from the
/// photometric interpretation, bits stored and pixel representation.
/// </summary>
/// <param name="pixelData">Source pixel data.</param>
/// <param name="frame">Zero-based frame number.</param>
/// <returns>Grayscale (8/16-bit) or 24-bit color pixel data for the frame.</returns>
/// <exception cref="DicomImagingException">
/// Thrown when bits stored exceeds 16 or the photometric interpretation is unsupported.
/// </exception>
public static IPixelData Create(DicomPixelData pixelData, int frame)
{
    PhotometricInterpretation pi = pixelData.PhotometricInterpretation;
    if (pi == PhotometricInterpretation.Monochrome1 || pi == PhotometricInterpretation.Monochrome2 || pi == PhotometricInterpretation.PaletteColor)
    {
        if (pixelData.BitsStored <= 8)
        {
            return (new GrayscalePixelDataU8(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame)));
        }
        else if (pixelData.BitsStored <= 16)
        {
            // Signedness selects between the S16 and U16 grayscale containers.
            if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
            {
                return (new GrayscalePixelDataS16(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame)));
            }
            else
            {
                return (new GrayscalePixelDataU16(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame)));
            }
        }
        else
        {
            throw new DicomImagingException("Unsupported pixel data value for bits stored: {0}", pixelData.BitsStored);
        }
    }
    else if (pi == PhotometricInterpretation.Rgb || pi == PhotometricInterpretation.YbrFull)
    {
        var buffer = pixelData.GetFrame(frame);

        // Re-order planar (RRR..GGG..BBB) data into interleaved RGB triplets.
        if (pixelData.PlanarConfiguration == PlanarConfiguration.Planar)
        {
            buffer = PixelDataConverter.PlanarToInterleaved24(buffer);
        }

        // NOTE(review): YBR_FULL data is wrapped as-is with no YBR->RGB color
        // conversion here — confirm downstream consumers expect raw YBR bytes.
        return (new ColorPixelData24(pixelData.Width, pixelData.Height, buffer));
    }
    else
    {
        throw new DicomImagingException("Unsupported pixel data photometric interpretation: {0}", pi.Value);
    }
}
/// <summary>
/// Stops grabbing, disables the parameter UI, then closes and disposes the
/// camera and the pixel converter. Failures are written to the log.
/// </summary>
private void DestroyCamera()
{
    // Stop the stream and detach the parameter wrapper first.
    try
    {
        if (camera != null)
        {
            camera.StreamGrabber.Stop();
            EnParamAll = false;
            Param = null;
        }
    }
    catch (Exception ex)
    {
        Log.Set(string.Format("Exception: {0}", ex.Message));
    }

    // Destroy the camera object.
    try
    {
        if (camera != null)
        {
            camera.Close();
            camera.Dispose();
            camera = null;
        }
    }
    catch (Exception ex)
    {
        Log.Set(string.Format("Exception: {0}", ex.Message));
    }

    // Destroy the converter object.
    if (converter != null)
    {
        converter.Dispose();
        converter = null;
    }
}
/// <summary>
/// (Re)connects to the camera identified by Tag: tears down any existing
/// connection, registers lifecycle and grab handlers, opens the device and,
/// on success, builds the parameter wrapper and the pixel converter.
/// Errors are written to the log instead of propagating.
/// </summary>
private void Connect_Camera()
{
    if (camera != null)
    {
        DestroyCamera();
    }

    try
    {
        camera = new Camera(Tag);
        camera.CameraOpened += Configuration.AcquireContinuous;

        // Lifecycle and streaming event subscriptions.
        camera.ConnectionLost += OnConnectionLost;
        camera.CameraOpened += OnCameraOpened;
        camera.CameraClosed += OnCameraClosed;
        camera.StreamGrabber.GrabStarted += OnGrabStarted;
        camera.StreamGrabber.ImageGrabbed += OnImageGrabbed;
        camera.StreamGrabber.GrabStopped += OnGrabStopped;

        camera.Open();

        if (camera.IsOpen)
        {
            Param = new BaslerParam(camera);
            EnParamAll = true;
            converter = new PixelDataConverter();
        }
    }
    catch (Exception ex)
    {
        Log.Set(ex.Message);
    }
}
/// <summary>
/// Creates and opens the first available Basler camera, configures continuous
/// acquisition, and hooks the image-grabbed callback.
/// </summary>
/// <returns>0 on success, -1 on any failure.</returns>
public short Open()
{
    try
    {
        camera = new Basler.Pylon.Camera();
        ModelName = camera.CameraInfo[CameraInfoKey.ModelName];

        // Switch to free-running continuous acquisition when the camera is opened.
        camera.CameraOpened += Configuration.AcquireContinuous;
        camera.Open();

        // MaxNumBuffer controls how many grab buffers are allocated (default is 10).
        camera.Parameters[PLCameraInstance.MaxNumBuffer].SetValue(5);

        // When triggering on AcquisitionStart, AcquisitionFrameCount must be set to 1:
        //camera.Parameters[PLCamera.AcquisitionFrameCount].SetValue(1);

        // Optional manual-exposure setup:
        //camera.Parameters[PLCamera.ExposureAuto].SetValue(PLCamera.ExposureAuto.Off);
        //camera.Parameters[PLCamera.GainAuto].SetValue(PLCamera.GainAuto.Off);

        // Continuous image acquisition mode.
        camera.Parameters[PLCamera.AcquisitionMode].SetValue(PLCamera.AcquisitionMode.Continuous);

        converter = new PixelDataConverter();
        camera.StreamGrabber.ImageGrabbed += onIamgeGrabbed;
        return 0;
    }
    catch (Exception)
    {
        return -1;
    }
}
/// <summary>
/// Converts a Pylon grab result into a WPF <see cref="BitmapSource"/> by first
/// rendering it into a 32-bit GDI+ bitmap via a BGRA8packed conversion.
/// </summary>
/// <param name="grabResult">The grabbed image to convert.</param>
/// <param name="width">Target bitmap width in pixels.</param>
/// <param name="height">Target bitmap height in pixels.</param>
/// <param name="pixelFormat">Requested format name; currently unused in the body —
/// TODO confirm whether it should drive the Bitmap pixel format below.</param>
/// <returns>The converted image, or null on failure (the error is logged).</returns>
public BitmapSource GetBitmapSourceFromData(IGrabResult grabResult, int width, int height, string pixelFormat = "Gray8")
{
    try
    {
        BitmapSource bmpSrc = null;
        PixelDataConverter converter = new PixelDataConverter();
        // Convert pixelFormat into the Bitmap's PixelFormat property.
        // NOTE(review): width/height come from the parameters, not from grabResult —
        // if they disagree with the grab result dimensions, Convert may fail or
        // write out of bounds. Verify against callers.
        Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format32bppRgb);
        // Lock the bits of the bitmap.
        BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
        converter.OutputPixelFormat = PixelType.BGRA8packed;
        // Place the pointer to the buffer of the bitmap.
        IntPtr ptrBmp = bmpData.Scan0;
        converter.Convert(ptrBmp, bmpData.Stride * bitmap.Height, grabResult); //Exception handling TODO
        bitmap.UnlockBits(bmpData);
        bmpSrc = BitmapToBitmapSource(bitmap);
        // NOTE(review): 'bitmap' is never disposed; if BitmapToBitmapSource copies
        // the pixels, this leaks a GDI handle per call — confirm and wrap in using.
        return (bmpSrc);
    }
    catch (Exception ex)
    {
        Log.L_I.WriteError(NameClass, ex);
        return (null);
    }
}
/// <summary>
/// Create <see cref="IPixelData"/> from <see cref="DicomPixelData"/>
/// according to the input <paramref name="pixelData"/> <seealso cref="PhotometricInterpretation"/>.
/// </summary>
/// <param name="pixelData">Input pixel data</param>
/// <param name="frame">Frame number (0 based)</param>
/// <returns>Implementation of <seealso cref="IPixelData"/> according to <seealso cref="PhotometricInterpretation"/></returns>
/// <exception cref="DicomImagingException">
/// Thrown for unsupported bits-stored values or photometric interpretations.
/// </exception>
public static IPixelData Create(DicomPixelData pixelData, int frame)
{
    PhotometricInterpretation pi = pixelData.PhotometricInterpretation;

    if (pi == null)
    {
        // generally ACR-NEMA: infer the interpretation from samples-per-pixel
        // and the presence of a palette color lookup table.
        var samples = pixelData.SamplesPerPixel;
        if (samples == 0 || samples == 1)
        {
            pi = pixelData.Dataset.Contains(DicomTag.RedPaletteColorLookupTableData)
                     ? PhotometricInterpretation.PaletteColor
                     : PhotometricInterpretation.Monochrome2;
        }
        else
        {
            // assume, probably incorrectly, that the image is RGB
            pi = PhotometricInterpretation.Rgb;
        }
    }

    if (pixelData.BitsStored == 1)
    {
        if (pixelData.Dataset.GetSingleValue<DicomUID>(DicomTag.SOPClassUID)
            == DicomUID.MultiFrameSingleBitSecondaryCaptureImageStorage)
        {
            // Multi-frame Single Bit Secondary Capture is stored LSB -> MSB
            return new SingleBitPixelData(
                pixelData.Width,
                pixelData.Height,
                PixelDataConverter.ReverseBits(pixelData.GetFrame(frame)));
        }

        // Need sample images to verify that this is correct
        return new SingleBitPixelData(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame));
    }

    if (pi == PhotometricInterpretation.Monochrome1 || pi == PhotometricInterpretation.Monochrome2
        || pi == PhotometricInterpretation.PaletteColor)
    {
        if (pixelData.BitsAllocated == 8 && pixelData.HighBit == 7 && pixelData.BitsStored == 8)
        {
            return new GrayscalePixelDataU8(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame));
        }

        if (pixelData.BitsAllocated <= 16)
        {
            // Signedness selects between the S16 and U16 grayscale containers.
            if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
            {
                return new GrayscalePixelDataS16(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame));
            }
            return new GrayscalePixelDataU16(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame));
        }

        if (pixelData.BitsAllocated <= 32)
        {
            if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
            {
                return new GrayscalePixelDataS32(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame));
            }
            return new GrayscalePixelDataU32(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame));
        }

        throw new DicomImagingException("Unsupported pixel data value for bits stored: {0}", pixelData.BitsStored);
    }

    if (pi == PhotometricInterpretation.Rgb || pi == PhotometricInterpretation.YbrFull
        || pi == PhotometricInterpretation.YbrFull422 || pi == PhotometricInterpretation.YbrPartial422)
    {
        var buffer = pixelData.GetFrame(frame);

        // Re-order planar (RRR..GGG..BBB) data into interleaved triplets first.
        if (pixelData.PlanarConfiguration == PlanarConfiguration.Planar)
        {
            buffer = PixelDataConverter.PlanarToInterleaved24(buffer);
        }

        // Normalize every YBR variant to interleaved RGB.
        if (pi == PhotometricInterpretation.YbrFull)
        {
            buffer = PixelDataConverter.YbrFullToRgb(buffer);
        }
        else if (pi == PhotometricInterpretation.YbrFull422)
        {
            buffer = PixelDataConverter.YbrFull422ToRgb(buffer, pixelData.Width);
        }
        else if (pi == PhotometricInterpretation.YbrPartial422)
        {
            buffer = PixelDataConverter.YbrPartial422ToRgb(buffer, pixelData.Width);
        }

        return new ColorPixelData24(pixelData.Width, pixelData.Height, buffer);
    }

    // FIX: the original had a second 'pi == YbrFull422' branch here; it was
    // unreachable because YBR_FULL_422 is already handled in the color branch
    // above, so the dead code was removed.

    throw new DicomImagingException("Unsupported pixel data photometric interpretation: {0}", pi.Value);
}
/// <summary>
/// Create <see cref="IPixelData"/> from <see cref="DicomPixelData"/>
/// according to the input <paramref name="pixelData"/> <seealso cref="PhotometricInterpretation"/>.
/// </summary>
/// <param name="pixelData">Input pixel data</param>
/// <param name="frame">Frame number (0 based)</param>
/// <returns>Implementation of <seealso cref="IPixelData"/> according to <seealso cref="PhotometricInterpretation"/></returns>
/// <exception cref="DicomImagingException">
/// Thrown when bits allocated exceeds 32 or the photometric interpretation is unsupported.
/// </exception>
public static IPixelData Create(DicomPixelData pixelData, int frame)
{
    PhotometricInterpretation pi = pixelData.PhotometricInterpretation;
    if (pi == null)
    {
        // generally ACR-NEMA: infer the interpretation from samples-per-pixel
        // and the presence of a palette color lookup table.
        var samples = pixelData.SamplesPerPixel;
        if (samples == 0 || samples == 1)
        {
            if (pixelData.Dataset.Contains(DicomTag.RedPaletteColorLookupTableData))
            {
                pi = PhotometricInterpretation.PaletteColor;
            }
            else
            {
                pi = PhotometricInterpretation.Monochrome2;
            }
        }
        else
        {
            // assume, probably incorrectly, that the image is RGB
            pi = PhotometricInterpretation.Rgb;
        }
    }

    if (pi == PhotometricInterpretation.Monochrome1 || pi == PhotometricInterpretation.Monochrome2 || pi == PhotometricInterpretation.PaletteColor)
    {
        if (pixelData.BitsAllocated <= 8)
        {
            return (new GrayscalePixelDataU8(pixelData.Width, pixelData.Height, pixelData.GetFrame(frame)));
        }
        else if (pixelData.BitsAllocated <= 16)
        {
            // Signedness selects between the S16 and U16 grayscale containers.
            if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
            {
                return (new GrayscalePixelDataS16(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame)));
            }
            else
            {
                return (new GrayscalePixelDataU16(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame)));
            }
        }
        else if (pixelData.BitsAllocated <= 32)
        {
            if (pixelData.PixelRepresentation == PixelRepresentation.Signed)
            {
                return (new GrayscalePixelDataS32(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame)));
            }
            else
            {
                return (new GrayscalePixelDataU32(pixelData.Width, pixelData.Height, pixelData.BitDepth, pixelData.GetFrame(frame)));
            }
        }
        else
        {
            // FIX: this branch is selected on BitsAllocated, so the diagnostic now
            // reports BitsAllocated. The original message reported "bits stored"
            // with BitsStored, which could print a valid-looking value while the
            // actual unsupported quantity was BitsAllocated.
            throw new DicomImagingException("Unsupported pixel data value for bits allocated: {0}", pixelData.BitsAllocated);
        }
    }
    else if (pi == PhotometricInterpretation.Rgb || pi == PhotometricInterpretation.YbrFull)
    {
        var buffer = pixelData.GetFrame(frame);

        // Re-order planar (RRR..GGG..BBB) data into interleaved RGB triplets.
        if (pixelData.PlanarConfiguration == PlanarConfiguration.Planar)
        {
            buffer = PixelDataConverter.PlanarToInterleaved24(buffer);
        }

        return (new ColorPixelData24(pixelData.Width, pixelData.Height, buffer));
    }
    else
    {
        throw new DicomImagingException("Unsupported pixel data photometric interpretation: {0}", pi.Value);
    }
}
/// <summary>
/// Builds the reactive frame source: a cold observable that, on subscription,
/// opens the configured camera on a long-running task, emits each grabbed
/// frame as a <see cref="PylonDataFrame"/>, and tears the camera down when the
/// subscription is disposed. The sequence is published and ref-counted so
/// concurrent subscribers share a single camera session.
/// </summary>
public PylonCapture()
{
    source = Observable.Create<PylonDataFrame>((observer, cancellationToken) =>
    {
        return (Task.Factory.StartNew(() =>
        {
            // Only one capture session may run at a time.
            lock (captureLock)
            {
                var configFile = ParameterFile;
                using (var camera = new Camera(SerialNumber))
                using (var converter = new PixelDataConverter())
                {
                    camera.Open();
                    // Optionally load a saved device configuration.
                    if (!string.IsNullOrEmpty(configFile))
                    {
                        camera.Parameters.Load(configFile, ParameterPath.CameraDevice);
                    }

                    try
                    {
                        camera.StreamGrabber.ImageGrabbed += (sender, e) =>
                        {
                            var result = e.GrabResult;
                            if (result.IsValid)
                            {
                                int channels;
                                IplDepth depth;
                                PixelType outputFormat;
                                var size = new Size(result.Width, result.Height);
                                // Map the camera pixel type to an IplImage depth/channel layout.
                                GetImageDepth(result.PixelTypeValue, out depth, out channels, out outputFormat);
                                converter.OutputPixelFormat = outputFormat;
                                var output = new IplImage(size, depth, channels);
                                converter.Convert(output.ImageData, output.WidthStep * output.Height, result);
                                observer.OnNext(new PylonDataFrame(output, result));
                            }
                        };

                        camera.StreamGrabber.GrabStopped += (sender, e) =>
                        {
                            // Propagate unexpected stops (e.g. device errors) to subscribers.
                            if (e.Reason != GrabStopReason.UserRequest)
                            {
                                observer.OnError(new CaptureException(e.ErrorMessage));
                            }
                        };

                        camera.Parameters[PLCamera.AcquisitionMode].SetValue(PLCamera.AcquisitionMode.Continuous);
                        camera.StreamGrabber.Start(GrabStrategy, GrabLoop.ProvidedByStreamGrabber);
                        // Block this worker until the subscription is disposed.
                        cancellationToken.WaitHandle.WaitOne();
                    }
                    finally
                    {
                        camera.StreamGrabber.Stop();
                        camera.Close();
                    }
                }
            }
        }, cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Default));
    })
    .PublishReconnectable()
    .RefCount();
}
/// <summary>
/// Converts a grab result into a 3-channel Halcon image via GenImage3.
/// NOTE(review): this routine has several serious defects flagged inline; in
/// particular it copies from null pointers and will crash — review before use.
/// </summary>
/// <param name="grabResult">The grabbed image to convert.</param>
/// <returns>The generated Halcon image, or null if any exception occurred.</returns>
public HObject BuffersToImage24PlanarColor(IGrabResult grabResult)
{
    HObject Hobj;
    HOperatorSet.GenEmptyObj(out Hobj);
    try
    {
        // NOTE(review): width/height are reduced by one pixel here — presumably
        // to pair with the "Clip" edge handling below, but confirm this is intended.
        int imageWidth = grabResult.Width - 1;
        int imageHeight = grabResult.Height - 1;
        int payloadSize = imageWidth * imageHeight;
        PixelDataConverter converter = new PixelDataConverter();
        // Do not Bayer-convert the last row and column.
        converter.Parameters[PLPixelDataConverter.InconvertibleEdgeHandling].SetValue("Clip");
        converter.OutputPixelFormat = PixelType.BGR8packed;
        byte[] buffer = new byte[payloadSize * 3];
        converter.Convert(buffer, grabResult);
        Bitmap bitmap = new Bitmap(imageWidth, imageHeight, PixelFormat.Format24bppRgb);
        BitmapData bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
        try
        {
            IntPtr ptrBmp = bmpData.Scan0;
            int imageStride = imageWidth * 3;
            if (imageStride == bmpData.Stride)
            {
                Marshal.Copy(buffer, 0, ptrBmp, bmpData.Stride * bitmap.Height);
            }
            else
            {
                /* The widths in bytes are not equal, copy line by line.
                 * This can happen if the image width is not divisible by four.*/
                for (int i = 0; i < bitmap.Height; ++i)
                {
                    Marshal.Copy(buffer, i * imageStride, new IntPtr(ptrBmp.ToInt64() + i * bmpData.Stride), imageWidth * 3);
                }
            }
            int width = grabResult.Width;
            int height = grabResult.Height;
            unsafe
            {
                int count = height * width;
                // Compute the per-channel buffers for the 3-plane Halcon image.
                byte[] r = new byte[count];
                byte[] g = new byte[count];
                byte[] b = new byte[count];
                // NOTE(review): r1/g1/b1 are null here, so the Marshal.Copy calls
                // below copy from a null pointer and will throw/crash. They appear
                // to be intended to point at pinned r/g/b arrays instead.
                byte * r1 = null;
                byte * g1 = null;
                byte * b1 = null;
                Marshal.Copy((IntPtr)r1, r, 0, r.Length);
                Marshal.Copy((IntPtr)g1, g, 0, r.Length);
                Marshal.Copy((IntPtr)b1, b, 0, r.Length);
                byte *r_c1 = r1;
                byte *g_c1 = g1;
                byte *b_c1 = b1;
                byte *p1 = (byte *)bmpData.Scan0;
                //B G R A ->R G B
                // NOTE(review): the bitmap is 24bpp BGR, yet this loop steps by 4
                // bytes (j += 4) and indexes by i * width instead of the bitmap
                // stride in bytes — the addressing does not match the pixel layout.
                for (int i = height - 1; i >= 0; i--)
                {
                    for (int j = 0; j < width; j = j + 4)
                    {
                        //R channel
                        *r_c1 = p1[i * width + (j + 2)];
                        r_c1++;
                        *g_c1 = p1[i * width + (j + 1)];
                        ++g_c1;
                        *b_c1 = p1[i * width + (j + 0)];
                        ++b_c1;
                    }
                }
                HOperatorSet.GenImage3(out Hobj, "byte", width, height, new IntPtr(r1), new IntPtr(g1), new IntPtr(b1));
            }
        }
        finally
        {
            bitmap.UnlockBits(bmpData);
        }
        return (Hobj);
    }
    catch (Exception ex)
    {
        // NOTE(review): 'ex' is swallowed and null is returned, so callers get no
        // diagnostic; 'bitmap' and 'converter' are also never disposed.
        return (null);
    }
}
/// <summary>Initializes the camera wrapper with a fresh pixel data converter.</summary>
public BaslerCamera() => converter = new PixelDataConverter();
/// <summary>
/// Encodes pixel data to JPEG-LS frame by frame using the native CharLS encoder
/// (x64 P/Invoke; Linux and Windows only).
/// </summary>
/// <param name="oldPixelData">Uncompressed source pixel data.</param>
/// <param name="newPixelData">Destination pixel data that receives the encoded frames.</param>
/// <param name="parameters">Expected to be a <see cref="DicomJpegLsParams"/>; defaults are used when null.</param>
/// <exception cref="InvalidOperationException">Thrown on any OS other than Linux or Windows.</exception>
/// <exception cref="DicomCodecException">Thrown for YBR_PARTIAL photometric interpretations.</exception>
public override unsafe void Encode(DicomPixelData oldPixelData, DicomPixelData newPixelData, DicomCodecParams parameters)
{
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux) && !RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        throw new InvalidOperationException("Unsupported OS Platform");
    }

    if ((oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrPartial422) || (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrPartial420))
    {
        throw new DicomCodecException("Photometric Interpretation '{0}' not supported by JPEG-LS encoder", oldPixelData.PhotometricInterpretation);
    }

    DicomJpegLsParams jparams = (DicomJpegLsParams)parameters;
    if (jparams == null)
    {
        jparams = (DicomJpegLsParams)GetDefaultParameters();
    }

    //IMPORT JLSPARAMETERS (DLLIMPORT)
    JlsParameters jls = new JlsParameters
    {
        width = oldPixelData.Width,
        height = oldPixelData.Height,
        bitsPerSample = oldPixelData.BitsStored,
        stride = oldPixelData.BytesAllocated * oldPixelData.Width * oldPixelData.SamplesPerPixel,
        components = oldPixelData.SamplesPerPixel,
        // Single-component data uses no interleaving; otherwise the mode follows
        // the planar configuration of the source.
        interleaveMode = oldPixelData.SamplesPerPixel == 1
            ? CharlsInterleaveModeType.None
            : oldPixelData.PlanarConfiguration == PlanarConfiguration.Interleaved
                ? CharlsInterleaveModeType.Sample
                : CharlsInterleaveModeType.Line,
        colorTransformation = CharlsColorTransformationType.None
    };

    // Near-lossless mode carries the configured allowed error; lossless leaves it at 0.
    if (TransferSyntax == DicomTransferSyntax.JPEGLSNearLossless)
    {
        jls.allowedLossyError = jparams.AllowedError;
    }

    for (int frame = 0; frame < oldPixelData.NumberOfFrames; frame++)
    {
        IByteBuffer frameData = oldPixelData.GetFrame(frame);

        //Converting photmetricinterpretation YbrFull or YbrFull422 to RGB
        if (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrFull)
        {
            frameData = PixelDataConverter.YbrFullToRgb(frameData);
            oldPixelData.PhotometricInterpretation = PhotometricInterpretation.Rgb;
        }
        else if (oldPixelData.PhotometricInterpretation == PhotometricInterpretation.YbrFull422)
        {
            frameData = PixelDataConverter.YbrFull422ToRgb(frameData, oldPixelData.Width);
            oldPixelData.PhotometricInterpretation = PhotometricInterpretation.Rgb;
        }

        // Pin the source frame and an equally sized output buffer for the native call.
        // NOTE(review): neither PinnedByteArray is disposed/unpinned in this method —
        // if PinnedByteArray wraps a GCHandle this pins memory until finalization; verify.
        PinnedByteArray frameArray = new PinnedByteArray(frameData.Data);
        byte[] jpegData = new byte[frameData.Size];
        PinnedByteArray jpegArray = new PinnedByteArray(jpegData);
        uint jpegDataSize = 0;
        char[] errorMessage = new char[256];

        // IMPORT JpegLsEncode
        unsafe
        {
            // NOTE(review): the returned CharlsApiResultType 'err' is never checked,
            // so native encode failures are silently ignored here.
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            {
                CharlsApiResultType err = JpegLSEncode_Linux64((void *)jpegArray.Pointer, checked ((uint)jpegArray.Count), &jpegDataSize, (void *)frameArray.Pointer, checked ((uint)frameArray.Count), ref jls, errorMessage);
            }
            else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                CharlsApiResultType err = JpegLSEncode_Windows64((void *)jpegArray.Pointer, checked ((uint)jpegArray.Count), &jpegDataSize, (void *)frameArray.Pointer, checked ((uint)frameArray.Count), ref jls, errorMessage);
            }

            // Trim the output buffer down to the actual compressed size.
            Array.Resize(ref jpegData, (int)jpegDataSize);

            // Large frames (>= 1 MB) or multi-frame images are buffered on disk
            // instead of in memory.
            IByteBuffer buffer;
            if (jpegDataSize >= (1 * 1024 * 1024) || oldPixelData.NumberOfFrames > 1)
            {
                buffer = new TempFileBuffer(jpegData);
            }
            else
            {
                buffer = new MemoryByteBuffer(jpegData);
            }
            buffer = EvenLengthBuffer.Create(buffer);
            newPixelData.AddFrame(buffer);
        }
    }
}