/// <summary>
/// gRPC Init handler: acknowledges the request by echoing its id back
/// in the result's Status field so the caller can correlate the reply.
/// </summary>
public override Task<InitResult> Init(InitParameters request, ServerCallContext context)
{
    var response = new InitResult { Status = request.Id };
    return Task.FromResult(response);
}
/// <summary>
/// Initializes the unmanaged layer exactly once, no matter how many
/// wrapper instances are constructed concurrently.
/// </summary>
public MyWrapper(InitParameters p)
{
    lock (lockRoot)
    {
        if (initialized)
        {
            return; // another instance already ran the native initialization
        }
        UnManagedStaticClass.Initialize(p);
        initialized = true;
    }
}
/// <summary>
/// Creates the ZED wrapper. When no parameters are supplied, a default
/// HD720/QUALITY-depth configuration in millimeters is used. An optional
/// SVO path switches the input from a live camera to file playback.
/// </summary>
public Zed(InitParameters parameters = null, string svoPath = null)
{
    camera = ZEDCamera.GetInstance();
    mats = new List<ZEDMat>();

    // Fall back to a sensible default configuration when the caller passes none.
    if (parameters == null)
    {
        parameters = new InitParameters
        {
            resolution = RESOLUTION.HD720,
            depthMode = DEPTH_MODE.QUALITY,
            depthStabilization = true,
            enableRightSideMeasure = true, // isStereoRig
            coordinateUnit = UNIT.MILLIMETER,
            depthMinimumDistance = 200f    // in millimeters, matching coordinateUnit
        };
    }

    // Optional playback source instead of a live camera.
    if (svoPath != null)
    {
        parameters.pathSVO = svoPath;
    }
    this.initParameters = parameters;

    // Depth is computed on every grab; FILL sensing mode closes depth holes.
    runtimeParameters = new RuntimeParameters
    {
        sensingMode = SENSING_MODE.FILL,
        enableDepth = true
    };

    camera.CreateCamera(true);
}
/// <summary>
/// Camera-control sample: opens the ZED, prints device information, then
/// displays the left image in an OpenCV window until 'q' is pressed,
/// forwarding other keys to the camera-settings handler.
/// </summary>
static void Main(string[] args)
{
    Camera zed = new Camera(0);
    // Depth is not needed for this sample, so it is disabled entirely.
    InitParameters initParameters = new InitParameters()
    {
        sdkVerbose = true,
        resolution = RESOLUTION.HD720,
        depthMode = DEPTH_MODE.NONE
    };
    parseArgs(args, ref initParameters);

    ERROR_CODE returnedState = zed.Open(ref initParameters);
    if (returnedState != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    string winName = "Camera control";
    Cv2.NamedWindow(winName);

    // Print camera information.
    Console.WriteLine("ZED Model                 : " + zed.GetCameraModel());
    Console.WriteLine("ZED Serial Number         : " + zed.GetZEDSerialNumber());
    Console.WriteLine("ZED Camera Firmware       : " + zed.GetCameraFirmwareVersion());
    Console.WriteLine("ZED Camera Resolution     : " + zed.GetInitParameters().resolution);
    Console.WriteLine("ZED Camera FPS            : " + zed.GetInitParameters().cameraFPS);

    // Print help control
    printHelp();

    // CPU image buffer, reused across frames.
    sl.Mat zedImage = new sl.Mat();
    zedImage.Create(new Resolution((uint)zed.ImageWidth, (uint)zed.ImageHeight), MAT_TYPE.MAT_8U_C4);

    // Initialise camera setting
    switchCameraSettings();

    char key = ' ';
    RuntimeParameters rtParams = new RuntimeParameters();
    while (key != 'q')
    {
        // Check that a new image is successfully acquired
        returnedState = zed.Grab(ref rtParams);
        if (returnedState == ERROR_CODE.SUCCESS)
        {
            // Retrieve left image
            zed.RetrieveImage(zedImage, VIEW.LEFT);
            // Wrap the ZED buffer in an OpenCV header (shares memory, no copy).
            OpenCvSharp.Mat cvImage = new OpenCvSharp.Mat(zedImage.GetHeight(), zedImage.GetWidth(), OpenCvSharp.MatType.CV_8UC4, zedImage.GetPtr());
            Cv2.ImShow(winName, cvImage);
        }
        else
        {
            Console.WriteLine("ERROR during capture");
            break;
        }
        key = (char)Cv2.WaitKey(10);
        // Change camera settings with keyboard
        updateCameraSettings(key, ref zed);
    }

    // FIX: release UI and camera resources on exit — the original leaked both
    // (window was never destroyed and the camera was never closed).
    Cv2.DestroyAllWindows();
    zed.Close();
}
/// <summary>
/// SVO-recording sample: opens the ZED in HD2K with depth disabled and
/// records every grabbed frame into the SVO file given as the first
/// argument until the Q key is pressed (or Ctrl+C fires the cleanup hook).
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 1)
    {
        Console.WriteLine("Usage : Only the path of the output SVO file should be passed as argument.");
        Environment.Exit(-1);
    }

    // Create ZED Camera
    Camera zed = new Camera(0);

    // Finalize the recording if the user interrupts with Ctrl+C.
    Console.CancelKeyPress += delegate
    {
        Console.WriteLine("close");
        zed.DisableRecording();
        zed.Close();
    };

    // Specify SVO path parameters
    InitParameters initParameters = new InitParameters()
    {
        resolution = RESOLUTION.HD2K,
        depthMode = DEPTH_MODE.NONE,
    };
    ERROR_CODE state = zed.Open(ref initParameters);
    if (state != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    string pathOutput = args[0];

    RecordingParameters recordingParams = new RecordingParameters(pathOutput, SVO_COMPRESSION_MODE.H264_BASED, 8000, 15, false);
    state = zed.EnableRecording(recordingParams);
    if (state != ERROR_CODE.SUCCESS)
    {
        zed.Close();
        Environment.Exit(-1);
    }

    // Start recording SVO, stop with Q
    Console.WriteLine("SVO is recording, press Q to stop");
    int framesRecorded = 0;
    RuntimeParameters rtParams = new RuntimeParameters();

    while (true)
    {
        // Each successfully grabbed frame is appended to the SVO file.
        if (zed.Grab(ref rtParams) == ERROR_CODE.SUCCESS)
        {
            framesRecorded++;
            Console.WriteLine("Frame count: " + framesRecorded);
        }
        // Stop as soon as the Q key goes down.
        if (System.Windows.Input.Keyboard.IsKeyDown(System.Windows.Input.Key.Q))
        {
            break;
        }
    }

    // Stop recording
    zed.DisableRecording();
    zed.Close();
}
/// <summary>
/// SVO-playback sample: plays an SVO file in an OpenCV window. Keys:
/// 's' saves the current frame as PNG, 'f'/'b' jump one second forward
/// or backward, 'q' quits. Playback loops back to frame 0 at the end.
/// </summary>
static void Main(string[] args)
{
    if (args.Length != 1)
    {
        Console.WriteLine("Usage: ");
        Console.WriteLine(" ZED_SVO_Playback <SVO_file> ");
        Console.WriteLine("* *SVO file is mandatory in the application * *");
        Environment.Exit(-1);
    }

    // Create ZED Camera
    Camera zed = new Camera(0);

    // Specify SVO path parameters
    InitParameters initParameters = new InitParameters()
    {
        inputType = INPUT_TYPE.SVO,
        pathSVO = args[0],
        svoRealTimeMode = true,
        depthMode = DEPTH_MODE.PERFORMANCE
    };
    ERROR_CODE state = zed.Open(ref initParameters);
    if (state != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    Resolution resolution = zed.GetCalibrationParameters().leftCam.resolution;

    // Define OpenCV window size: the side-by-side view is twice as wide,
    // capped at 720x404 per eye.
    Resolution lowResolution = new Resolution((uint)Math.Min(720, (int)resolution.width) * 2, (uint)Math.Min(404, (int)resolution.height));
    sl.Mat svoImage = new sl.Mat();
    svoImage.Create(lowResolution, MAT_TYPE.MAT_8U_C4);
    OpenCvSharp.Mat svoImageOCV = SLMat2CVMat(ref svoImage, MAT_TYPE.MAT_8U_C4);

    // Setup key, images, times
    char key = ' ';
    Console.WriteLine("Press 's' to save SVO image as PNG");
    Console.WriteLine("Press 'f' to jump forward in the video");
    // FIX: corrected "backard" typo in the help text.
    Console.WriteLine("Press 'b' to jump backward in the video");
    Console.WriteLine("Press 'q' to exit...");

    // One second of playback corresponds to cameraFPS frames.
    int svoFrameRate = zed.GetInitParameters().cameraFPS;
    int nbFrames = zed.GetSVONumberOfFrames();
    Console.WriteLine("[INFO] SVO contains " + nbFrames + " frames");

    RuntimeParameters rtParams = new RuntimeParameters();

    // Start SVO Playback
    while (key != 'q')
    {
        state = zed.Grab(ref rtParams);
        if (state == ERROR_CODE.SUCCESS)
        {
            // Get the side by side image
            zed.RetrieveImage(svoImage, VIEW.SIDE_BY_SIDE, MEM.CPU, lowResolution);
            int svoPosition = zed.GetSVOPosition();

            // Display the frame
            Cv2.ImShow("View", svoImageOCV);
            key = (char)Cv2.WaitKey(10);
            switch (key)
            {
                case 's':
                    svoImage.Write("capture" + svoPosition + ".png");
                    break;
                case 'f':
                    zed.SetSVOPosition(svoPosition + svoFrameRate);
                    break;
                case 'b':
                    // FIX: clamp at 0 so jumping back within the first second
                    // cannot request a negative frame index.
                    zed.SetSVOPosition(Math.Max(0, svoPosition - svoFrameRate));
                    break;
            }
            ProgressBar((float)svoPosition / (float)nbFrames, 30);
        }
        else if (zed.GetSVOPosition() >= nbFrames - (zed.GetInitParameters().svoRealTimeMode ? 2 : 1))
        {
            // Real-time playback may report end-of-file one frame early, hence the -2/-1.
            Console.WriteLine("SVO end has been reached. Looping back to 0");
            zed.SetSVOPosition(0);
        }
        else
        {
            Console.WriteLine("Grab Error : " + state);
            break;
        }
    }
    zed.Close();
}
/// <summary>
/// SVO-export sample: converts an SVO file to a side-by-side AVI (modes 0/1)
/// or to PNG image sequences (modes 2/3/4). Press Q to interrupt.
/// </summary>
static void Main(string[] args)
{
    if (args.Length != 3)
    {
        Console.WriteLine("Usage: ");
        Console.WriteLine(" ZED_SVO_Export A B C ");
        Console.WriteLine("Please use the following parameters from the command line:");
        Console.WriteLine(" A - SVO file path (input) : \"path/to/file.svo\"");
        Console.WriteLine(" B - AVI file path (output) or image sequence folder(output) : \"path/to/output/file.avi\" or \"path/to/output/folder\"");
        Console.WriteLine(" C - Export mode: 0=Export LEFT+RIGHT AVI.");
        Console.WriteLine(" 1=Export LEFT+DEPTH_VIEW AVI.");
        Console.WriteLine(" 2=Export LEFT+RIGHT image sequence.");
        Console.WriteLine(" 3=Export LEFT+DEPTH_VIEW image sequence.");
        Console.WriteLine(" 4=Export LEFT+DEPTH_16Bit image sequence.");
        Console.WriteLine(" A and B need to end with '/' or '\\'\n\n");
        Console.WriteLine("Examples: \n");
        Console.WriteLine(" (AVI LEFT+RIGHT) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/file.avi\" 0");
        Console.WriteLine(" (AVI LEFT+DEPTH) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/file.avi\" 1");
        Console.WriteLine(" (SEQUENCE LEFT+RIGHT) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 2");
        Console.WriteLine(" (SEQUENCE LEFT+DEPTH) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 3");
        Console.WriteLine(" (SEQUENCE LEFT+DEPTH_16Bit) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 4");
        Environment.Exit(-1);
    }

    string svoInputPath = args[0];
    string outputPath = args[1];
    bool outputAsVideo = true;

    // Decode the export mode: 0/2 = LEFT+RIGHT, 1/3 = LEFT+DEPTH view, 4 = 16-bit depth.
    APP_TYPE appType = APP_TYPE.LEFT_AND_RIGHT;
    if (args[2].Equals("1") || args[2].Equals("3"))
    {
        appType = APP_TYPE.LEFT_AND_DEPTH;
    }
    if (args[2].Equals("4"))
    {
        appType = APP_TYPE.LEFT_AND_DEPTH_16;
    }
    // Check if exporting to AVI or SEQUENCE (only modes 0 and 1 are video).
    if (!args[2].Equals("0") && !args[2].Equals("1"))
    {
        outputAsVideo = false;
    }
    if (!outputAsVideo && !Directory.Exists(outputPath))
    {
        Console.WriteLine("Input directory doesn't exist. Check permissions or create it.\n " + outputPath);
        Environment.Exit(-1);
    }
    if (!outputAsVideo && outputPath.Substring(outputPath.Length - 1) != "/" && outputPath.Substring(outputPath.Length - 1) != "\\")
    {
        Console.WriteLine("Error: output folder needs to end with '/' or '\\'." + outputPath);
        Environment.Exit(-1);
    }

    // Create ZED Camera
    Camera zed = new Camera(0);

    // Specify SVO path parameters
    InitParameters initParameters = new InitParameters()
    {
        inputType = INPUT_TYPE.SVO,
        pathSVO = svoInputPath,
        svoRealTimeMode = true,
        coordinateUnits = UNIT.MILLIMETER
    };
    ERROR_CODE zedOpenState = zed.Open(ref initParameters);
    if (zedOpenState != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    Resolution imageSize = zed.GetCalibrationParameters().leftCam.resolution;

    // SL buffers plus OpenCV headers sharing their memory (no copies).
    sl.Mat leftImage = new sl.Mat();
    leftImage.Create(imageSize, MAT_TYPE.MAT_8U_C4);
    OpenCvSharp.Mat leftImageOCV = SLMat2CVMat(ref leftImage, MAT_TYPE.MAT_8U_C4);

    sl.Mat rightImage = new sl.Mat();
    rightImage.Create(imageSize, MAT_TYPE.MAT_8U_C4);
    OpenCvSharp.Mat rightImageOCV = SLMat2CVMat(ref rightImage, MAT_TYPE.MAT_8U_C4);

    sl.Mat depthImage = new sl.Mat();
    depthImage.Create(imageSize, MAT_TYPE.MAT_32F_C1);
    // NOTE(review): the 32F depth buffer is wrapped as 8U_C4 (same 4 bytes/pixel);
    // the 16-bit export path converts it before writing — confirm SLMat2CVMat intent.
    OpenCvSharp.Mat depthImageOCV = SLMat2CVMat(ref depthImage, MAT_TYPE.MAT_8U_C4);

    OpenCvSharp.Mat imageSideBySide = new OpenCvSharp.Mat();
    if (outputAsVideo)
    {
        imageSideBySide = new OpenCvSharp.Mat((int)imageSize.height, (int)imageSize.width * 2, OpenCvSharp.MatType.CV_8UC3);
    }

    // Create Video writer
    OpenCvSharp.VideoWriter videoWriter = new OpenCvSharp.VideoWriter();
    if (outputAsVideo)
    {
        int fourcc = OpenCvSharp.VideoWriter.FourCC('M', '4', 'S', '2'); // MPEG-4 part 2 codec
        int frameRate = Math.Max(zed.GetInitParameters().cameraFPS, 25); // Minimum write rate in OpenCV is 25
        Console.WriteLine(outputPath);
        videoWriter.Open(outputPath, fourcc, frameRate, new OpenCvSharp.Size((int)imageSize.width * 2, (int)imageSize.height));
        if (!videoWriter.IsOpened())
        {
            Console.WriteLine("Error: OpenCV video writer cannot be opened.\n Please check the .avi file path and write permissions.");
            zed.Close();
            Environment.Exit(-1);
        }
    }

    RuntimeParameters rtParams = new RuntimeParameters();
    rtParams.sensingMode = SENSING_MODE.FILL;

    // Start SVO conversion to AVI/SEQUENCE
    // FIX: corrected "interupt" typo in the user message.
    Console.WriteLine("Converting SVO... press Q to interrupt conversion");

    int nbFrames = zed.GetSVONumberOfFrames();
    int svoPosition = 0;
    zed.SetSVOPosition(svoPosition);

    while (!exit_app)
    {
        exit_app = System.Windows.Input.Keyboard.IsKeyDown(System.Windows.Input.Key.Q);
        ERROR_CODE err = zed.Grab(ref rtParams);
        if (err == ERROR_CODE.SUCCESS)
        {
            svoPosition = zed.GetSVOPosition();

            // Retrieve SVO images
            zed.RetrieveImage(leftImage, VIEW.LEFT);
            switch (appType)
            {
                case APP_TYPE.LEFT_AND_RIGHT:
                    zed.RetrieveImage(rightImage, VIEW.RIGHT);
                    break;
                case APP_TYPE.LEFT_AND_DEPTH:
                    zed.RetrieveImage(rightImage, VIEW.DEPTH);
                    break;
                case APP_TYPE.LEFT_AND_DEPTH_16:
                    zed.RetrieveMeasure(depthImage, MEASURE.DEPTH);
                    break;
                default:
                    break;
            }

            if (outputAsVideo)
            {
                // Convert SVO images from RGBA to RGB directly into the two halves
                // of the side-by-side frame, then write it to the video.
                Cv2.CvtColor(leftImageOCV, imageSideBySide[new OpenCvSharp.Rect(0, 0, (int)imageSize.width, (int)imageSize.height)], ColorConversionCodes.BGRA2BGR);
                Cv2.CvtColor(rightImageOCV, imageSideBySide[new OpenCvSharp.Rect((int)imageSize.width, 0, (int)imageSize.width, (int)imageSize.height)], ColorConversionCodes.BGRA2BGR);
                videoWriter.Write(imageSideBySide);
            }
            else
            {
                // Generate filenames
                string filename1 = outputPath + "/left" + svoPosition + ".png";
                string filename2 = outputPath + (appType == APP_TYPE.LEFT_AND_RIGHT ? "/right" : "/depth") + svoPosition + ".png";

                // Save Left images
                Cv2.ImWrite(filename1, leftImageOCV);

                // Save depth
                if (appType != APP_TYPE.LEFT_AND_DEPTH_16)
                {
                    Cv2.ImWrite(filename2, rightImageOCV);
                }
                else
                {
                    // Convert to 16 bit
                    OpenCvSharp.Mat depth16 = new OpenCvSharp.Mat();
                    depthImageOCV.ConvertTo(depth16, MatType.CV_16UC1);
                    Cv2.ImWrite(filename2, depth16);
                }
            }

            // Display Progress
            ProgressBar((float)svoPosition / (float)nbFrames, 30);
        }
        else if (zed.GetSVOPosition() >= nbFrames - (zed.GetInitParameters().svoRealTimeMode ? 2 : 1))
        {
            // FIX: the original called Environment.Exit(-1) here, which skipped
            // videoWriter.Release() and zed.Close() and could leave the output
            // AVI unfinalized. Reaching the SVO end is the normal completion
            // path — just leave the loop and fall through to cleanup.
            Console.WriteLine("SVO end has been reached. Exiting now.");
            exit_app = true;
        }
        else
        {
            Console.WriteLine("Grab Error : " + err);
            exit_app = true;
        }
    }

    if (outputAsVideo)
    {
        // Close the video writer
        videoWriter.Release();
    }
    zed.Close();
}
/// <summary>
/// Sets up the object-detection demo: opens a ZED2 in HD1080/ULTRA depth,
/// enables positional tracking and the object-detection module (optionally
/// batched), allocates the display/point-cloud buffers (the left image and
/// tracks view share one composite OpenCV image), and creates the OpenCV
/// and OpenGL windows. Exits the process if the camera fails to open.
/// </summary>
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 10f;
    init_params.cameraDisableSelfCalib = true;
    // Remembered before parseArgs so the tracks view scale matches the default
    // unless overridden — NOTE(review): parseArgs may change depthMaximumDistance
    // afterwards without updating maxDepthDistance; confirm that is intended.
    maxDepthDistance = init_params.depthMaximumDistance;
    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }
    // This sample only supports the ZED2 model.
    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // Enable tracking (mandatory for object detection)
    Quaternion quat = Quaternion.Identity;
    Vector3 vec = Vector3.Zero;
    zedCamera.EnablePositionalTracking(ref quat, ref vec);

    runtimeParameters = new RuntimeParameters();

    // Enable the Objects detection module
    ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();
    obj_det_params.enableObjectTracking = true; // the object detection will track objects across multiple images, instead of an image-by-image basis
    isTrackingON = obj_det_params.enableObjectTracking;
    obj_det_params.enable2DMask = false;
    obj_det_params.imageSync = true; // the object detection is synchronized to the image
    obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;
    if (USE_BATCHING)
    {
        // Batching trades latency for smoother, re-identified trajectories;
        // the handler buffers twice the configured latency.
        batchParameters = new BatchParameters();
        batchParameters.latency = 2.0f;
        batchParameters.enable = true;
        batchHandler = new BatchSystemHandler((int)batchParameters.latency * 2);
        obj_det_params.batchParameters = batchParameters;
    }
    zedCamera.EnableObjectDetection(ref obj_det_params);

    // Configure object detection runtime parameters
    obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    detection_confidence = 60;
    obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
    // Only the PERSON class is enabled in the filter (non-zero = enabled).
    obj_runtime_parameters.objectClassFilter = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);
    //obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(true);
    // To set a specific threshold
    obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = detection_confidence;
    //obj_runtime_parameters.object_confidence_threshold[(int)sl.OBJECT_CLASS.VEHICLE] = detection_confidence;

    // Create ZED Objects filled in the main loop
    objects = new Objects();
    imageLeft = new sl.Mat();
    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    // Display at most 1280x720; the tracks (birds-eye) panel is 400 px wide.
    displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
    Resolution tracksRes = new Resolution(400, (uint)displayRes.height);
    // create a global image to store both image and tracks view
    globalImage = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width + (int)tracksRes.width, OpenCvSharp.MatType.CV_8UC4);
    // retrieve ref on image part
    imageLeftOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect(0, 0, (int)displayRes.width, (int)displayRes.height));
    // retrieve ref on tracks part
    imageTrackOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect((int)displayRes.width, 0, (int)tracksRes.width, (int)tracksRes.height));
    // init an sl::Mat from the ocv image ref (which is in fact the memory of global_image)
    imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);
    imageRenderLeft = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());
    // Scale factor from full camera resolution to the display resolution,
    // used to map detection boxes onto the rendered image.
    imgScale = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);

    // Create OpenGL Viewer
    viewer = new GLViewer();
    camWorldPose = new Pose();
    camCameraPose = new Pose();
    // Point cloud for the 3D view, downsampled to at most 720x404.
    pointCloud = new sl.Mat();
    pcRes = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
    pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

    // 2D tracks
    trackViewGenerator = new TrackingViewer(tracksRes, (int)zedCamera.GetCameraFPS(), maxDepthDistance, 3);
    trackViewGenerator.setCameraCalibration(zedCamera.GetCalibrationParameters());

    window_name = "ZED| 2D View and Birds view";
    Cv2.NamedWindow(window_name, WindowMode.Normal);// Create Window
    // The trackbar writes directly into detection_confidence.
    Cv2.CreateTrackbar("Confidence", window_name, ref detection_confidence, 100);

    // Create OpenGL window
    CreateWindow();
}
/// <summary>
/// Builds the MQS log-process initialization parameters from station/equipment
/// configuration (via CItemListEquip) and calls logProcess.Init.
/// Returns the status code from Init. (Original comment: "Padrão MQS de log"
/// — MQS logging standard.)
/// </summary>
/// NOTE(review): strErrorMessage is a by-value parameter; the assignment below
/// and the `out strErrorMessage` at the Init call are invisible to the caller.
/// It likely should be declared `out string strErrorMessage` — confirm callers
/// before changing the signature.
public int LoadLogResult(string strErrorMessage) // MQS log standard
{
    strErrorMessage = string.Empty;
    int nStatus = 0;
    string strStationCode;
    string strStnId;
    string strStnLine;

    // StringBuilders are filled as out-style buffers by the CItemListEquip getters.
    StringBuilder strStationId = new StringBuilder(56);
    StringBuilder strStationLine = new StringBuilder(56);
    StringBuilder strStationType = new StringBuilder(56);
    StringBuilder strMQSDataFeedLogPath = new StringBuilder(56);
    StringBuilder strMQSDataTempLogPath = new StringBuilder(56);
    StringBuilder strLogFormat = new StringBuilder(256);
    StringBuilder strServerURL = new StringBuilder(256);

    CItemListEquip.GetStationLine(strStationLine);
    CItemListEquip.GetStationType(strStationType);
    CItemListEquip.GetStationID(strStationId);
    CItemListEquip.GetMQSDataFeedLogPath(strMQSDataFeedLogPath);
    CItemListEquip.GetMQSDataTempLogPath(strMQSDataTempLogPath);
    CItemListEquip.GetAddTestLogFormat(strLogFormat);
    CItemListEquip.GetAddTestURL(strServerURL);

    strStnId = strStationId.ToString();
    strStnLine = strStationLine.ToString();
    // Station code is "<line>-<id>".
    strStationCode = strStnLine + "-" + strStnId;

    InitParameters parameters = new InitParameters();
    parameters.computerMacAddress = GetMyMACAddress();
    parameters.fixtureId = "*";
    parameters.processCode = strStationType.ToString();
    parameters.siteCode = "JAG";
    parameters.softwareReleaseVersion = "V1.0";
    parameters.stationCode = strStationCode;
    parameters.headerVersion = "TH4";
    parameters.testHeaderVersion = "TR1";
    parameters.logFormat = LogFileFormat.MQS;
    parameters.feedPath = strMQSDataFeedLogPath.ToString();
    parameters.temporaryPath = strMQSDataTempLogPath.ToString();

    // To save the log locally (original comment: "Para salvar o log LOcal").
    // NOTE(review): @"C:SambaDtv\Prod\temp" has no backslash after "C:", so it
    // is a drive-relative path — verify whether @"C:\SambaDtv\Prod\temp" was meant.
    if (CItemListEquip.IsLogData() == 0)
    {
        parameters.feedPath = @"C:SambaDtv\Prod\temp";
    }

    // Select log format (original comment: "Tipos de lOG ...FF" — FF log types).
    if (strLogFormat.ToString().Equals("MQS"))
    {
        parameters.logFormat = LogFileFormat.MQS;
    }
    else if (strLogFormat.ToString().Equals("MQS_AND_HTTP_POST"))
    {
        parameters.logFormat = LogFileFormat.MQS_AND_HTTP_POST;
    }
    else if (strLogFormat.ToString().Equals("HTTP_POST"))
    {
        parameters.logFormat = LogFileFormat.HTTP_POST;
    }
    else
    {
        // Unknown format silently keeps the MQS default set above.
        // TODO: need correction
        //MessageBox.Show("FUNCTION LoadLogResult() Error:\n\rLog Format not valid-->" + strLogFormat.ToString());
        //Application.Exit();
    }

    if (strLogFormat.ToString().Contains("HTTP"))
    {
        if (strServerURL.Length == 0)
        {
            // An empty URL is currently not treated as fatal.
            // TODO: need correction
            //MessageBox.Show("FUNCTION LoadLogResult() Error:\n\rHTTP POST URL Not Valid-->" + strServerURL.ToString());
            //Application.Exit();
        }
        parameters.serverUri = @strServerURL.ToString(); //HTTP JAG
        //parameters.serverUri = @"http://jagnt001.americas.ad.flextronics.com/FF_Http_AutoTester/default.aspx";
    }

    nStatus = logProcess.Init(parameters, out strErrorMessage);
    return(nStatus);
}
/// <summary>
/// Captures the initialization parameters and settings for later use
/// by the wrapper; performs no other work.
/// </summary>
public MyWrapper(InitParameters initParameters, Settings settings)
{
    this.settings = settings;
    this.initParameters = initParameters;
}
/// <summary>
/// WPF viewer window: opens the ZED in HD1080/ULTRA depth, then starts a
/// background task that grabs frames, copies each retrieved image into a
/// WriteableBitmap, and pushes the update to the UI thread until the
/// cancellation token is signalled. Exits the process if the camera
/// cannot be opened.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.resolution = RESOLUTION.HD1080;

    // Open the camera
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Create a ZEDMat to handle the images
    Mat zedMat = new Mat();
    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    Resolution res = new Resolution((uint)Width, (uint)Height);
    zedMat.Create(res, MAT_TYPE.MAT_8U_C4, MEM.CPU);

    // Create a writeable bitmap to handle the image we need to show
    WriteableBitmap writeableBitmap = new WriteableBitmap(Width, Height, 96, 96, PixelFormats.Bgra32, null);
    Image.Source = writeableBitmap;

    // Row stride and buffer size for the BGRA32 bitmap.
    int stride = (Width * writeableBitmap.Format.BitsPerPixel + 7) / 8;
    int bufferSize = Height * stride;
    byte[] Pixels;
    Int32Rect rect = new Int32Rect(0, 0, Width, Height);

    // Capture loop runs off the UI thread; only the pixel write is marshalled
    // back via the Dispatcher.
    Task.Factory.StartNew(() =>
    {
        while (!tokenSource.Token.IsCancellationRequested)
        {
            isRunning = true;
            // Grab an image
            if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
            {
                // A new image is available if grab() returns ERROR_CODE::SUCCESS
                if (zedMat.IsInit())
                {
                    err = zedCamera.RetrieveImage(zedMat, view, MEM.CPU, res);
                }
                // NOTE(review): if zedMat.IsInit() is false, `err` still holds
                // its previous value (SUCCESS after Open), so stale pixels
                // could be pushed — confirm IsInit() can only be true here.
                if (err == ERROR_CODE.SUCCESS)
                {
                    // Convert IntPtr buffer into byte array
                    Pixels = ZEDMat2ByteArray(zedMat);
                    // Draw on Renderer
                    Dispatcher.BeginInvoke(new Action(() =>
                    {
                        writeableBitmap.Lock();
                        writeableBitmap.WritePixels(rect, Pixels, stride, 0);
                        writeableBitmap.AddDirtyRect(rect);
                        writeableBitmap.Unlock();
                    }), DispatcherPriority.Render);
                }
            }
            isRunning = false;
        }
    }, tokenSource.Token);
}