// Snapshots the given depth map into an internally-owned sl.Mat and stores it in
// depthMap_ms, keyed by the frame timestamp converted to milliseconds. Entries
// older than twice the batch retention window are freed and evicted so the map
// stays bounded while the batching system catches up.
// ts    : frame timestamp (units consumed by Utils.getMilliseconds).
// depth : depth map to copy; the caller keeps ownership of this Mat.
void ingestDepthInMap(ulong ts, ref sl.Mat depth)
{
    sl.Mat newDepth = new Mat();
    newDepth.Create(depth.GetResolution(), MAT_TYPE.MAT_32F_C4);
    depth.CopyTo(newDepth);

    // FIX: Utils.getMilliseconds(ts) was recomputed inside the eviction loop for
    // every key; compute it once.
    ulong tsMs = Utils.getMilliseconds(ts);
    depthMap_ms[tsMs] = newDepth;

    // Evict entries older than 2x the retention window. FIX: guard the unsigned
    // subtraction so a small timestamp cannot underflow and evict everything.
    ulong retentionMs = (ulong)batchDataRetention * 1000 * 2;
    ulong cutoff = tsMs > retentionMs ? tsMs - retentionMs : 0;
    List<ulong> removals = new List<ulong>();
    foreach (ulong key in depthMap_ms.Keys)
    {
        if (key < cutoff)
        {
            removals.Add(key);
        }
    }
    foreach (ulong key in removals)
    {
        depthMap_ms[key].Free(); // release the SDK-side buffer before dropping the entry
        depthMap_ms.Remove(key);
    }
}
// Snapshots the given left image into an internally-owned sl.Mat and stores it
// in imageMap_ms, keyed by the frame timestamp converted to milliseconds.
// Entries older than twice the batch retention window are freed and evicted so
// the map stays bounded while the batching system catches up.
// ts    : frame timestamp (units consumed by Utils.getMilliseconds).
// image : image to copy; the caller keeps ownership of this Mat.
void ingestImageInMap(ulong ts, ref sl.Mat image)
{
    sl.Mat newImage = new Mat();
    newImage.Create(image.GetResolution(), MAT_TYPE.MAT_8U_C4);
    image.CopyTo(newImage);

    // FIX: Utils.getMilliseconds(ts) was recomputed inside the eviction loop for
    // every key; compute it once.
    ulong tsMs = Utils.getMilliseconds(ts);
    imageMap_ms[tsMs] = newImage;

    // Evict entries older than 2x the retention window. FIX: guard the unsigned
    // subtraction so a small timestamp cannot underflow and evict everything.
    ulong retentionMs = (ulong)batchDataRetention * 1000 * 2;
    ulong cutoff = tsMs > retentionMs ? tsMs - retentionMs : 0;
    List<ulong> removals = new List<ulong>();
    foreach (ulong key in imageMap_ms.Keys)
    {
        if (key < cutoff)
        {
            removals.Add(key);
        }
    }
    foreach (ulong key in removals)
    {
        imageMap_ms[key].Free(); // release the SDK-side buffer before dropping the entry
        imageMap_ms.Remove(key);
    }
}
// Object-detection sample setup: opens a ZED2 camera, enables positional
// tracking (required by the object-detection module), enables accurate
// multi-class detection restricted to persons, allocates the image buffer,
// and creates the OpenGL viewer window.
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 15f;
    parseArgs(args, ref init_params); // command-line overrides (resolution/SVO/IP)

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // This sample relies on ZED2-only features (object detection).
    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // Enable tracking (mandatory for object detection)
    Quaternion quat = Quaternion.Identity;
    Vector3 vec = Vector3.Zero;
    zedCamera.EnablePositionalTracking(ref quat, ref vec);

    runtimeParameters = new RuntimeParameters();

    // Enable the Objects detection module
    ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();
    obj_det_params.enableObjectTracking = true; // the object detection will track objects across multiple images, instead of an image-by-image basis
    obj_det_params.enable2DMask = false;
    obj_det_params.enableBodyFitting = true; // smooth skeletons moves
    obj_det_params.imageSync = true; // the object detection is synchronized to the image
    obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;
    zedCamera.EnableObjectDetection(ref obj_det_params);

    // Create ZED Objects filled in the main loop
    object_frame = new Objects();
    zedMat = new Mat();
    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    Resolution res = new Resolution((uint)Width, (uint)Height);
    zedMat.Create(res, MAT_TYPE.MAT_8U_C4, MEM.CPU);

    // Create OpenGL Viewer
    viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

    // Configure object detection runtime parameters
    obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    obj_runtime_parameters.detectionConfidenceThreshold = 35;
    // A non-zero entry in objectClassFilter enables that class; only PERSON here.
    obj_runtime_parameters.objectClassFilter = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);
    // To set a specific threshold
    obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = 35;

    // Create OpenGL window
    CreateWindow();
}
// Camera-control sample: opens the ZED (depth disabled), prints camera
// information, then live-displays the left image in an OpenCV window while the
// user tweaks camera settings with the keyboard, until 'q' is pressed.
static void Main(string[] args)
{
    Camera zed = new Camera(0);
    InitParameters initParameters = new InitParameters()
    {
        sdkVerbose = true,
        resolution = RESOLUTION.HD720,
        depthMode = DEPTH_MODE.NONE // depth is not needed to control camera settings
    };
    parseArgs(args, ref initParameters);

    ERROR_CODE returnedState = zed.Open(ref initParameters);
    if (returnedState != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    string winName = "Camera control";
    Cv2.NamedWindow(winName);

    Console.WriteLine("ZED Model : " + zed.GetCameraModel());
    Console.WriteLine("ZED Serial Number : " + zed.GetZEDSerialNumber());
    Console.WriteLine("ZED Camera Firmware : " + zed.GetCameraFirmwareVersion());
    Console.WriteLine("ZED Camera Resolution : " + zed.GetInitParameters().resolution);
    Console.WriteLine("ZED Camera FPS : " + zed.GetInitParameters().cameraFPS);

    // Print help control
    printHelp();

    sl.Mat zedImage = new sl.Mat();
    zedImage.Create(new Resolution((uint)zed.ImageWidth, (uint)zed.ImageHeight), MAT_TYPE.MAT_8U_C4);

    // Initialise camera setting
    switchCameraSettings();

    char key = ' ';
    RuntimeParameters rtParams = new RuntimeParameters();
    while (key != 'q')
    {
        // Check that a new image is successfully acquired
        returnedState = zed.Grab(ref rtParams);
        if (returnedState == ERROR_CODE.SUCCESS)
        {
            // Retrieve left image
            zed.RetrieveImage(zedImage, VIEW.LEFT);
            // Convert to cvMat (zero-copy wrapper over the sl.Mat buffer)
            OpenCvSharp.Mat cvImage = new OpenCvSharp.Mat(zedImage.GetHeight(), zedImage.GetWidth(), OpenCvSharp.MatType.CV_8UC4, zedImage.GetPtr());
            Cv2.ImShow(winName, cvImage);
        }
        else
        {
            Console.WriteLine("ERROR during capture");
            break;
        }
        key = (char)Cv2.WaitKey(10);
        // Change camera settings with keyboard
        updateCameraSettings(key, ref zed);
    }

    // FIX: the camera was never released; close it on exit like the other
    // samples in this project do.
    zed.Close();
}
// Plane-detection sample setup: opens a ZED2 camera, checks IMU availability,
// enables positional tracking with world-frame measures, preallocates the plane
// mesh buffers, and creates the OpenGL viewer window.
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.sdkVerbose = true;
    parseArgs(args, ref init_params); // command-line overrides (resolution/SVO/IP)

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // This sample relies on ZED2-only features.
    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // No plane found yet; tracking state is updated every frame in the main loop.
    findPlaneStatus = ERROR_CODE.FAILURE;
    tracking_state = POSITIONAL_TRACKING_STATE.OFF;
    // Gyroscope availability is used as the indicator that an IMU is present.
    hasIMU = zedCamera.GetSensorsConfiguration().gyroscope_parameters.isAvailable;
    userAction = new UserAction();

    // Enable tracking
    PositionalTrackingParameters positionalTrackingParameters = new PositionalTrackingParameters();
    zedCamera.EnablePositionalTracking(ref positionalTrackingParameters);

    runtimeParameters = new RuntimeParameters();
    runtimeParameters.measure3DReferenceFrame = REFERENCE_FRAME.WORLD; // measures expressed in the world frame

    // Create ZED Objects filled in the main loop
    zedMat = new Mat();
    cam_pose = new Pose();

    // Create mesh. Fixed-capacity buffers refilled on each plane detection.
    planeMeshTriangles = new int[65000];
    planeMeshVertices = new Vector3[65000];
    plane = new PlaneData();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    Resolution res = new Resolution((uint)Width, (uint)Height);
    zedMat.Create(res, MAT_TYPE.MAT_8U_C4, MEM.CPU);

    // Create OpenGL Viewer
    viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

    // Create OpenGL window
    CreateWindow();
}
// SVO-playback sample: plays an SVO file and displays the side-by-side view.
// Keys: 's' saves the current frame as PNG, 'f'/'b' jump ~1 second forward/
// backward (one camera-FPS worth of frames), 'q' quits. Playback loops back to
// frame 0 when the end of the file is reached.
static void Main(string[] args)
{
    if (args.Length != 1)
    {
        Console.WriteLine("Usage: ");
        Console.WriteLine(" ZED_SVO_Playback <SVO_file> ");
        Console.WriteLine("* *SVO file is mandatory in the application * *");
        Environment.Exit(-1);
    }

    // Create ZED Camera
    Camera zed = new Camera(0);

    // Specify SVO path parameters
    InitParameters initParameters = new InitParameters()
    {
        inputType = INPUT_TYPE.SVO,
        pathSVO = args[0],
        svoRealTimeMode = true,
        depthMode = DEPTH_MODE.PERFORMANCE
    };

    ERROR_CODE state = zed.Open(ref initParameters);
    if (state != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    Resolution resolution = zed.GetCalibrationParameters().leftCam.resolution;

    // Define OpenCV window size (resize to max 720/404); width is doubled for the side-by-side view
    Resolution lowResolution = new Resolution((uint)Math.Min(720, (int)resolution.width) * 2, (uint)Math.Min(404, (int)resolution.height));
    sl.Mat svoImage = new sl.Mat();
    svoImage.Create(lowResolution, MAT_TYPE.MAT_8U_C4);
    OpenCvSharp.Mat svoImageOCV = SLMat2CVMat(ref svoImage, MAT_TYPE.MAT_8U_C4);

    // Setup key, images, times
    char key = ' ';
    Console.WriteLine("Press 's' to save SVO image as PNG");
    Console.WriteLine("Press 'f' to jump forward in the video");
    // FIX: typo in user-facing message ("backard" -> "backward").
    Console.WriteLine("Press 'b' to jump backward in the video");
    Console.WriteLine("Press 'q' to exit...");

    int svoFrameRate = zed.GetInitParameters().cameraFPS;
    int nbFrames = zed.GetSVONumberOfFrames();
    Console.WriteLine("[INFO] SVO contains " + nbFrames + " frames");

    RuntimeParameters rtParams = new RuntimeParameters();

    // Start SVO Playback
    while (key != 'q')
    {
        state = zed.Grab(ref rtParams);
        if (state == ERROR_CODE.SUCCESS)
        {
            // Get the side by side image
            zed.RetrieveImage(svoImage, VIEW.SIDE_BY_SIDE, MEM.CPU, lowResolution);
            int svoPosition = zed.GetSVOPosition();

            // Display the frame
            Cv2.ImShow("View", svoImageOCV);
            key = (char)Cv2.WaitKey(10);

            switch (key)
            {
                case 's':
                    svoImage.Write("capture" + svoPosition + ".png");
                    break;
                case 'f':
                    // Jump one camera-FPS worth of frames (~1 second) forward.
                    zed.SetSVOPosition(svoPosition + svoFrameRate);
                    break;
                case 'b':
                    zed.SetSVOPosition(svoPosition - svoFrameRate);
                    break;
            }
            ProgressBar((float)svoPosition / (float)nbFrames, 30);
        }
        else if (zed.GetSVOPosition() >= nbFrames - (zed.GetInitParameters().svoRealTimeMode ? 2 : 1))
        {
            // End of file reached: restart playback from the first frame.
            Console.WriteLine("SVO end has been reached. Looping back to 0");
            zed.SetSVOPosition(0);
        }
        else
        {
            Console.WriteLine("Grab Error : " + state);
            break;
        }
    }
    zed.Close();
}
// SVO-export sample: converts an SVO file either to a side-by-side AVI video
// (LEFT+RIGHT or LEFT+DEPTH_VIEW) or to PNG image sequences (LEFT+RIGHT,
// LEFT+DEPTH_VIEW, or LEFT+16-bit DEPTH). Holding Q interrupts the conversion.
static void Main(string[] args)
{
    if (args.Length != 3)
    {
        Console.WriteLine("Usage: ");
        Console.WriteLine(" ZED_SVO_Export A B C ");
        Console.WriteLine("Please use the following parameters from the command line:");
        Console.WriteLine(" A - SVO file path (input) : \"path/to/file.svo\"");
        Console.WriteLine(" B - AVI file path (output) or image sequence folder(output) : \"path/to/output/file.avi\" or \"path/to/output/folder\"");
        Console.WriteLine(" C - Export mode: 0=Export LEFT+RIGHT AVI.");
        Console.WriteLine(" 1=Export LEFT+DEPTH_VIEW AVI.");
        Console.WriteLine(" 2=Export LEFT+RIGHT image sequence.");
        Console.WriteLine(" 3=Export LEFT+DEPTH_VIEW image sequence.");
        Console.WriteLine(" 4=Export LEFT+DEPTH_16Bit image sequence.");
        Console.WriteLine(" A and B need to end with '/' or '\\'\n\n");
        Console.WriteLine("Examples: \n");
        Console.WriteLine(" (AVI LEFT+RIGHT) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/file.avi\" 0");
        Console.WriteLine(" (AVI LEFT+DEPTH) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/file.avi\" 1");
        Console.WriteLine(" (SEQUENCE LEFT+RIGHT) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 2");
        Console.WriteLine(" (SEQUENCE LEFT+DEPTH) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 3");
        Console.WriteLine(" (SEQUENCE LEFT+DEPTH_16Bit) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 4");
        Environment.Exit(-1);
    }

    string svoInputPath = args[0];
    string outputPath = args[1];
    bool outputAsVideo = true;
    APP_TYPE appType = APP_TYPE.LEFT_AND_RIGHT;
    if (args[2].Equals("1") || args[2].Equals("3"))
    {
        appType = APP_TYPE.LEFT_AND_DEPTH;
    }
    if (args[2].Equals("4"))
    {
        appType = APP_TYPE.LEFT_AND_DEPTH_16;
    }

    // Check if exporting to AVI or SEQUENCE
    if (!args[2].Equals("0") && !args[2].Equals("1"))
    {
        outputAsVideo = false;
    }
    if (!outputAsVideo && !Directory.Exists(outputPath))
    {
        // FIX: the message said "Input directory" but this check is on the OUTPUT folder.
        Console.WriteLine("Output directory doesn't exist. Check permissions or create it. " + outputPath);
        Environment.Exit(-1);
    }
    if (!outputAsVideo && outputPath.Substring(outputPath.Length - 1) != "/" && outputPath.Substring(outputPath.Length - 1) != "\\")
    {
        Console.WriteLine("Error: output folder needs to end with '/' or '\\'." + outputPath);
        Environment.Exit(-1);
    }

    // Create ZED Camera
    Camera zed = new Camera(0);

    // Specify SVO path parameters
    InitParameters initParameters = new InitParameters()
    {
        inputType = INPUT_TYPE.SVO,
        pathSVO = svoInputPath,
        svoRealTimeMode = true,
        coordinateUnits = UNIT.MILLIMETER // depth in mm so it fits a 16-bit PNG
    };

    ERROR_CODE zedOpenState = zed.Open(ref initParameters);
    if (zedOpenState != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    Resolution imageSize = zed.GetCalibrationParameters().leftCam.resolution;

    sl.Mat leftImage = new sl.Mat();
    leftImage.Create(imageSize, MAT_TYPE.MAT_8U_C4);
    OpenCvSharp.Mat leftImageOCV = SLMat2CVMat(ref leftImage, MAT_TYPE.MAT_8U_C4);

    sl.Mat rightImage = new sl.Mat();
    rightImage.Create(imageSize, MAT_TYPE.MAT_8U_C4);
    OpenCvSharp.Mat rightImageOCV = SLMat2CVMat(ref rightImage, MAT_TYPE.MAT_8U_C4);

    sl.Mat depthImage = new sl.Mat();
    depthImage.Create(imageSize, MAT_TYPE.MAT_32F_C1);
    // FIX: depthImage is MAT_32F_C1 but was wrapped as MAT_8U_C4, which made the
    // later ConvertTo(CV_16UC1) reinterpret the float buffer as bytes. Wrap it
    // with its real type so the 16-bit conversion operates on depth values.
    OpenCvSharp.Mat depthImageOCV = SLMat2CVMat(ref depthImage, MAT_TYPE.MAT_32F_C1);

    OpenCvSharp.Mat imageSideBySide = new OpenCvSharp.Mat();
    if (outputAsVideo)
    {
        imageSideBySide = new OpenCvSharp.Mat((int)imageSize.height, (int)imageSize.width * 2, OpenCvSharp.MatType.CV_8UC3);
    }
    OpenCvSharp.VideoWriter videoWriter = new OpenCvSharp.VideoWriter();

    // Create video writer
    if (outputAsVideo)
    {
        int fourcc = OpenCvSharp.VideoWriter.FourCC('M', '4', 'S', '2'); // MPEG-4 part 2 codec
        int frameRate = Math.Max(zed.GetInitParameters().cameraFPS, 25); // Minimum write rate in OpenCV is 25
        Console.WriteLine(outputPath);
        videoWriter.Open(outputPath, fourcc, frameRate, new OpenCvSharp.Size((int)imageSize.width * 2, (int)imageSize.height));
        if (!videoWriter.IsOpened())
        {
            Console.WriteLine("Error: OpenCV video writer cannot be opened. Please check the .avi file path and write permissions.");
            zed.Close();
            Environment.Exit(-1);
        }
    }

    RuntimeParameters rtParams = new RuntimeParameters();
    rtParams.sensingMode = SENSING_MODE.FILL; // hole-filled depth for cleaner exports

    // Start SVO conversion to AVI/SEQUENCE
    Console.WriteLine("Converting SVO... press Q to interrupt conversion");

    int nbFrames = zed.GetSVONumberOfFrames();
    int svoPosition = 0;
    zed.SetSVOPosition(svoPosition);

    while (!exit_app)
    {
        exit_app = (System.Windows.Input.Keyboard.IsKeyDown(System.Windows.Input.Key.Q) == true);
        ERROR_CODE err = zed.Grab(ref rtParams);
        if (err == ERROR_CODE.SUCCESS)
        {
            svoPosition = zed.GetSVOPosition();

            // Retrieve SVO images
            zed.RetrieveImage(leftImage, VIEW.LEFT);
            switch (appType)
            {
                case APP_TYPE.LEFT_AND_RIGHT:
                    zed.RetrieveImage(rightImage, VIEW.RIGHT);
                    break;
                case APP_TYPE.LEFT_AND_DEPTH:
                    zed.RetrieveImage(rightImage, VIEW.DEPTH);
                    break;
                case APP_TYPE.LEFT_AND_DEPTH_16:
                    zed.RetrieveMeasure(depthImage, MEASURE.DEPTH);
                    break;
                default:
                    break;
            }

            if (outputAsVideo)
            {
                // Convert SVO image from RGBA to RGB, writing each half of the side-by-side frame in place
                Cv2.CvtColor(leftImageOCV, imageSideBySide[new OpenCvSharp.Rect(0, 0, (int)imageSize.width, (int)imageSize.height)], ColorConversionCodes.BGRA2BGR);
                Cv2.CvtColor(rightImageOCV, imageSideBySide[new OpenCvSharp.Rect((int)imageSize.width, 0, (int)imageSize.width, (int)imageSize.height)], ColorConversionCodes.BGRA2BGR);
                // Write the RGB image in the video
                videoWriter.Write(imageSideBySide);
            }
            else
            {
                // Generate filenames
                string filename1 = outputPath + "/left" + svoPosition + ".png";
                string filename2 = outputPath + (appType == APP_TYPE.LEFT_AND_RIGHT ? "/right" : "/depth") + svoPosition + ".png";

                // Save Left images
                Cv2.ImWrite(filename1, leftImageOCV);

                // Save right view / depth view, or 16-bit depth measure
                if (appType != APP_TYPE.LEFT_AND_DEPTH_16)
                {
                    Cv2.ImWrite(filename2, rightImageOCV);
                }
                else
                {
                    // Convert to 16 bit (depth is in millimeters, so values fit a ushort)
                    OpenCvSharp.Mat depth16 = new OpenCvSharp.Mat();
                    depthImageOCV.ConvertTo(depth16, MatType.CV_16UC1);
                    Cv2.ImWrite(filename2, depth16);
                }
            }

            // Display Progress
            ProgressBar((float)svoPosition / (float)nbFrames, 30);
        }
        else if (zed.GetSVOPosition() >= nbFrames - (zed.GetInitParameters().svoRealTimeMode ? 2 : 1))
        {
            // FIX: the original called Environment.Exit(-1) here, which reported
            // failure on the normal end-of-file path AND skipped releasing the
            // video writer and closing the camera. Leave the loop cleanly instead.
            Console.WriteLine("SVO end has been reached. Exiting now.");
            exit_app = true;
        }
        else
        {
            Console.WriteLine("Grab Error : " + err);
            exit_app = true;
        }
    }
    if (outputAsVideo)
    {
        // Close the video writer
        videoWriter.Release();
    }
    zed.Close();
}
// Spatial-mapping sample setup: opens a ZED2 camera, enables positional
// tracking, configures spatial mapping (mesh or fused point cloud depending on
// the CREATE_MESH flag — mapping itself is started later with SPACE BAR), and
// creates the OpenGL viewer window.
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 15f;
    init_params.sdkVerbose = true;
    parseArgs(args, ref init_params); // command-line overrides (resolution/SVO/IP)

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // This sample relies on ZED2-only features.
    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // Mapping is idle until the user starts it (SPACE BAR in the main loop).
    tracking_state = POSITIONAL_TRACKING_STATE.OFF;
    mapping_state = SPATIAL_MAPPING_STATE.NOT_ENABLED;
    mapping_activated = false;

    // Enable tracking
    PositionalTrackingParameters positionalTrackingParameters = new PositionalTrackingParameters();
    zedCamera.EnablePositionalTracking(ref positionalTrackingParameters);

    runtimeParameters = new RuntimeParameters();

    spatialMappingParameters = new SpatialMappingParameters();
    spatialMappingParameters.resolutionMeter = SpatialMappingParameters.get(MAPPING_RESOLUTION.MEDIUM);
    spatialMappingParameters.saveTexture = false;
    // Output type: triangle mesh or fused point cloud, selected at compile time.
    if (CREATE_MESH)
    {
        spatialMappingParameters.map_type = SPATIAL_MAP_TYPE.MESH;
    }
    else
    {
        spatialMappingParameters.map_type = SPATIAL_MAP_TYPE.FUSED_POINT_CLOUD;
    }

    // Create ZED Objects filled in the main loop
    zedMat = new Mat();
    cam_pose = new Pose();

    // Create mesh. Containers for whichever map type is produced.
    mesh = new Mesh();
    fusedPointCloud = new FusedPointCloud();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    Resolution res = new Resolution((uint)Width, (uint)Height);
    zedMat.Create(res, MAT_TYPE.MAT_8U_C4, MEM.CPU);

    // Create OpenGL Viewer
    viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

    Console.WriteLine("Hit SPACE BAR to start spatial mapping...");

    // Create OpenGL window
    CreateWindow();
}
// Birds-eye object-detection sample setup: opens a ZED2 camera, enables
// positional tracking and person detection (optionally with trajectory
// batching), then builds the display pipeline: one global OpenCV image holding
// the camera view and the 2D top-view tracks side by side, an OpenGL 3D viewer,
// and a confidence trackbar.
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 10f;
    init_params.cameraDisableSelfCalib = true;
    maxDepthDistance = init_params.depthMaximumDistance; // kept for the top-view scale
    parseArgs(args, ref init_params); // command-line overrides (resolution/SVO/IP)

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // This sample relies on ZED2-only features (object detection).
    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // Enable tracking (mandatory for object detection)
    Quaternion quat = Quaternion.Identity;
    Vector3 vec = Vector3.Zero;
    zedCamera.EnablePositionalTracking(ref quat, ref vec);

    runtimeParameters = new RuntimeParameters();

    // Enable the Objects detection module
    ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();
    obj_det_params.enableObjectTracking = true; // the object detection will track objects across multiple images, instead of an image-by-image basis
    isTrackingON = obj_det_params.enableObjectTracking;
    obj_det_params.enable2DMask = false;
    obj_det_params.imageSync = true; // the object detection is synchronized to the image
    obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;

    // Optional trajectory batching: trades latency for smoother, re-identified tracks.
    if (USE_BATCHING)
    {
        batchParameters = new BatchParameters();
        batchParameters.latency = 2.0f;
        batchParameters.enable = true;
        batchHandler = new BatchSystemHandler((int)batchParameters.latency * 2);
        obj_det_params.batchParameters = batchParameters;
    }

    zedCamera.EnableObjectDetection(ref obj_det_params);

    // Configure object detection runtime parameters
    obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    detection_confidence = 60;
    obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
    // A non-zero entry in objectClassFilter enables that class; only PERSON here.
    obj_runtime_parameters.objectClassFilter = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);
    //obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(true);
    // To set a specific threshold
    obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = detection_confidence;
    //obj_runtime_parameters.object_confidence_threshold[(int)sl.OBJECT_CLASS.VEHICLE] = detection_confidence;

    // Create ZED Objects filled in the main loop
    objects = new Objects();
    imageLeft = new sl.Mat();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;

    // Display at most 1280x720; the tracks view is a fixed 400px-wide column.
    displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
    Resolution tracksRes = new Resolution(400, (uint)displayRes.height);

    // create a global image to store both image and tracks view
    globalImage = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width + (int)tracksRes.width, OpenCvSharp.MatType.CV_8UC4);
    // retrieve ref on image part (sub-rect alias into globalImage, no copy)
    imageLeftOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect(0, 0, (int)displayRes.width, (int)displayRes.height));
    // retrieve ref on tracks part (sub-rect alias into globalImage, no copy)
    imageTrackOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect((int)displayRes.width, 0, (int)tracksRes.width, (int)tracksRes.height));
    // init an sl::Mat from the ocv image ref (which is in fact the memory of global_image)
    imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);
    imageRenderLeft = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());
    // Scale factor from full camera resolution to the display resolution.
    imgScale = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);

    // Create OpenGL Viewer
    viewer = new GLViewer();

    camWorldPose = new Pose();
    camCameraPose = new Pose();
    pointCloud = new sl.Mat();
    // Point cloud at reduced resolution (max 720x404) for the 3D view.
    pcRes = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
    pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

    // 2D tracks
    trackViewGenerator = new TrackingViewer(tracksRes, (int)zedCamera.GetCameraFPS(), maxDepthDistance, 3);
    trackViewGenerator.setCameraCalibration(zedCamera.GetCalibrationParameters());

    window_name = "ZED| 2D View and Birds view";
    Cv2.NamedWindow(window_name, WindowMode.Normal);// Create Window
    Cv2.CreateTrackbar("Confidence", window_name, ref detection_confidence, 100);

    // Create OpenGL window
    CreateWindow();
}