// Render loop (basic version; a batching-aware version follows below)
private void NativeWindow_Render(object sender, NativeWindowEventArgs e)
{
    OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender;
    Gl.Viewport(0, 0, (int)nativeWindow.Width, (int)nativeWindow.Height);
    Gl.Clear(ClearBufferMask.ColorBufferBit);

    if (viewer.isAvailable() && zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
    {
        if (imageLeft.IsInit())
        {
            // Retrieve the point cloud, left image and camera pose
            zedCamera.RetrieveMeasure(pointCloud, sl.MEASURE.XYZRGBA, sl.MEM.CPU, pcRes);
            zedCamera.RetrieveImage(imageLeft, sl.VIEW.LEFT, sl.MEM.CPU, displayRes);
            zedCamera.GetPosition(ref camPose, REFERENCE_FRAME.WORLD);

            // Retrieve detected objects and draw them on the 2D view
            zedCamera.RetrieveObjects(ref objects, ref obj_runtime_parameters);
            TrackingViewer.render_2D(ref imageLeftOcv, imgScale, ref objects, isTrackingON);

            // Update the GL view
            viewer.update(pointCloud, objects, camPose);
            viewer.render();

            if (isPlayback && zedCamera.GetSVOPosition() == zedCamera.GetSVONumberOfFrames())
            {
                return;
            }

            Cv2.ImShow(window_name, imageLeftOcv);
            Cv2.WaitKey(10); // let the OpenCV window process events and refresh
        }
    }
}
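// The Render handler above only runs once the GL context exists. For context,
// here is a minimal sketch of a companion ContextCreated handler that prepares
// the GL state before the first frame. The GLViewer.init signature is an
// assumption, not part of the original listing.
private void NativeWindow_ContextCreated(object sender, NativeWindowEventArgs e)
{
    OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender;
    Gl.ReadBuffer(ReadBufferMode.Back);
    Gl.ClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    Gl.Enable(EnableCap.Blend);
    Gl.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha);

    // Assumed initializer: hand the viewer the left-camera calibration so it
    // can build its projection matrix.
    viewer.init(zedCamera.GetCalibrationParameters().leftCam, maxDepthDistance);
}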
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 10f;
    init_params.cameraDisableSelfCalib = true;

    maxDepthDistance = init_params.depthMaximumDistance;
    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // Enable positional tracking (mandatory for object detection)
    Quaternion quat = Quaternion.Identity;
    Vector3 vec = Vector3.Zero;
    zedCamera.EnablePositionalTracking(ref quat, ref vec);

    runtimeParameters = new RuntimeParameters();

    // Enable the object detection module
    ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();
    obj_det_params.enableObjectTracking = true; // track objects across frames instead of detecting on an image-by-image basis
    isTrackingON = obj_det_params.enableObjectTracking;
    obj_det_params.enable2DMask = false;
    obj_det_params.imageSync = true; // synchronize object detection with image grabbing
    obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;

    if (USE_BATCHING)
    {
        batchParameters = new BatchParameters();
        batchParameters.latency = 2.0f;
        batchParameters.enable = true;
        batchHandler = new BatchSystemHandler((int)batchParameters.latency * 2);
        obj_det_params.batchParameters = batchParameters;
    }
    zedCamera.EnableObjectDetection(ref obj_det_params);

    // Configure object detection runtime parameters
    obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    detection_confidence = 60;
    obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
    obj_runtime_parameters.objectClassFilter = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);
    //obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(true);

    // To set a per-class confidence threshold
    obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = detection_confidence;
    //obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.VEHICLE] = detection_confidence;

    // Create ZED objects filled in the main loop
    objects = new Objects();
    imageLeft = new sl.Mat();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;

    displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
    Resolution tracksRes = new Resolution(400, (uint)displayRes.height);

    // Create a global image that stores both the image view and the tracks view
    globalImage = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width + (int)tracksRes.width, OpenCvSharp.MatType.CV_8UC4);
    // Keep a reference on the image part of globalImage
    imageLeftOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect(0, 0, (int)displayRes.width, (int)displayRes.height));
    // Keep a reference on the tracks part of globalImage
    imageTrackOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect((int)displayRes.width, 0, (int)tracksRes.width, (int)tracksRes.height));
    // Init the sl.Mat, then wrap its memory in an OpenCV Mat so it can be copied into globalImage each frame
    imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);
    imageRenderLeft = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());
    imgScale = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);

    // Create the OpenGL viewer
    viewer = new GLViewer();

    camWorldPose = new Pose();
    camCameraPose = new Pose();
    pointCloud = new sl.Mat();
    pcRes = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
    pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

    // 2D tracks
    trackViewGenerator = new TrackingViewer(tracksRes, (int)zedCamera.GetCameraFPS(), maxDepthDistance, 3);
    trackViewGenerator.setCameraCalibration(zedCamera.GetCalibrationParameters());

    window_name = "ZED | 2D View and Birds View";
    Cv2.NamedWindow(window_name, WindowMode.Normal); // Create the OpenCV window
    Cv2.CreateTrackbar("Confidence", window_name, ref detection_confidence, 100);

    // Create the OpenGL window
    CreateWindow();
}
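// The constructor ends with CreateWindow(). A minimal sketch of what that
// helper could look like with OpenGL.Net's CoreUI window, which is what
// delivers the NativeWindow_Render events used by the render loops here.
// The event wiring, window size and key handling are assumptions, not part
// of the original listing.
private void CreateWindow()
{
    using (OpenGL.CoreUI.NativeWindow nativeWindow = OpenGL.CoreUI.NativeWindow.Create())
    {
        nativeWindow.ContextCreated += NativeWindow_ContextCreated;
        nativeWindow.Render += NativeWindow_Render;
        nativeWindow.KeyDown += (object obj, NativeWindowKeyEventArgs ev) =>
        {
            // Hypothetical key handling: stop the render loop on ESC
            if (ev.Key == KeyCode.Escape)
                ((OpenGL.CoreUI.NativeWindow)obj).Stop();
        };
        nativeWindow.Create(0, 0, 1280, 720, NativeWindowStyle.Resizeable);
        nativeWindow.Show();
        nativeWindow.Run(); // blocks until the window is stopped or closed
    }
}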
// Render loop (full version, with optional batching)
private void NativeWindow_Render(object sender, NativeWindowEventArgs e)
{
    OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender;
    Gl.Viewport(0, 0, (int)nativeWindow.Width, (int)nativeWindow.Height);
    Gl.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);

    ERROR_CODE err = ERROR_CODE.FAILURE;
    if (viewer.isAvailable() && zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
    {
        // Apply the current trackbar confidence to every enabled class.
        // objectClassFilter is an indicator array indexed by class, so iterate
        // indices rather than values.
        for (int c = 0; c < (int)OBJECT_CLASS.LAST; c++)
        {
            if (obj_runtime_parameters.objectClassFilter[c] != 0)
                obj_runtime_parameters.objectConfidenceThreshold[c] = detection_confidence;
        }

        // Retrieve detected objects
        err = zedCamera.RetrieveObjects(ref objects, ref obj_runtime_parameters);

        if (err == ERROR_CODE.SUCCESS && objects.isNew != 0)
        {
            // Retrieve the point cloud, camera poses and left image
            zedCamera.RetrieveMeasure(pointCloud, MEASURE.XYZRGBA, MEM.CPU, pcRes);
            zedCamera.GetPosition(ref camWorldPose, REFERENCE_FRAME.WORLD);
            zedCamera.GetPosition(ref camCameraPose, REFERENCE_FRAME.CAMERA);
            zedCamera.RetrieveImage(imageLeft, VIEW.LEFT, MEM.CPU, displayRes);

            bool update_render_view = true;
            bool update_3d_view = true;
            bool update_tracking_view = true;
            int nbBatches = 0;

            if (USE_BATCHING)
            {
                List<ObjectsBatch> objectsBatch = new List<ObjectsBatch>();
                zedCamera.UpdateObjectsBatch(out nbBatches);
                for (int i = 0; i < nbBatches; i++)
                {
                    ObjectsBatch obj_batch = new ObjectsBatch();
                    zedCamera.GetObjectsBatch(i, ref obj_batch);
                    objectsBatch.Add(obj_batch);
                }
                batchHandler.push(camCameraPose, camWorldPose, imageLeft, pointCloud, ref objectsBatch);
                batchHandler.pop(ref camCameraPose, ref camWorldPose, ref imageLeft, ref pointCloud, ref objects);
                update_render_view = BatchSystemHandler.WITH_IMAGE_RETENTION ? Convert.ToBoolean(objects.isNew) : true;
                update_3d_view = BatchSystemHandler.WITH_IMAGE_RETENTION ? Convert.ToBoolean(objects.isNew) : true;
            }

            if (update_render_view)
            {
                imageRenderLeft.CopyTo(imageLeftOcv);
                TrackingViewer.render_2D(ref imageLeftOcv, imgScale, ref objects, true, isTrackingON);
            }
            if (update_3d_view)
            {
                // Update the GL view
                viewer.update(pointCloud, objects, camWorldPose);
                viewer.render();
            }
            if (update_tracking_view)
            {
                trackViewGenerator.generate_view(ref objects, camCameraPose, ref imageTrackOcv, Convert.ToBoolean(objects.isTracked));
            }
        }

        if (isPlayback && zedCamera.GetSVOPosition() == zedCamera.GetSVONumberOfFrames())
        {
            return;
        }

        Cv2.ImShow(window_name, globalImage);
        Cv2.WaitKey(10); // let the OpenCV window process events and refresh
    }
}
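// Not shown in the listing above: a shutdown path. A plausible sketch,
// assuming the usual ZED SDK teardown order (disable modules, then close the
// camera); the close() name and the batchHandler.clear() helper are
// assumptions, not part of the original sample.
private void close()
{
    if (USE_BATCHING)
        batchHandler.clear(); // assumed helper on BatchSystemHandler

    zedCamera.DisableObjectDetection();
    zedCamera.DisablePositionalTracking("");
    zedCamera.Close();
}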