static void Main(string[] args)
{
    Camera zed = new Camera(0);
    InitParameters initParameters = new InitParameters()
    {
        sdkVerbose = true,
        resolution = RESOLUTION.HD720,
        depthMode = DEPTH_MODE.NONE
    };

    parseArgs(args, ref initParameters);

    ERROR_CODE returnedState = zed.Open(ref initParameters);
    if (returnedState != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    string winName = "Camera control";
    Cv2.NamedWindow(winName);

    Console.WriteLine("ZED Model : " + zed.GetCameraModel());
    Console.WriteLine("ZED Serial Number : " + zed.GetZEDSerialNumber());
    Console.WriteLine("ZED Camera Firmware : " + zed.GetCameraFirmwareVersion());
    Console.WriteLine("ZED Camera Resolution : " + zed.GetInitParameters().resolution);
    Console.WriteLine("ZED Camera FPS : " + zed.GetInitParameters().cameraFPS);

    // Print help control
    printHelp();

    sl.Mat zedImage = new sl.Mat();
    zedImage.Create(new Resolution((uint)zed.ImageWidth, (uint)zed.ImageHeight), MAT_TYPE.MAT_8U_C4);

    // Initialise camera settings
    switchCameraSettings();

    char key = ' ';
    RuntimeParameters rtParams = new RuntimeParameters();
    while (key != 'q')
    {
        // Check that a new image is successfully acquired
        returnedState = zed.Grab(ref rtParams);
        if (returnedState == ERROR_CODE.SUCCESS)
        {
            // Retrieve left image
            zed.RetrieveImage(zedImage, VIEW.LEFT);

            // Convert to cv::Mat (wraps the sl.Mat buffer, no copy)
            OpenCvSharp.Mat cvImage = new OpenCvSharp.Mat(zedImage.GetHeight(), zedImage.GetWidth(), OpenCvSharp.MatType.CV_8UC4, zedImage.GetPtr());
            Cv2.ImShow(winName, cvImage);
        }
        else
        {
            Console.WriteLine("ERROR during capture");
            break;
        }

        key = (char)Cv2.WaitKey(10);
        // Change camera settings with keyboard
        updateCameraSettings(key, ref zed);
    }
}
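// The helpers called above (parseArgs, printHelp, switchCameraSettings, updateCameraSettings)
// are defined elsewhere in the sample. As an illustration of the kind of work updateCameraSettings
// does, here is a minimal, hypothetical sketch that adjusts the currently selected video setting
// with '+'/'-'. The method name, the currentSetting field and the use of
// Camera.GetCameraSettings/SetCameraSettings(VIDEO_SETTINGS, int) are assumptions; exact
// signatures may differ between ZED SDK versions.
private static VIDEO_SETTINGS currentSetting = VIDEO_SETTINGS.BRIGHTNESS; // assumed selection state

private static void updateCameraSettingsSketch(char key, ref Camera zed)
{
    // Read the current value of the selected setting, then nudge it up or down
    int value = zed.GetCameraSettings(currentSetting);
    if (key == '+')
        zed.SetCameraSettings(currentSetting, value + 1);
    else if (key == '-')
        zed.SetCameraSettings(currentSetting, Math.Max(0, value - 1));
}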
/// <summary>
/// Creates an OpenCV version of a ZED Mat.
/// </summary>
/// <param name="zedmat">Source ZED Mat.</param>
/// <param name="zedmattype">Type of ZED Mat - data type and channel number.</param>
/// <returns>An OpenCvSharp.Mat wrapping the ZED Mat's data buffer (no copy).</returns>
private static OpenCvSharp.Mat SLMat2CVMat(ref sl.Mat zedmat, MAT_TYPE zedmattype)
{
    int cvmattype = SLMatType2CVMatType(zedmattype);
    OpenCvSharp.Mat cvmat = new OpenCvSharp.Mat(zedmat.GetHeight(), zedmat.GetWidth(), cvmattype, zedmat.GetPtr());
    return cvmat;
}
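// SLMatType2CVMatType is not shown in this snippet. A plausible implementation simply maps the
// sl.MAT_TYPE enum to the corresponding OpenCvSharp type codes; this is an illustrative sketch,
// not necessarily the sample's exact version.
private static int SLMatType2CVMatType(MAT_TYPE zedmattype)
{
    switch (zedmattype)
    {
        case MAT_TYPE.MAT_32F_C1: return OpenCvSharp.MatType.CV_32FC1;
        case MAT_TYPE.MAT_32F_C2: return OpenCvSharp.MatType.CV_32FC2;
        case MAT_TYPE.MAT_32F_C3: return OpenCvSharp.MatType.CV_32FC3;
        case MAT_TYPE.MAT_32F_C4: return OpenCvSharp.MatType.CV_32FC4;
        case MAT_TYPE.MAT_8U_C1:  return OpenCvSharp.MatType.CV_8UC1;
        case MAT_TYPE.MAT_8U_C2:  return OpenCvSharp.MatType.CV_8UC2;
        case MAT_TYPE.MAT_8U_C3:  return OpenCvSharp.MatType.CV_8UC3;
        case MAT_TYPE.MAT_8U_C4:  return OpenCvSharp.MatType.CV_8UC4;
        default: return -1; // unsupported type
    }
}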
public void pushNewImage(Mat zedImage)
{
    // Update texture with current zedImage
    Gl.TexSubImage2D(TextureTarget.Texture2d, 0, 0, 0, zedImage.GetWidth(), zedImage.GetHeight(),
                     PixelFormat.Rgba, PixelType.UnsignedByte, zedImage.GetPtr());
}
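// pushNewImage only updates existing texture storage, so the texture must be allocated once
// beforehand. A minimal sketch of that allocation, assuming the same OpenGL.Net binding used
// above; the field name textureId, the method name and the RGBA8 format are illustrative
// assumptions, not the sample's exact code.
private uint textureId;

private void createImageTexture(int width, int height)
{
    textureId = Gl.GenTexture();
    Gl.BindTexture(TextureTarget.Texture2d, textureId);
    Gl.TexParameter(TextureTarget.Texture2d, TextureParameterName.TextureMinFilter, Gl.LINEAR);
    Gl.TexParameter(TextureTarget.Texture2d, TextureParameterName.TextureMagFilter, Gl.LINEAR);
    // Allocate RGBA storage matching the 8U_C4 sl.Mat later passed to pushNewImage
    Gl.TexImage2D(TextureTarget.Texture2d, 0, InternalFormat.Rgba, width, height, 0,
                  PixelFormat.Rgba, PixelType.UnsignedByte, IntPtr.Zero);
}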
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 10f;
    init_params.cameraDisableSelfCalib = true;

    maxDepthDistance = init_params.depthMaximumDistance;
    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // Enable positional tracking (mandatory for object detection)
    Quaternion quat = Quaternion.Identity;
    Vector3 vec = Vector3.Zero;
    zedCamera.EnablePositionalTracking(ref quat, ref vec);

    runtimeParameters = new RuntimeParameters();

    // Enable the object detection module
    ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();
    obj_det_params.enableObjectTracking = true; // track objects across multiple images, instead of on an image-by-image basis
    isTrackingON = obj_det_params.enableObjectTracking;
    obj_det_params.enable2DMask = false;
    obj_det_params.imageSync = true; // the object detection is synchronized to the image
    obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;

    if (USE_BATCHING)
    {
        batchParameters = new BatchParameters();
        batchParameters.latency = 2.0f;
        batchParameters.enable = true;
        batchHandler = new BatchSystemHandler((int)batchParameters.latency * 2);
        obj_det_params.batchParameters = batchParameters;
    }

    zedCamera.EnableObjectDetection(ref obj_det_params);

    // Configure object detection runtime parameters
    obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    detection_confidence = 60;
    obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
    obj_runtime_parameters.objectClassFilter = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);
    //obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(true);

    // To set a specific threshold per class
    obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = detection_confidence;
    //obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.VEHICLE] = detection_confidence;

    // Create ZED objects filled in the main loop
    objects = new Objects();
    imageLeft = new sl.Mat();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;

    displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
    Resolution tracksRes = new Resolution(400, (uint)displayRes.height);

    // Create a global image to store both the image view and the tracks view
    globalImage = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width + (int)tracksRes.width, OpenCvSharp.MatType.CV_8UC4);
    // Retrieve a reference on the image part
    imageLeftOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect(0, 0, (int)displayRes.width, (int)displayRes.height));
    // Retrieve a reference on the tracks part
    imageTrackOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect((int)displayRes.width, 0, (int)tracksRes.width, (int)tracksRes.height));

    // Init an sl.Mat and wrap its buffer in an OpenCV Mat for rendering
    imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);
    imageRenderLeft = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());
    imgScale = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);

    // Create OpenGL viewer
    viewer = new GLViewer();

    camWorldPose = new Pose();
    camCameraPose = new Pose();
    pointCloud = new sl.Mat();
    pcRes = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
    pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

    // 2D tracks
    trackViewGenerator = new TrackingViewer(tracksRes, (int)zedCamera.GetCameraFPS(), maxDepthDistance, 3);
    trackViewGenerator.setCameraCalibration(zedCamera.GetCalibrationParameters());

    window_name = "ZED| 2D View and Birds view";
    Cv2.NamedWindow(window_name, WindowMode.Normal); // Create window
    Cv2.CreateTrackbar("Confidence", window_name, ref detection_confidence, 100);

    // Create OpenGL window
    CreateWindow();
}
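// The constructor above only sets things up; the per-frame update that fills these objects is not
// shown here. The following sketch illustrates how they would typically be consumed each frame.
// The method name and the exact retrieval order are assumptions, not the sample's code.
private void updateFrameSketch()
{
    if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
    {
        // Keep the detection threshold in sync with the OpenCV trackbar value
        obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;

        // Retrieve the objects detected in the current frame
        zedCamera.RetrieveObjects(ref objects, ref obj_runtime_parameters);

        // Retrieve display data at the reduced resolutions chosen in the constructor
        zedCamera.RetrieveImage(imageLeft, VIEW.LEFT, MEM.CPU, displayRes);
        zedCamera.RetrieveMeasure(pointCloud, MEASURE.XYZRGBA, MEM.CPU, pcRes);

        // Camera pose, used for the 3D view and the birds-eye track view
        zedCamera.GetPosition(ref camWorldPose, REFERENCE_FRAME.WORLD);
    }
}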