static void Main(string[] args)
{
    // Set initialization parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMode = DEPTH_MODE.PERFORMANCE;

    Camera zedCamera = new Camera(0);
    // Open the camera
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Initialize the frame counter
    int i = 0;

    // Get the resolution of the camera
    uint mWidth = (uint)zedCamera.ImageWidth;
    uint mHeight = (uint)zedCamera.ImageHeight;

    // Initialize the Mat that will contain the point cloud (XYZRGBA needs a 4-channel float Mat)
    Mat depth_map = new Mat();
    depth_map.Create(mWidth, mHeight, MAT_TYPE.MAT_32F_C4, MEM.CPU); // Mat needs to be created before use

    // To avoid NaN values, set the sensing mode to FILL to remove holes
    RuntimeParameters runtimeParameters = new RuntimeParameters();
    runtimeParameters.sensingMode = SENSING_MODE.FILL;

    while (i < 1000)
    {
        if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
        {
            // Retrieve the colored point cloud (X, Y, Z and color for each pixel)
            zedCamera.RetrieveMeasure(depth_map, MEASURE.XYZRGBA);

            // Display the X, Y, Z values at the center of the image every 10 frames
            if (i % 10 == 0)
            {
                float4 xyz_value;
                depth_map.GetValue((int)mWidth / 2, (int)mHeight / 2, out xyz_value, MEM.CPU);
                Console.WriteLine("Depth at image center: (" + xyz_value.x + "," + xyz_value.y + "," + xyz_value.z + ")");
            }

            // Increment frame count
            i++;
        }
    }

    // Close the camera
    zedCamera.Close();
}
static void Main(string[] args)
{
    // Set initialization parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMode = DEPTH_MODE.PERFORMANCE;

    Camera zedCamera = new Camera(0);
    // Open the camera
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Enable positional tracking
    PositionalTrackingParameters positionalTrackingParameters = new PositionalTrackingParameters();
    err = zedCamera.EnablePositionalTracking(ref positionalTrackingParameters);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    int i = 0;
    sl.Pose pose = new Pose();
    RuntimeParameters runtimeParameters = new RuntimeParameters();

    while (i < 1000)
    {
        if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
        {
            // Get the pose of the left eye of the camera with reference to the world frame
            zedCamera.GetPosition(ref pose, REFERENCE_FRAME.WORLD);

            // Display the translation, rotation and timestamp every 10 frames
            if (i % 10 == 0)
            {
                Console.WriteLine("Translation : " + pose.translation + ", Rotation : " + pose.rotation + ", Timestamp : " + pose.timestamp);
            }
            i++;
        }
    }

    // Disable positional tracking and close the camera
    zedCamera.DisablePositionalTracking("");
    zedCamera.Close();
}
static void Main(string[] args)
{
    // Set initialization parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;

    Camera zedCamera = new Camera(0);
    // Open the camera
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Get the resolution of the camera
    uint mWidth = (uint)zedCamera.ImageWidth;
    uint mHeight = (uint)zedCamera.ImageHeight;

    // Initialize the Mat that will contain the left image
    Mat image = new Mat();
    image.Create(mWidth, mHeight, MAT_TYPE.MAT_8U_C4, MEM.CPU); // Mat needs to be created before use

    // Define default runtime parameters
    RuntimeParameters runtimeParameters = new RuntimeParameters();

    // Initialize the frame counter
    int i = 0;

    while (i < 1000)
    {
        if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
        {
            zedCamera.RetrieveImage(image, VIEW.LEFT);        // Get the left image
            ulong timestamp = zedCamera.GetCameraTimeStamp(); // Get the image timestamp
            Console.WriteLine("Image resolution: " + image.GetWidth() + "x" + image.GetHeight() + " || Image timestamp: " + timestamp);

            // Increment frame count
            i++;
        }
    }

    // Close the camera
    zedCamera.Close();
}
private void parseArgs(string[] args, ref sl.InitParameters param) { if (args.Length > 0 && args[0].IndexOf(".svo") != -1) { // SVO input mode param.inputType = INPUT_TYPE.SVO; param.pathSVO = args[0]; isPlayback = true; Console.WriteLine("[Sample] Using SVO File input: " + args[0]); } else if (args.Length > 0 && args[0].IndexOf(".svo") == -1) { IPAddress ip; string arg = args[0]; if (IPAddress.TryParse(arg, out ip)) { // Stream input mode - IP + port param.inputType = INPUT_TYPE.STREAM; param.ipStream = ip.ToString(); Console.WriteLine("[Sample] Using Stream input, IP : " + ip); } else if (args[0].IndexOf("HD2K") != -1) { param.resolution = sl.RESOLUTION.HD2K; Console.WriteLine("[Sample] Using Camera in resolution HD2K"); } else if (args[0].IndexOf("HD1080") != -1) { param.resolution = sl.RESOLUTION.HD1080; Console.WriteLine("[Sample] Using Camera in resolution HD1080"); } else if (args[0].IndexOf("HD720") != -1) { param.resolution = sl.RESOLUTION.HD720; Console.WriteLine("[Sample] Using Camera in resolution HD720"); } else if (args[0].IndexOf("VGA") != -1) { param.resolution = sl.RESOLUTION.VGA; Console.WriteLine("[Sample] Using Camera in resolution VGA"); } } else { // } }
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 50f;

    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Enable tracking
    Quaternion quat = Quaternion.Identity;
    Vector3 vec = Vector3.Zero;
    zedCamera.EnablePositionalTracking(ref quat, ref vec);

    runtimeParameters = new RuntimeParameters();

    // Create the Mat that will contain the point cloud
    point_cloud = new Mat();
    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    res = new Resolution((uint)Width, (uint)Height);
    point_cloud.Create(res, MAT_TYPE.MAT_32F_C4, MEM.CPU);

    // Create OpenGL Viewer
    viewer = new GLViewer();
    cam_pose = new Pose();

    // Create OpenGL window
    CreateWindow();
}
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.sdkVerbose = true;

    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Enable tracking
    PositionalTrackingParameters trackingParams = new PositionalTrackingParameters();
    trackingParams.enableAreaMemory = true;
    zedCamera.EnablePositionalTracking(ref trackingParams);

    runtimeParameters = new RuntimeParameters();
    cameraModel = zedCamera.GetCameraModel();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    res = new Resolution((uint)Width, (uint)Height);

    // Create OpenGL Viewer
    viewer = new GLViewer();
    cam_pose = new Pose();

    // Create OpenGL window
    CreateWindow();
}
static void Main(string[] args)
{
    // Create the camera
    Camera zedCamera = new Camera(0);

    // Create default configuration parameters
    InitParameters init_params = new InitParameters();

    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Get camera information (serial number)
    int zed_serial = zedCamera.GetZEDSerialNumber();
    Console.WriteLine("Hello! This is my serial number: " + zed_serial);
    Console.ReadLine();

    zedCamera.Close();
}
static void Main(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;
    init_params.cameraFPS = 30;

    Camera zed = new Camera(0);
    // Open the camera
    ERROR_CODE err = zed.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    SensorsData sensors_data = new SensorsData();
    ulong last_imu_timestamp = 0;
    RuntimeParameters runtimeParameters = new RuntimeParameters();

    while (zed.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
    {
        zed.GetSensorsData(ref sensors_data, TIME_REFERENCE.CURRENT);

        if (sensors_data.imu.timestamp > last_imu_timestamp)
        {
            // Show sensors data
            Console.WriteLine("IMU Orientation : " + sensors_data.imu.fusedOrientation);
            Console.WriteLine("Angular Velocity : " + sensors_data.imu.angularVelocity);
            Console.WriteLine("Magnetometer Magnetic field : " + sensors_data.magnetometer.magneticField);
            Console.WriteLine("Barometer Atmospheric pressure : " + sensors_data.barometer.pressure);
            last_imu_timestamp = sensors_data.imu.timestamp;

            // Wait for the [ENTER] key to be pressed
            Console.ReadLine();
        }
    }
    zed.Close();
}
static void Main(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;
    init_params.cameraFPS = 30;

    Camera zedCamera = new Camera(0);
    // Open the camera
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Get camera information (serial number)
    int zed_serial = zedCamera.GetZEDSerialNumber();
    Console.WriteLine("Hello! This is my serial number: " + zed_serial);
    Console.ReadLine();

    zedCamera.Close();
}
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 15f;

    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    // Enable tracking (mandatory for object detection)
    Quaternion quat = Quaternion.Identity;
    Vector3 vec = Vector3.Zero;
    zedCamera.EnablePositionalTracking(ref quat, ref vec);

    runtimeParameters = new RuntimeParameters();

    // Enable the object detection module
    ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();
    obj_det_params.enableObjectTracking = true; // track objects across multiple images instead of on an image-by-image basis
    obj_det_params.enable2DMask = false;
    obj_det_params.enableBodyFitting = true;    // smooth skeleton movements
    obj_det_params.imageSync = true;            // the object detection is synchronized to the image
    obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;
    zedCamera.EnableObjectDetection(ref obj_det_params);

    // Create ZED objects filled in the main loop
    object_frame = new Objects();
    zedMat = new Mat();
    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    Resolution res = new Resolution((uint)Width, (uint)Height);
    zedMat.Create(res, MAT_TYPE.MAT_8U_C4, MEM.CPU);

    // Create OpenGL Viewer
    viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

    // Configure object detection runtime parameters
    obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    obj_runtime_parameters.detectionConfidenceThreshold = 35;
    obj_runtime_parameters.objectClassFilter = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);

    // To set a specific threshold per class
    obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
    obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = 35;

    // Create OpenGL window
    CreateWindow();
}
static void Main(string[] args)
{
    // Set initialization parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.coordinateUnits = UNIT.METER;
    init_params.sdkVerbose = true;

    Camera zedCamera = new Camera(0);
    // Open the camera
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS) Environment.Exit(-1);

    // Enable positional tracking
    PositionalTrackingParameters trackingParams = new PositionalTrackingParameters();
    // If you want object tracking, you need to enable positional tracking first
    err = zedCamera.EnablePositionalTracking(ref trackingParams);
    if (err != ERROR_CODE.SUCCESS) Environment.Exit(-1);

    // Enable object detection
    object_detection_parameters = new ObjectDetectionParameters();
    // Different models can be chosen, optimizing either runtime or accuracy
    object_detection_parameters.detectionModel = sl.DETECTION_MODEL.HUMAN_BODY_FAST;
    // Tracking follows detected objects across time and space
    object_detection_parameters.enableObjectTracking = true;
    // Run detection for every camera grab
    object_detection_parameters.imageSync = true;
    err = zedCamera.EnableObjectDetection(ref object_detection_parameters);
    if (err != ERROR_CODE.SUCCESS) Environment.Exit(-1);

    // Create runtime parameters
    RuntimeParameters runtimeParameters = new RuntimeParameters();

    // Create the object detection frame handle (contains all the objects data)
    sl.Objects objects = new sl.Objects();

    // Create object detection runtime parameters (confidence, ...)
    ObjectDetectionRuntimeParameters obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    obj_runtime_parameters.detectionConfidenceThreshold = 40;

    int nbDetection = 0;
    while (nbDetection < 100)
    {
        if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
        {
            // Retrieve objects from the object detection module
            zedCamera.RetrieveObjects(ref objects, ref obj_runtime_parameters);

            if (Convert.ToBoolean(objects.isNew))
            {
                Console.WriteLine(objects.numObject + " Person(s) detected");
                Console.WriteLine();
                if (objects.numObject > 0)
                {
                    sl.ObjectData firstObject = objects.objectData[0];
                    Console.WriteLine("First Person attributes :");
                    Console.WriteLine(" Confidence: " + firstObject.confidence);
                    if (object_detection_parameters.enableObjectTracking)
                    {
                        Console.WriteLine(" Tracking ID: " + firstObject.id + " tracking state: " + firstObject.objectTrackingState + " / " + firstObject.actionState);
                    }
                    Console.WriteLine(" 3D Position: " + firstObject.position + " Velocity: " + firstObject.velocity);

                    Console.WriteLine(" Keypoints 2D");
                    // The body part meaning can be obtained by casting the index into a BODY_PARTS;
                    // to get the BODY_PARTS index, the getIdx function is available
                    for (int i = 0; i < firstObject.keypoints2D.Length; i++)
                    {
                        var kp = firstObject.keypoints2D[i];
                        Console.WriteLine("    " + (sl.BODY_PARTS)i + " " + kp.X + ", " + kp.Y);
                    }

                    // The BODY_PARTS can be linked as bones, using sl::BODY_BONES which gives the BODY_PARTS pair for each bone
                    Console.WriteLine(" Keypoints 3D ");
                    for (int i = 0; i < firstObject.keypoints.Length; i++)
                    {
                        var kp = firstObject.keypoints[i];
                        Console.WriteLine("    " + (sl.BODY_PARTS)i + " " + kp.X + ", " + kp.Y + ", " + kp.Z);
                    }

                    Console.WriteLine();
                    Console.WriteLine("Press 'Enter' to continue...");
                    Console.ReadLine();
                }
                // Count this detection so the loop terminates after 100 detections
                nbDetection++;
            }
        }
    }

    // Disable object detection, positional tracking and close the camera
    zedCamera.DisableObjectDetection();
    zedCamera.DisablePositionalTracking("");
    zedCamera.Close();
}
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD1080;
    init_params.cameraFPS = 30;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;

    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    if (!(zedCamera.CameraModel == sl.MODEL.ZED2 || zedCamera.CameraModel == sl.MODEL.ZED2i))
    {
        Console.WriteLine(" ERROR : Use ZED2/ZED2i Camera only");
        return;
    }

    // Enable tracking (mandatory for object detection)
    PositionalTrackingParameters positionalTrackingParameters = new PositionalTrackingParameters();
    zedCamera.EnablePositionalTracking(ref positionalTrackingParameters);

    runtimeParameters = new RuntimeParameters();

    // Enable the object detection module
    ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();
    obj_det_params.enableObjectTracking = true; // track objects across multiple images instead of on an image-by-image basis
    isTrackingON = obj_det_params.enableObjectTracking;
    obj_det_params.enable2DMask = false;
    obj_det_params.enableBodyFitting = true;    // smooth skeleton movements
    obj_det_params.imageSync = true;            // the object detection is synchronized to the image
    obj_det_params.detectionModel = sl.DETECTION_MODEL.HUMAN_BODY_ACCURATE;
    zedCamera.EnableObjectDetection(ref obj_det_params);

    // Create ZED objects filled in the main loop
    camPose = new sl.Pose();
    objects = new Objects();
    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;

    // Left image Mat at display resolution (capped at 1280x720), shared with an OpenCV Mat header
    imageLeft = new Mat();
    displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
    imgScale = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);
    imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);
    imageLeftOcv = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());

    // Point cloud Mat at a reduced resolution
    pointCloud = new sl.Mat();
    pcRes = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
    pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

    // Create OpenGL Viewer
    viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

    // Configure object detection runtime parameters
    obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    obj_runtime_parameters.detectionConfidenceThreshold = 40;

    // Create OpenCV window
    window_name = "ZED| 2D View";
    Cv2.NamedWindow(window_name, WindowMode.Normal);

    // Create OpenGL window
    CreateWindow();
}
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.sdkVerbose = true;

    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    findPlaneStatus = ERROR_CODE.FAILURE;
    tracking_state = POSITIONAL_TRACKING_STATE.OFF;
    hasIMU = zedCamera.GetSensorsConfiguration().gyroscope_parameters.isAvailable;
    userAction = new UserAction();

    // Enable tracking
    PositionalTrackingParameters positionalTrackingParameters = new PositionalTrackingParameters();
    zedCamera.EnablePositionalTracking(ref positionalTrackingParameters);

    runtimeParameters = new RuntimeParameters();
    runtimeParameters.measure3DReferenceFrame = REFERENCE_FRAME.WORLD;

    // Create ZED objects filled in the main loop
    zedMat = new Mat();
    cam_pose = new Pose();

    // Create mesh
    planeMeshTriangles = new int[65000];
    planeMeshVertices = new Vector3[65000];
    plane = new PlaneData();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    Resolution res = new Resolution((uint)Width, (uint)Height);
    zedMat.Create(res, MAT_TYPE.MAT_8U_C4, MEM.CPU);

    // Create OpenGL Viewer
    viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

    // Create OpenGL window
    CreateWindow();
}
void Awake()
{
    instance = this;
    zedReady = false;

    // If you want the ZED rig not to be destroyed on scene load
    DontDestroyOnLoad(transform.root);

    // Set the first initialization parameters
    initParameters = new sl.InitParameters();
    initParameters.resolution = resolution;
    initParameters.depthMode = depthMode;
    initParameters.depthStabilization = depthStabilizer;

    // Check if AR is needed and whether it can be added
    CheckStereoMode();

    // Set the other options
    isZEDTracked = enableTracking;
    initialPosition = zedRigRoot.transform.localPosition;
    zedPosition = initialPosition;
    zedOrientation = initialRotation;

    // Create a camera and return an error message if the dependencies are not detected
    zedCamera = sl.ZEDCamera.GetInstance();
    LastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;

    zedSVOManager = GetComponent<ZEDSVOManager>();
    zedCamera.CreateCamera(wrapperVerbose);

    if (zedSVOManager != null)
    {
        // Configure SVO recording/playback
        if ((zedSVOManager.read || zedSVOManager.record) && zedSVOManager.videoFile.Length == 0)
        {
            zedSVOManager.record = false;
            zedSVOManager.read = false;
        }
        if (zedSVOManager.read)
        {
            zedSVOManager.record = false;
            initParameters.pathSVO = zedSVOManager.videoFile;
            initParameters.svoRealTimeMode = zedSVOManager.realtimePlayback;
            initParameters.depthStabilization = depthStabilizer;
        }
    }

    versionZED = "[SDK]: " + sl.ZEDCamera.GetSDKVersion().ToString() + " [Plugin]: " + sl.ZEDCamera.PluginVersion.ToString();

    // Set the ZED tracking frame as the left eye
    if (isStereoRig)
    {
        // Create a camera rig (the last 2 cameras)
        GameObject o = CreateZEDRigDisplayer();
        o.hideFlags = HideFlags.HideAndDontSave;
        o.transform.parent = transform;

        // Force some initParameters that are required for a MR experience
        initParameters.enableRightSideMeasure = isStereoRig;
        initParameters.depthMinimumDistance = 0.1f;
        initParameters.depthMode = sl.DEPTH_MODE.PERFORMANCE;
        initParameters.depthStabilization = depthStabilizer;

        // Create the mirror; the texture from the first cameras is rendered to avoid a black border
        CreateMirror();
    }

    // Start the coroutine that initializes the ZED without blocking the user
    LastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;
    openingLaunched = false;
    StartCoroutine("InitZED");

    OnCamBrightnessChange += CameraBrightnessChangeHandler;
}
public MainWindow(string[] args)
{
    // Set configuration parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.cameraFPS = 60;
    init_params.depthMode = DEPTH_MODE.ULTRA;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMaximumDistance = 15f;
    init_params.sdkVerbose = true;

    parseArgs(args, ref init_params);

    // Open the camera
    zedCamera = new Camera(0);
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    if (zedCamera.CameraModel != sl.MODEL.ZED2)
    {
        Console.WriteLine(" ERROR : Use ZED2 Camera only");
        return;
    }

    tracking_state = POSITIONAL_TRACKING_STATE.OFF;
    mapping_state = SPATIAL_MAPPING_STATE.NOT_ENABLED;
    mapping_activated = false;

    // Enable tracking
    PositionalTrackingParameters positionalTrackingParameters = new PositionalTrackingParameters();
    zedCamera.EnablePositionalTracking(ref positionalTrackingParameters);

    runtimeParameters = new RuntimeParameters();

    // Configure spatial mapping
    spatialMappingParameters = new SpatialMappingParameters();
    spatialMappingParameters.resolutionMeter = SpatialMappingParameters.get(MAPPING_RESOLUTION.MEDIUM);
    spatialMappingParameters.saveTexture = false;
    if (CREATE_MESH)
    {
        spatialMappingParameters.map_type = SPATIAL_MAP_TYPE.MESH;
    }
    else
    {
        spatialMappingParameters.map_type = SPATIAL_MAP_TYPE.FUSED_POINT_CLOUD;
    }

    // Create ZED objects filled in the main loop
    zedMat = new Mat();
    cam_pose = new Pose();

    // Create mesh
    mesh = new Mesh();
    fusedPointCloud = new FusedPointCloud();

    int Height = zedCamera.ImageHeight;
    int Width = zedCamera.ImageWidth;
    Resolution res = new Resolution((uint)Width, (uint)Height);
    zedMat.Create(res, MAT_TYPE.MAT_8U_C4, MEM.CPU);

    // Create OpenGL Viewer
    viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

    Console.WriteLine("Hit SPACE BAR to start spatial mapping...");

    // Create OpenGL window
    CreateWindow();
}
static void Main(string[] args)
{
    // Set initialization parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD2K;
    init_params.coordinateUnits = UNIT.METER;
    init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
    init_params.depthMode = DEPTH_MODE.PERFORMANCE;

    Camera zedCamera = new Camera(0);
    // Open the camera
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Enable positional tracking
    PositionalTrackingParameters trackingParams = new PositionalTrackingParameters();
    err = zedCamera.EnablePositionalTracking(ref trackingParams);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Enable object detection
    ObjectDetectionParameters object_detection_parameters = new ObjectDetectionParameters();
    object_detection_parameters.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX;
    object_detection_parameters.enableObjectTracking = true;
    err = zedCamera.EnableObjectDetection(ref object_detection_parameters);
    if (err != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    // Create runtime parameters
    RuntimeParameters runtimeParameters = new RuntimeParameters();

    // Create the object detection frame handle (contains all the objects data)
    sl.Objects object_frame = new sl.Objects();

    // Create object detection runtime parameters (confidence, ...)
    ObjectDetectionRuntimeParameters obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    obj_runtime_parameters.detectionConfidenceThreshold = 50;

    int i = 0;
    while (i < 1000)
    {
        if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
        {
            // Retrieve objects from the object detection module
            err = zedCamera.RetrieveObjects(ref object_frame, ref obj_runtime_parameters);

            // Display the data every 10 frames
            if (i % 10 == 0)
            {
                Console.WriteLine("Nb Objects Detected : " + object_frame.numObject);
                for (int p = 0; p < object_frame.numObject; p++)
                {
                    Console.WriteLine("Position of object " + p + " : " + object_frame.objectData[p].position + " Tracked? : " + object_frame.objectData[p].objectTrackingState);
                }
            }
            i++;
        }
    }

    // Disable object detection, positional tracking and close the camera
    zedCamera.DisableObjectDetection();
    zedCamera.DisablePositionalTracking("");
    zedCamera.Close();
}