// Refreshes the viewer for a new frame: pushes the latest camera image and
// rebuilds the 3D bounding boxes / ID labels for every displayable object.
public void update(Mat image, Objects objects)
{
    image_handler.pushNewImage(image);

    // Drop everything rendered for the previous frame.
    BBox_faces.clear();
    BBox_edges.clear();
    objectsName.Clear();

    for (int i = 0; i < objects.numObject; i++)
    {
        sl.ObjectData data = objects.objectData[i];

        // Skip objects that should not be displayed (e.g. not tracked).
        if (!renderObject(data))
        {
            continue;
        }

        List<Vector3> corners = new List<Vector3>();
        corners.AddRange(data.boundingBox);
        if (corners.Count > 0)
        {
            float4 idColor = generateColorClass(data.id);
            float4 classColor = generateColorClass((int)data.label);

            if (data.objectTrackingState != sl.OBJECT_TRACKING_STATE.OK)
            {
                // Tracking is unreliable: fall back to the per-class color
                // and do not draw an ID label.
                idColor = classColor;
            }
            else
            {
                createIDRendering(data.position, idColor, data.id);
            }

            createBboxRendering(corners, idColor);
        }
    }
}
// Entry point: opens a ZED camera, enables positional tracking and human-body
// detection, then prints attributes of the first detected person for each of
// the first 100 detection frames before shutting everything down.
static void Main(string[] args)
{
    // Set Initialization parameters
    InitParameters init_params = new InitParameters();
    init_params.resolution = RESOLUTION.HD720;
    init_params.coordinateUnits = UNIT.METER;
    init_params.sdkVerbose = true;

    Camera zedCamera = new Camera(0);
    // Open the camera
    ERROR_CODE err = zedCamera.Open(ref init_params);
    if (err != ERROR_CODE.SUCCESS)
        Environment.Exit(-1);

    // Enable positional tracking.
    // If you want to have object tracking you need to enable positional tracking first.
    PositionalTrackingParameters trackingParams = new PositionalTrackingParameters();
    err = zedCamera.EnablePositionalTracking(ref trackingParams);
    if (err != ERROR_CODE.SUCCESS)
        Environment.Exit(-1);

    // Enable Object Detection
    object_detection_parameters = new ObjectDetectionParameters();
    // Different model can be chosen, optimizing the runtime or the accuracy
    object_detection_parameters.detectionModel = sl.DETECTION_MODEL.HUMAN_BODY_FAST;
    // Tracking detects objects across time and space
    object_detection_parameters.enableObjectTracking = true;
    // Run detection for every Camera grab
    object_detection_parameters.imageSync = true;
    err = zedCamera.EnableObjectDetection(ref object_detection_parameters);
    if (err != ERROR_CODE.SUCCESS)
        Environment.Exit(-1);

    // Create Runtime parameters
    RuntimeParameters runtimeParameters = new RuntimeParameters();

    // Create Object Detection frame handle (contains all the objects data)
    sl.Objects objects = new sl.Objects();
    // Create object detection runtime parameters (confidence, ...)
    ObjectDetectionRuntimeParameters obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
    obj_runtime_parameters.detectionConfidenceThreshold = 40;

    int nbDetection = 0;
    while (nbDetection < 100)
    {
        if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
        {
            // Retrieve Objects from Object detection
            zedCamera.RetrieveObjects(ref objects, ref obj_runtime_parameters);

            if (Convert.ToBoolean(objects.isNew))
            {
                // BUG FIX: nbDetection was never incremented, so the loop
                // condition above could never become false (infinite loop).
                // Count each frame that carries fresh detection data.
                nbDetection++;

                Console.WriteLine(objects.numObject + " Person(s) detected");
                Console.WriteLine();
                if (objects.numObject > 0)
                {
                    sl.ObjectData firstObject = objects.objectData[0];
                    Console.WriteLine("First Person attributes :");
                    // BUG FIX: the opening parenthesis was never closed in the output.
                    Console.WriteLine(" Confidence (" + firstObject.confidence + ")");

                    if (object_detection_parameters.enableObjectTracking)
                    {
                        Console.WriteLine(" Tracking ID: " + firstObject.id + " tracking state: " +
                                          firstObject.objectTrackingState + " / " + firstObject.actionState);
                    }

                    Console.WriteLine(" 3D Position: " + firstObject.position + " Velocity: " + firstObject.velocity);

                    Console.WriteLine(" Keypoints 2D");
                    // The body part meaning can be obtained by casting the index into a BODY_PARTS
                    // to get the BODY_PARTS index the getIdx function is available
                    for (int i = 0; i < firstObject.keypoints2D.Length; i++)
                    {
                        var kp = firstObject.keypoints2D[i];
                        Console.WriteLine("    " + (sl.BODY_PARTS)i + " " + kp.X + ", " + kp.Y);
                    }

                    // The BODY_PARTS can be linked as bones, using sl::BODY_BONES
                    // which gives the BODY_PARTS pair for each bone.
                    Console.WriteLine(" Keypoints 3D ");
                    for (int i = 0; i < firstObject.keypoints.Length; i++)
                    {
                        var kp = firstObject.keypoints[i];
                        Console.WriteLine("    " + (sl.BODY_PARTS)i + " " + kp.X + ", " + kp.Y + ", " + kp.Z);
                    }

                    Console.WriteLine();
                    Console.WriteLine("Press 'Enter' to continue...");
                    Console.ReadLine();
                }
            }
        }
    }

    // Disable object detection, positional tracking and close the camera
    zedCamera.DisableObjectDetection();
    zedCamera.DisablePositionalTracking("");
    zedCamera.Close();
}
// Refreshes the body-tracking viewer for a new frame: pushes the latest camera
// image, optionally draws per-object bounding boxes and ID labels (toggled with
// the 'B' key), and renders each detected skeleton as bone cylinders plus
// joint spheres.
public void update(Mat image, Objects objects)
{
    image_handler.pushNewImage(image);

    // Drop everything rendered for the previous frame.
    bones.clear();
    joints.clear();
    BBox_faces.clear();
    BBox_edges.clear();

    // Toggle bounding-box display with the 'B' key.
    // NOTE(review): IsKeyDown is level-triggered, so holding the key flips the
    // flag on every frame — edge detection on key press would be more robust,
    // but that needs state kept outside this method.
    if (Keyboard.IsKeyDown(Key.B))
    {
        showbbox = !showbbox;
    }

    // For each object
    for (int idx = 0; idx < objects.numObject; idx++)
    {
        sl.ObjectData obj = objects.objectData[idx];

        // Only show tracked objects
        if (renderObject(obj))
        {
            List<Vector3> bb_ = new List<Vector3>();
            bb_.AddRange(obj.boundingBox);
            float4 clr_id = generateColorClass(obj.id);
            float4 clr_class = generateColorClass((int)obj.label);
            Vector3[] keypoints = obj.keypoints;

            // CONSISTENCY FIX: guard against an empty bounding box before
            // rendering, matching the object-detection viewer's behavior.
            if (showbbox && bb_.Count > 0)
            {
                if (obj.objectTrackingState != sl.OBJECT_TRACKING_STATE.OK)
                {
                    // Tracking unreliable: use the per-class color, no ID label.
                    clr_id = clr_class;
                }
                else
                {
                    createIDRendering(obj.position, clr_id, obj.id);
                }
                createBboxRendering(bb_, clr_id);
            }

            if (keypoints.Length > 0)
            {
                // Draw one cylinder per bone; skip bones whose endpoints are
                // invalid (NaN) or at the origin (undetected keypoint).
                foreach (var limb in BODY_BONES)
                {
                    Vector3 kp_1 = keypoints[getIdx(limb.Item1)];
                    Vector3 kp_2 = keypoints[getIdx(limb.Item2)];
                    float norm_1 = kp_1.Length();
                    float norm_2 = kp_2.Length();
                    if (!float.IsNaN(norm_1) && norm_1 > 0 && !float.IsNaN(norm_2) && norm_2 > 0)
                    {
                        bones.addCylinder(new float3(kp_1.X, kp_1.Y, kp_1.Z),
                                          new float3(kp_2.X, kp_2.Y, kp_2.Z),
                                          clr_id);
                    }
                }

                // Draw one sphere per valid joint.
                for (int i = 0; i < (int)BODY_PARTS.LAST; i++)
                {
                    Vector3 kp = keypoints[i];
                    float norm = kp.Length();
                    if (!float.IsNaN(norm) && norm > 0)
                    {
                        joints.addSphere(new float3(kp.X, kp.Y, kp.Z), clr_id);
                    }
                }
            }
        }
    }
}