Example 1
        public MainWindow(string[] args)
        {
            // Set configuration parameters
            InitParameters init_params = new InitParameters();

            init_params.resolution           = RESOLUTION.HD720;
            init_params.cameraFPS            = 60;
            init_params.depthMode            = DEPTH_MODE.ULTRA;
            init_params.coordinateUnits      = UNIT.METER;
            init_params.coordinateSystem     = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
            init_params.depthMaximumDistance = 15f;

            parseArgs(args, ref init_params);
            // Open the camera
            zedCamera = new Camera(0);
            ERROR_CODE err = zedCamera.Open(ref init_params);

            if (err != ERROR_CODE.SUCCESS)
            {
                Environment.Exit(-1);
            }

            if (zedCamera.CameraModel != sl.MODEL.ZED2)
            {
                Console.WriteLine(" ERROR : Use ZED2 Camera only");
                return;
            }

            // Enable tracking (mandatory for object detection)
            Quaternion quat = Quaternion.Identity;
            Vector3    vec  = Vector3.Zero;

            zedCamera.EnablePositionalTracking(ref quat, ref vec);

            runtimeParameters = new RuntimeParameters();
            // Enable the object detection module
            ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();

            obj_det_params.enableObjectTracking = true; // the object detection will track objects across multiple images instead of on an image-by-image basis
            obj_det_params.enable2DMask         = false;
            obj_det_params.enableBodyFitting    = true; // smooth skeleton movements
            obj_det_params.imageSync            = true; // the object detection is synchronized to the image
            obj_det_params.detectionModel       = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;

            zedCamera.EnableObjectDetection(ref obj_det_params);

            // Create ZED Objects filled in the main loop
            object_frame = new Objects();
            zedMat       = new Mat();
            int Height = zedCamera.ImageHeight;
            int Width  = zedCamera.ImageWidth;

            Resolution res = new Resolution((uint)Width, (uint)Height);

            zedMat.Create(res, MAT_TYPE.MAT_8U_C4, MEM.CPU);

            // Create OpenGL Viewer
            viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

            // Configure object detection runtime parameters
            obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
            obj_runtime_parameters.detectionConfidenceThreshold = 35;
            obj_runtime_parameters.objectClassFilter            = new int[(int)OBJECT_CLASS.LAST];
            obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);

            // To set a specific threshold
            obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
            obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = 35;

            // Create OpenGL window
            CreateWindow();
        }
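Note: the constructor above only configures the pipeline; zedMat and object_frame are actually filled in the render loop. The sketch below shows what such a loop body might look like, assuming the usual Grab / RetrieveImage / RetrieveObjects overloads of the ZED C# wrapper and the fields created above; the method name Render and the hand-off to the GLViewer are placeholders that depend on the sample's own viewer code.

        // Hypothetical per-frame update using the fields initialized in the constructor above
        private void Render()
        {
            if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
            {
                // Retrieve the left image into zedMat (CPU memory)
                zedCamera.RetrieveImage(zedMat, VIEW.LEFT, MEM.CPU);

                // Retrieve the detected/tracked objects for this frame
                zedCamera.RetrieveObjects(ref object_frame, ref obj_runtime_parameters);

                // Hand zedMat and object_frame to the GLViewer here; the exact method
                // depends on the sample's GLViewer implementation.
            }
        }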
Example 2
        static void Main(string[] args)
        {
            // Set Initialization parameters
            InitParameters init_params = new InitParameters();
            init_params.resolution = RESOLUTION.HD720;
            init_params.coordinateUnits = UNIT.METER;
            init_params.sdkVerbose = true;

            Camera zedCamera = new Camera(0);
            // Open the camera
            ERROR_CODE err = zedCamera.Open(ref init_params);
            if (err != ERROR_CODE.SUCCESS)
                Environment.Exit(-1);

            // Enable positional tracking
            PositionalTrackingParameters trackingParams = new PositionalTrackingParameters();
            // If you want to have object tracking you need to enable positional tracking first
            err = zedCamera.EnablePositionalTracking(ref trackingParams);
            if (err != ERROR_CODE.SUCCESS)
                Environment.Exit(-1);

            // Enable Object Detection
            object_detection_parameters = new ObjectDetectionParameters();
            // Different models can be chosen, optimizing for runtime speed or for accuracy
            object_detection_parameters.detectionModel = sl.DETECTION_MODEL.HUMAN_BODY_FAST;
            // Tracking keeps a consistent ID for each detected object across time and space
            object_detection_parameters.enableObjectTracking = true;
            // Run detection on every camera grab
            object_detection_parameters.imageSync = true;
            err = zedCamera.EnableObjectDetection(ref object_detection_parameters);
            if (err != ERROR_CODE.SUCCESS)
                Environment.Exit(-1);

            // Create Runtime parameters
            RuntimeParameters runtimeParameters = new RuntimeParameters();

            // Create Object Detection frame handle (contains all the objects data)
            sl.Objects objects = new sl.Objects();
            // Create object detection runtime parameters (confidence, ...)
            ObjectDetectionRuntimeParameters obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
            obj_runtime_parameters.detectionConfidenceThreshold = 40;


            int nbDetection = 0;
            while (nbDetection < 100)
            {
                if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
                {
                    // Retrieve Objects from Object detection
                    zedCamera.RetrieveObjects(ref objects, ref obj_runtime_parameters);
                    
                    if (Convert.ToBoolean(objects.isNew))
                    {
                        Console.WriteLine(objects.numObject + " Person(s) detected");
                        Console.WriteLine();
                        if (objects.numObject > 0)
                        {
                            sl.ObjectData firstObject = objects.objectData[0];

                            Console.WriteLine("First Person attributes :");
                            Console.WriteLine(" Confidence (" + firstObject.confidence);

                            if (object_detection_parameters.enableObjectTracking)
                            {
                                Console.WriteLine(" Tracking ID: " + firstObject.id + " tracking state: " + firstObject.objectTrackingState +
                                    " / " + firstObject.actionState);
                            }

                            Console.WriteLine(" 3D Position: " + firstObject.position +
                                              " Velocity: " + firstObject.velocity);

                            Console.WriteLine(" Keypoints 2D");
                            // The body part meaning can be obtained by casting the index to BODY_PARTS;
                            // conversely, the getIdx function is available to get the index of a given BODY_PARTS value
                            for (int i = 0; i < firstObject.keypoints2D.Length; i++)
                            {
                                var kp = firstObject.keypoints2D[i];
                                Console.WriteLine("     " + (sl.BODY_PARTS)i + " " + kp.X + ", " + kp.Y);
                            }

                            // The BODY_PARTS can be linked as bones using sl.BODY_BONES, which gives the BODY_PARTS pair for each bone
                            Console.WriteLine(" Keypoints 3D ");
                            for (int i = 0; i < firstObject.keypoints.Length; i++)
                            {
                                var kp = firstObject.keypoints[i];
                                Console.WriteLine("     " + (sl.BODY_PARTS)i + " " + kp.X + ", " + kp.Y + ", " + kp.Z);
                            }

                            Console.WriteLine();
                            Console.WriteLine("Press 'Enter' to continue...");
                            Console.ReadLine();
                        }
                        nbDetection++;
                    }
                }
            }

            // Disable object detection, positional tracking and close the camera
            zedCamera.DisableObjectDetection();
            zedCamera.DisablePositionalTracking("");
            zedCamera.Close();
        }
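Note: in the keypoint loops above, keypoints the SDK could not estimate for a given frame are typically reported with NaN coordinates (an assumption worth checking against your SDK version). A small sketch that skips such keypoints, reusing the types from the example, could look like this:

        // Hypothetical helper: print only the 2D keypoints that were actually estimated.
        // Assumes undetected keypoints are reported with NaN coordinates.
        static void PrintValidKeypoints2D(sl.ObjectData person)
        {
            for (int i = 0; i < person.keypoints2D.Length; i++)
            {
                var kp = person.keypoints2D[i];
                if (float.IsNaN(kp.X) || float.IsNaN(kp.Y))
                    continue; // keypoint not detected in this frame

                Console.WriteLine("     " + (sl.BODY_PARTS)i + " " + kp.X + ", " + kp.Y);
            }
        }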
Example 3
    public MainWindow(string[] args)
    {
        // Set configuration parameters
        InitParameters init_params = new InitParameters();

        init_params.resolution             = RESOLUTION.HD1080;
        init_params.depthMode              = DEPTH_MODE.ULTRA;
        init_params.coordinateUnits        = UNIT.METER;
        init_params.coordinateSystem       = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
        init_params.depthMaximumDistance   = 10f;
        init_params.cameraDisableSelfCalib = true;

        maxDepthDistance = init_params.depthMaximumDistance;
        parseArgs(args, ref init_params);
        // Open the camera
        zedCamera = new Camera(0);
        ERROR_CODE err = zedCamera.Open(ref init_params);

        if (err != ERROR_CODE.SUCCESS)
        {
            Environment.Exit(-1);
        }

        if (zedCamera.CameraModel != sl.MODEL.ZED2)
        {
            Console.WriteLine(" ERROR : Use ZED2 Camera only");
            return;
        }

        // Enable tracking (mandatory for object detection)
        Quaternion quat = Quaternion.Identity;
        Vector3    vec  = Vector3.Zero;

        zedCamera.EnablePositionalTracking(ref quat, ref vec);

        runtimeParameters = new RuntimeParameters();

        // Enable the object detection module
        ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();

        obj_det_params.enableObjectTracking = true; // the object detection will track objects across multiple images instead of on an image-by-image basis
        isTrackingON = obj_det_params.enableObjectTracking;
        obj_det_params.enable2DMask   = false;
        obj_det_params.imageSync      = true; // the object detection is synchronized to the image
        obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;

        if (USE_BATCHING)
        {
            batchParameters                = new BatchParameters();
            batchParameters.latency        = 2.0f;
            batchParameters.enable         = true;
            batchHandler                   = new BatchSystemHandler((int)batchParameters.latency * 2);
            obj_det_params.batchParameters = batchParameters;
        }

        zedCamera.EnableObjectDetection(ref obj_det_params);

        // Configure object detection runtime parameters
        obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
        detection_confidence   = 60;
        obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
        obj_runtime_parameters.objectClassFilter            = new int[(int)OBJECT_CLASS.LAST];
        obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);
        //obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(true);
        // To set a specific threshold
        obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
        obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = detection_confidence;
        //obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.VEHICLE] = detection_confidence;

        // Create ZED Objects filled in the main loop
        objects   = new Objects();
        imageLeft = new sl.Mat();
        int Height = zedCamera.ImageHeight;
        int Width  = zedCamera.ImageWidth;

        displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
        Resolution tracksRes = new Resolution(400, (uint)displayRes.height);

        // Create a global image to store both the image view and the tracks view
        globalImage = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width + (int)tracksRes.width, OpenCvSharp.MatType.CV_8UC4);
        // Get a reference on the image part of globalImage
        imageLeftOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect(0, 0, (int)displayRes.width, (int)displayRes.height));
        // Get a reference on the tracks part of globalImage
        imageTrackOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect((int)displayRes.width, 0, (int)tracksRes.width, (int)tracksRes.height));
        // Create an sl.Mat for the left image and wrap its memory in an OpenCV Mat for rendering
        imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);
        imageRenderLeft = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());
        imgScale        = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);

        // Create OpenGL Viewer
        viewer = new GLViewer();

        camWorldPose  = new Pose();
        camCameraPose = new Pose();
        pointCloud    = new sl.Mat();
        pcRes         = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
        pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

        // 2D tracks
        trackViewGenerator = new TrackingViewer(tracksRes, (int)zedCamera.GetCameraFPS(), maxDepthDistance, 3);
        trackViewGenerator.setCameraCalibration(zedCamera.GetCalibrationParameters());

        window_name = "ZED | 2D View and Bird's Eye View";
        Cv2.NamedWindow(window_name, WindowMode.Normal); // Create window
        Cv2.CreateTrackbar("Confidence", window_name, ref detection_confidence, 100);

        // Create OpenGL window
        CreateWindow();
    }
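Note: since the OpenCV trackbar writes into detection_confidence, the runtime parameters must be refreshed before each RetrieveObjects call in the render loop. A minimal sketch, using only the fields set up in the constructor above, could be:

    // Sketch: keep the detection thresholds in sync with the "Confidence" trackbar value
    private void UpdateDetectionConfidence()
    {
        obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
        obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = detection_confidence;
    }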
Example 4
        public MainWindow(string[] args)
        {
            // Set configuration parameters
            InitParameters init_params = new InitParameters();

            init_params.resolution       = RESOLUTION.HD1080;
            init_params.cameraFPS        = 30;
            init_params.depthMode        = DEPTH_MODE.ULTRA;
            init_params.coordinateUnits  = UNIT.METER;
            init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;

            parseArgs(args, ref init_params);
            // Open the camera
            zedCamera = new Camera(0);
            ERROR_CODE err = zedCamera.Open(ref init_params);

            if (err != ERROR_CODE.SUCCESS)
            {
                Environment.Exit(-1);
            }

            if (!(zedCamera.CameraModel == sl.MODEL.ZED2 || zedCamera.CameraModel == sl.MODEL.ZED2i))
            {
                Console.WriteLine(" ERROR : Use ZED2/ZED2i Camera only");
                return;
            }

            // Enable tracking (mandatory for object detection)
            PositionalTrackingParameters positionalTrackingParameters = new PositionalTrackingParameters();

            zedCamera.EnablePositionalTracking(ref positionalTrackingParameters);

            runtimeParameters = new RuntimeParameters();

            // Enable the object detection module
            ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();

            obj_det_params.enableObjectTracking = true; // the object detection will track objects across multiple images instead of on an image-by-image basis
            isTrackingON = obj_det_params.enableObjectTracking;
            obj_det_params.enable2DMask      = false;
            obj_det_params.enableBodyFitting = true; // smooth skeleton movements
            obj_det_params.imageSync         = true; // the object detection is synchronized to the image
            obj_det_params.detectionModel    = sl.DETECTION_MODEL.HUMAN_BODY_ACCURATE;

            zedCamera.EnableObjectDetection(ref obj_det_params);

            // Create ZED Objects filled in the main loop
            camPose = new sl.Pose();
            objects = new Objects();
            int Height = zedCamera.ImageHeight;
            int Width  = zedCamera.ImageWidth;

            imageLeft  = new Mat();
            displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
            imgScale   = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);
            imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);

            imageLeftOcv = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());

            pointCloud = new sl.Mat();
            pcRes      = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
            pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

            // Create OpenGL Viewer
            viewer = new GLViewer(new Resolution((uint)Width, (uint)Height));

            // Configure object detection runtime parameters
            obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
            obj_runtime_parameters.detectionConfidenceThreshold = 40;

            window_name = "ZED | 2D View";
            Cv2.NamedWindow(window_name, WindowMode.Normal); // Create window

            // Create OpenGL window
            CreateWindow();
        }
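Note: as in the previous examples, the per-frame work happens outside this constructor. The sketch below assumes the standard Grab / RetrieveImage / RetrieveMeasure / RetrieveObjects / GetPosition overloads of the ZED C# wrapper and the fields created above; the skeleton overlay and 3D rendering are left to the sample's own drawing code, and the method name UpdateFrame is a placeholder.

        // Hypothetical per-frame update using the fields initialized above
        private void UpdateFrame()
        {
            if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
            {
                // Left image at display resolution, point cloud at reduced resolution
                zedCamera.RetrieveImage(imageLeft, VIEW.LEFT, MEM.CPU, displayRes);
                zedCamera.RetrieveMeasure(pointCloud, MEASURE.XYZRGBA, MEM.CPU, pcRes);

                // Camera pose and detected bodies for this frame
                zedCamera.GetPosition(ref camPose, REFERENCE_FRAME.WORLD);
                zedCamera.RetrieveObjects(ref objects, ref obj_runtime_parameters);

                // Show the 2D view; drawing the skeletons on imageLeftOcv is omitted here
                Cv2.ImShow(window_name, imageLeftOcv);
                Cv2.WaitKey(10);
            }
        }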
Example 5
        static void Main(string[] args)
        {
            // Set Initialization parameters
            InitParameters init_params = new InitParameters();

            init_params.resolution       = RESOLUTION.HD2K;
            init_params.coordinateUnits  = UNIT.METER;
            init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
            init_params.depthMode        = DEPTH_MODE.PERFORMANCE;

            Camera zedCamera = new Camera(0);
            // Open the camera
            ERROR_CODE err = zedCamera.Open(ref init_params);

            if (err != ERROR_CODE.SUCCESS)
            {
                Environment.Exit(-1);
            }

            // Enable positional tracking
            PositionalTrackingParameters trackingParams = new PositionalTrackingParameters();

            err = zedCamera.EnablePositionalTracking(ref trackingParams);
            if (err != ERROR_CODE.SUCCESS)
            {
                Environment.Exit(-1);
            }

            // Enable Object Detection
            ObjectDetectionParameters object_detection_parameters = new ObjectDetectionParameters();

            object_detection_parameters.detectionModel       = sl.DETECTION_MODEL.MULTI_CLASS_BOX;
            object_detection_parameters.enableObjectTracking = true;
            err = zedCamera.EnableObjectDetection(ref object_detection_parameters);
            if (err != ERROR_CODE.SUCCESS)
            {
                Environment.Exit(-1);
            }

            // Create Runtime parameters
            RuntimeParameters runtimeParameters = new RuntimeParameters();

            // Create Object Detection frame handle (contains all the objects data)
            sl.Objects object_frame = new sl.Objects();
            // Create object detection runtime parameters (confidence, ...)
            ObjectDetectionRuntimeParameters obj_runtime_parameters = new ObjectDetectionRuntimeParameters();

            obj_runtime_parameters.detectionConfidenceThreshold = 50;


            int i = 0;

            while (i < 1000)
            {
                if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
                {
                    // Retrieve Objects from Object detection
                    err = zedCamera.RetrieveObjects(ref object_frame, ref obj_runtime_parameters);

                    // Display the data every 10 frames
                    if (i % 10 == 0)
                    {
                        Console.WriteLine("Nb Objects Detection : " + object_frame.numObject);
                        for (int p = 0; p < object_frame.numObject; p++)
                        {
                            Console.WriteLine("Position of object " + p + " : " + object_frame.objectData[p].position + "Tracked? : " + object_frame.objectData[p].objectTrackingState);
                        }
                    }
                    i++;
                }
            }

            // Disable object detection, positional tracking and close the camera
            zedCamera.DisableObjectDetection();
            zedCamera.DisablePositionalTracking("");
            zedCamera.Close();
        }
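Note: when enableObjectTracking is on, positions and velocities are only meaningful for objects whose tracking state is OK. A small, hedged variant of the printout above that filters on the tracking state (assuming the sl.OBJECT_TRACKING_STATE enum of the C# wrapper) could be:

        // Sketch: only report objects that are currently tracked (OBJECT_TRACKING_STATE.OK)
        static void PrintTrackedObjects(sl.Objects frame)
        {
            for (int p = 0; p < frame.numObject; p++)
            {
                sl.ObjectData obj = frame.objectData[p];
                if (obj.objectTrackingState != sl.OBJECT_TRACKING_STATE.OK)
                    continue; // object is searching or lost; its position may be unreliable

                Console.WriteLine("Object " + obj.id + " position: " + obj.position);
            }
        }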