void addToTracklets(ref sl.Objects objects)
    {
        // Timestamp shared by every detection of this frame.
        ulong detection_ts = objects.timestamp;

        for (int obj_idx = 0; obj_idx < objects.numObject; obj_idx++)
        {
            sl.ObjectData detection = objects.objectData[obj_idx];
            int detection_id = detection.id;

            // Ignore detections that are not reliably tracked or whose 3D
            // position is invalid (infinite X marks an unresolved depth).
            if (detection.objectTrackingState != sl.OBJECT_TRACKING_STATE.OK || float.IsInfinity(detection.position.X))
            {
                continue;
            }

            // Feed the detection to every live tracklet carrying the same id.
            bool matched_existing = false;
            foreach (Tracklet candidate in tracklets)
            {
                if (candidate.is_alive && candidate.id == detection_id)
                {
                    candidate.addDetectedPoint(detection, detection_ts, smoothing_window_size);
                    matched_existing = true;
                }
            }

            // No live tracklet claimed this detection: start a new track.
            if (!matched_existing)
            {
                tracklets.Add(new Tracklet(detection, detection.label, detection_ts));
            }
        }
    }
 public Tracklet(sl.ObjectData obj, sl.OBJECT_CLASS type, ulong ts = 0)
 {
     // Identity and classification of the tracked object.
     id          = obj.id;
     object_type = type;

     // Seed both trajectories with the first detection. Two separate
     // TrackPoint instances are created on purpose so the raw history and
     // the drawing history never alias each other.
     positions.Add(new TrackPoint(obj.position, obj.objectTrackingState, ts));
     positions_to_draw.Add(new TrackPoint(obj.position, obj.objectTrackingState, ts));

     tracking_state          = obj.objectTrackingState;
     last_detected_timestamp = ts;

     // A freshly created tracklet starts alive with a full recovery counter.
     recovery_cpt = recovery_length;
     is_alive     = true;
 }
    /// <summary>
    /// Draws the 2D bounding box, label and depth of each renderable object on
    /// top of <paramref name="left_display"/>, with a semi-transparent fill.
    /// </summary>
    /// <param name="left_display">Image drawn onto, modified in place.</param>
    /// <param name="img_scale">Scale factor from SDK coordinates to display coordinates.</param>
    /// <param name="objects">Detected objects for the current frame.</param>
    /// <param name="render_mask">Currently unused in this implementation.</param>
    /// <param name="isTrackingON">Forwarded to Utils.renderObject to filter objects.</param>
    public static void render_2D(ref OpenCvSharp.Mat left_display, sl.float2 img_scale, ref sl.Objects objects, bool render_mask, bool isTrackingON)
    {
        // Boxes are filled on a copy so they can be blended back with
        // transparency at the end.
        OpenCvSharp.Mat  overlay    = left_display.Clone();
        OpenCvSharp.Rect roi_render = new OpenCvSharp.Rect(0, 0, left_display.Size().Width, left_display.Size().Height);

        int line_thickness = 2;

        for (int i = 0; i < objects.numObject; i++)
        {
            sl.ObjectData obj = objects.objectData[i];
            if (Utils.renderObject(obj, isTrackingON))
            {
                OpenCvSharp.Scalar base_color = Utils.generateColorID_u(obj.id);

                // All 4 corners of the 2D bounding box are required.
                if (obj.boundingBox2D.Length < 4)
                {
                    continue;
                }

                // Corners scaled to display resolution.
                Point top_left_corner     = Utils.cvt(obj.boundingBox2D[0], img_scale);
                Point top_right_corner    = Utils.cvt(obj.boundingBox2D[1], img_scale);
                Point bottom_right_corner = Utils.cvt(obj.boundingBox2D[2], img_scale);
                Point bottom_left_corner  = Utils.cvt(obj.boundingBox2D[3], img_scale);

                // Creation of the 2 horizontal lines
                Cv2.Line(left_display, top_left_corner, top_right_corner, base_color, line_thickness);
                Cv2.Line(left_display, bottom_left_corner, bottom_right_corner, base_color, line_thickness);
                // Creation of two vertical lines
                Utils.drawVerticalLine(ref left_display, bottom_left_corner, top_left_corner, base_color, line_thickness);
                Utils.drawVerticalLine(ref left_display, bottom_right_corner, top_right_corner, base_color, line_thickness);

                // Fill the box area on the overlay. Clip the ROI against the
                // image rectangle first so partially off-screen detections
                // cannot make SubMat throw.
                OpenCvSharp.Rect roi = new OpenCvSharp.Rect(top_left_corner.X, top_left_corner.Y, (int)top_right_corner.DistanceTo(top_left_corner), (int)bottom_right_corner.DistanceTo(top_right_corner));
                roi = roi.Intersect(roi_render);
                if (roi.Width > 0 && roi.Height > 0)
                {
                    using (OpenCvSharp.Mat box_region = overlay.SubMat(roi))
                    {
                        box_region.SetTo(base_color);
                    }
                }

                // Class label just above the distance text.
                sl.float2 position_image = getImagePosition(obj.boundingBox2D, img_scale);
                Cv2.PutText(left_display, obj.label.ToString(), new Point(position_image.x - 20, position_image.y - 12), HersheyFonts.HersheyComplexSmall, 0.5f, new Scalar(255, 255, 255, 255), 1);

                // Distance to camera along Z, only when the depth is valid.
                if (!float.IsInfinity(obj.position.Z))
                {
                    string text = Math.Abs(obj.position.Z).ToString("0.##M");
                    Cv2.PutText(left_display, text, new Point(position_image.x - 20, position_image.y), HersheyFonts.HersheyComplexSmall, 0.5, new Scalar(255, 255, 255, 255), 1);
                }
            }
        }

        // Here, overlay is as the left image, but with opaque masks on each detected objects
        Cv2.AddWeighted(left_display, 0.7, overlay, 0.3, 0.0, left_display);
        // Mat owns native memory: release the temporary explicitly.
        overlay.Dispose();
    }
    /// <summary>
    /// Appends a confirmed (OK) detection to this tracklet's histories and
    /// refreshes its tracking state and last-seen timestamp.
    /// </summary>
    /// <param name="obj">Detection data providing position and tracking state.</param>
    /// <param name="ts">Timestamp of the detection.</param>
    /// <param name="smoothing_window_size">Accepted for interface compatibility; not used in this method.</param>
    public void addDetectedPoint(sl.ObjectData obj, ulong ts, int smoothing_window_size = 0)
    {
        // Recovery bookkeeping: a detection arriving right after a PREDICTED
        // point restarts the recovery counter, otherwise recovery progresses.
        int last_idx = positions.Count - 1;
        if (last_idx >= 0)
        {
            recovery_cpt = (positions[last_idx].tracking_state == TrackPointState.PREDICTED) ? 0 : recovery_cpt + 1;
        }

        // Record the detection in both the raw and the drawing histories.
        positions.Add(new TrackPoint(obj.position, TrackPointState.OK, ts));
        positions_to_draw.Add(new TrackPoint(obj.position, TrackPointState.OK, ts));

        tracking_state          = obj.objectTrackingState;
        last_detected_timestamp = ts;
    }
    /// <summary>
    /// Renders the top-down tracking view: transforms object positions into the
    /// WORLD frame, draws the background and scale, then either the tracklet
    /// trails (tracking on) or plain per-object markers (tracking off).
    /// </summary>
    /// <param name="objects">Detected objects; positions are updated to WORLD frame in place.</param>
    /// <param name="current_camera_pose">Camera pose used for the camera-to-world transform.</param>
    /// <param name="tracking_view">Output image, overwritten with the rendered view.</param>
    /// <param name="tracking_enabled">Selects tracklet rendering vs. instantaneous positions.</param>
    public void generate_view(ref sl.Objects objects, sl.Pose current_camera_pose, ref OpenCvSharp.Mat tracking_view, bool tracking_enabled)
    {
        // To get position in WORLD reference
        for (int i = 0; i < objects.numObject; i++)
        {
            sl.ObjectData obj = objects.objectData[i];

            Vector3 pos     = obj.position;
            Vector3 new_pos = Vector3.Transform(pos, current_camera_pose.rotation) + current_camera_pose.translation;
            obj.position = new_pos;

            // BUGFIX: write the modified copy back into the array. sl.ObjectData
            // is a value type in the ZED C# SDK, so mutating the local copy
            // alone silently discards the transform; the write-back is also
            // harmless if the type were ever a reference type.
            objects.objectData[i] = obj;
        }

        // Initialize visualization (background is generated once and cached).
        if (!has_background_ready)
        {
            generateBackground();
        }

        background.CopyTo(tracking_view);
        // Scale
        drawScale(ref tracking_view);

        if (tracking_enabled)
        {
            // First add new points, and remove the ones that are too old
            ulong current_timestamp = objects.timestamp;
            addToTracklets(ref objects);
            detectUnchangedTrack(current_timestamp);
            pruneOldPoints(current_timestamp);

            // Draw all tracklets
            drawTracklets(ref tracking_view, current_camera_pose);
        }
        else
        {
            // No tracking: just draw the current-frame positions.
            drawPosition(ref objects, ref tracking_view, current_camera_pose);
        }
    }
    /// <summary>
    /// Draws one marker per detected object on the tracking view:
    /// a filled circle for persons, a filled rectangle for vehicles.
    /// Other classes are not drawn.
    /// </summary>
    void drawPosition(ref sl.Objects objects, ref OpenCvSharp.Mat tracking_view, sl.Pose current_camera_pose)
    {
        for (int idx = 0; idx < objects.numObject; idx++)
        {
            sl.ObjectData data  = objects.objectData[idx];
            Scalar        color = Utils.generateColorClass_u((int)data.label);

            if (data.label == sl.OBJECT_CLASS.PERSON)
            {
                // Person = filled circle at the projected position.
                Cv2.Circle(tracking_view, toCVPoint(data.position, current_camera_pose), 5, color, 5);
            }
            else if (data.label == sl.OBJECT_CLASS.VEHICLE)
            {
                // Vehicle = filled rectangle, only when a 3D box is available.
                if (data.boundingBox.Length > 0)
                {
                    Point rect_center = toCVPoint(data.position, current_camera_pose);
                    int   half_width  = 10;
                    Point upper_left  = rect_center - new Point(half_width, half_width * 2);
                    Point lower_right = rect_center + new Point(half_width, half_width * 2);

                    Cv2.Rectangle(tracking_view, upper_left, lower_right, color, Cv2.FILLED);
                }
            }
            // Remaining classes (including LAST) are intentionally skipped.
        }
    }
    /// <summary>
    /// Draws 2D skeletons (bones and joints) for each renderable object on top
    /// of <paramref name="left_display"/>, then fades the drawing slightly by
    /// blending the pre-drawing image back in.
    /// </summary>
    /// <param name="left_display">Image drawn onto, modified in place.</param>
    /// <param name="img_scale">Scale factor from SDK coordinates to display coordinates.</param>
    /// <param name="objects">Detected objects with 2D keypoints.</param>
    /// <param name="showOnlyOK">Forwarded to renderObject to filter objects.</param>
    public static void render_2D(ref OpenCvSharp.Mat left_display, sl.float2 img_scale, ref sl.Objects objects, bool showOnlyOK)
    {
        // Snapshot of the image before drawing; blended back at the end.
        OpenCvSharp.Mat overlay = left_display.Clone();
        // Starts at (1,1): keypoints at the origin are treated as invalid and
        // rejected by the Contains() checks below.
        OpenCvSharp.Rect roi_render = new OpenCvSharp.Rect(1, 1, left_display.Size().Width, left_display.Size().Height);

        for (int i = 0; i < objects.numObject; i++)
        {
            sl.ObjectData obj = objects.objectData[i];
            if (renderObject(obj, showOnlyOK))
            {
                // Draw Skeleton bones
                OpenCvSharp.Scalar base_color = generateColorID(obj.id);
                foreach (var part in SKELETON_BONES)
                {
                    var kp_a = cvt(obj.keypoints2D[(int)part.Item1], img_scale);
                    var kp_b = cvt(obj.keypoints2D[(int)part.Item2], img_scale);
                    // Only draw a bone when both endpoints are valid/visible.
                    if (roi_render.Contains(kp_a) && roi_render.Contains(kp_b))
                    {
                        Cv2.Line(left_display, kp_a, kp_b, base_color, 1, LineTypes.AntiAlias);
                    }
                }

                // Synthetic "spine" point: midpoint of the two hips.
                var hip_left  = obj.keypoints2D[(int)sl.BODY_PARTS.LEFT_HIP];
                var hip_right = obj.keypoints2D[(int)sl.BODY_PARTS.RIGHT_HIP];
                var spine     = (hip_left + hip_right) / 2;
                var neck      = obj.keypoints2D[(int)sl.BODY_PARTS.NECK];

                // Spine-to-neck bone, only when hips and neck were all detected
                // (non-positive coordinates mark missing keypoints).
                if (hip_left.X > 0 && hip_left.Y > 0 && hip_right.X > 0 && hip_right.Y > 0 && neck.X > 0 && neck.Y > 0)
                {
                    var spine_a = cvt(spine, img_scale);
                    var spine_b = cvt(neck, img_scale);
                    if (roi_render.Contains(spine_a) && roi_render.Contains(spine_b))
                    {
                        Cv2.Line(left_display, spine_a, spine_b, base_color, 1, LineTypes.AntiAlias);
                    }
                }

                // Draw Skeleton joints
                foreach (var kp in obj.keypoints2D)
                {
                    Point cv_kp = cvt(kp, img_scale);
                    if (roi_render.Contains(cv_kp))
                    {
                        Cv2.Circle(left_display, cv_kp, 3, base_color, -1);
                    }
                }

                // Joint for the synthetic spine point.
                if (hip_left.X > 0 && hip_left.Y > 0 && hip_right.X > 0 && hip_right.Y > 0)
                {
                    Point cv_spine = cvt(spine, img_scale);
                    if (roi_render.Contains(cv_spine))
                    {
                        Cv2.Circle(left_display, cv_spine, 3, base_color, -1);
                    }
                }
            }
        }

        // Blend the untouched snapshot back in: the skeleton drawing above
        // ends up rendered at 90% opacity.
        Cv2.AddWeighted(left_display, 0.9, overlay, 0.1, 0.0, left_display);
        // BUGFIX: Mat owns native memory; the clone was previously leaked.
        overlay.Dispose();
    }