Example #1
0
    /// <summary>
    /// Tracks the pose of the selected SteamVR device and applies it to this
    /// transform with a configurable latency delay (latencyCompensation, in ms).
    /// </summary>
    /// <param name="newpose">Latest pose reported by SteamVR for the tracked device.</param>
    protected void OnNewPoses(TrackedDevicePose_t newpose)
    {
        //No device selected: nothing to track.
        if (index == EIndex.None)
        {
            return;
        }

        isValid = false;

        //Ignore poses from devices that are not connected.
        if (!newpose.bDeviceIsConnected)
        {
            return;
        }

        //Ignore poses SteamVR flags as invalid.
        if (!newpose.bPoseIsValid)
        {
            return;
        }

        isValid = true;

        //Get the position and rotation of our tracked device.
        var pose = new SteamVR_Utils.RigidTransform(newpose.mDeviceToAbsoluteTracking);

        //Save those values under key 1, matching the key used by GetValuePosition below.
        RegisterPosition(1, pose.pos, pose.rot);

        //Delay the saved values inside GetValuePosition() by a factor of latencyCompensation in milliseconds.
        sl.Pose p = GetValuePosition(1, (float)(latencyCompensation / 1000.0f));
        transform.localPosition = p.translation;
        transform.localRotation = p.rotation;
    }
Example #2
0
    /// <summary>
    /// Projects a 3D position into 2D top-view image coordinates, expressed
    /// relative to the given camera pose and quantized onto the x/z grid.
    /// </summary>
    Point toCVPoint(Vector3 position, sl.Pose pose)
    {
        //Move the point into the camera's local frame.
        Quaternion invRotation = Quaternion.Inverse(pose.rotation);
        Vector3    local       = Vector3.Transform(position - pose.translation, invRotation);

        //Quantize to grid cells, rounding to the nearest cell (+0.5 before truncation).
        int u = (int)((local.X - x_min) / x_step + .5f);
        int v = (int)((local.Z - z_min) / z_step + .5f);

        return new Point(u, v);
    }
Example #3
0
    /// <summary>
    /// Compute the delayed position and rotation from the history stored in the poseData dictionary.
    /// Linearly interpolates between the two stored samples that bracket the target time
    /// (Time.time - timeDelay), optionally applying HMD drift correction for controllers.
    /// </summary>
    /// <param name="keyindex">Key identifying which tracked device's pose history to query.</param>
    /// <param name="timeDelay">Delay in seconds applied when sampling the history.</param>
    /// <returns>The interpolated, delayed pose; the newest stored pose if no newer sample exists.</returns>
    private sl.Pose GetValuePosition(int keyindex, float timeDelay)
    {
        sl.Pose p = new sl.Pose();
        if (poseData.ContainsKey(keyindex))
        {
            //Get the saved position & rotation. Default to the newest sample as a fallback.
            p.translation = poseData[keyindex][poseData[keyindex].Count - 1].position;
            p.rotation    = poseData[keyindex][poseData[keyindex].Count - 1].rotation;

            //The timestamp we want to reproduce, timeDelay seconds in the past.
            float idealTS = (Time.time - timeDelay);

            for (int i = 0; i < poseData[keyindex].Count; ++i)
            {
                //Find the first sample newer than the target timestamp.
                if (poseData[keyindex][i].timestamp > idealTS)
                {
                    int currentIndex = i;
                    if (currentIndex > 0)
                    {
                        //Calculate the time between the pose and the delayed pose.
                        float timeBetween = poseData[keyindex][currentIndex].timestamp - poseData[keyindex][currentIndex - 1].timestamp;
                        float alpha       = ((Time.time - poseData[keyindex][currentIndex - 1].timestamp) - timeDelay) / timeBetween;

                        //Lerp to the next position based on the time determined above.
                        Vector3    pos = Vector3.Lerp(poseData[keyindex][currentIndex - 1].position, poseData[keyindex][currentIndex].position, alpha);
                        Quaternion rot = Quaternion.Lerp(poseData[keyindex][currentIndex - 1].rotation, poseData[keyindex][currentIndex].rotation, alpha);

                        //Apply new values.
                        p             = new sl.Pose();
                        p.translation = pos;
                        p.rotation    = rot;

                        //Add drift correction, but only if the user hasn't disabled it, it's on an actual controller, and the zedRigRoot position won't be affected.
                        if (correctControllerDrift == true &&
                            (deviceToTrack == Devices.LeftController || deviceToTrack == Devices.RightController) &&
                            (zedManager != null && zedManager.IsStereoRig == true && !zedManager.transform.IsChildOf(transform)))
                        {
                            //Compensate for positional drift by measuring the distance between HMD and ZED rig root (the head's center).
#if UNITY_2019_3_OR_NEWER
                            InputDevice head = InputDevices.GetDeviceAtXRNode(XRNode.Head);
                            head.TryGetFeatureValue(CommonUsages.devicePosition, out Vector3 zedhmdposoffset);
#else
                            Vector3 zedhmdposoffset = zedRigRoot.position - InputTracking.GetLocalPosition(XRNode.Head);
#endif
                            p.translation += zedhmdposoffset;
                        }

                        //Removes used elements from the dictionary, keeping the sample just before the target time.
                        poseData[keyindex].RemoveRange(0, currentIndex - 1);
                    }
                    return(p);
                }
            }
        }
        return(p);
    }
Example #4
0
    /// <summary>
    /// Compute the delayed position and rotation from the history stored in the poseData dictionary.
    /// Linearly interpolates between the two stored samples that bracket the target time
    /// (Time.time - timeDelay), applying HMD drift correction for controllers.
    /// </summary>
    /// <param name="keyindex">Key identifying which tracked device's pose history to query.</param>
    /// <param name="timeDelay">Delay in seconds applied when sampling the history.</param>
    /// <returns>The interpolated, delayed pose; the newest stored pose if no newer sample exists.</returns>
    private sl.Pose GetValuePosition(int keyindex, float timeDelay)
    {
        sl.Pose p = new sl.Pose();
        if (poseData.ContainsKey(keyindex))
        {
            //Get the saved position & rotation. Default to the newest sample as a fallback.
            p.translation = poseData[keyindex][poseData[keyindex].Count - 1].position;
            p.rotation    = poseData[keyindex][poseData[keyindex].Count - 1].rotation;

            //The timestamp we want to reproduce, timeDelay seconds in the past.
            float idealTS = (Time.time - timeDelay);

            for (int i = 0; i < poseData[keyindex].Count; ++i)
            {
                //Find the first sample newer than the target timestamp.
                if (poseData[keyindex][i].timestamp > idealTS)
                {
                    int currentIndex = i;
                    if (currentIndex > 0)
                    {
                        //Calculate the time between the pose and the delayed pose.
                        float timeBetween = poseData[keyindex][currentIndex].timestamp - poseData[keyindex][currentIndex - 1].timestamp;
                        float alpha       = ((Time.time - poseData[keyindex][currentIndex - 1].timestamp) - timeDelay) / timeBetween;

                        //Lerp to the next position based on the time determined above.
                        Vector3    pos = Vector3.Lerp(poseData[keyindex][currentIndex - 1].position, poseData[keyindex][currentIndex].position, alpha);
                        Quaternion rot = Quaternion.Lerp(poseData[keyindex][currentIndex - 1].rotation, poseData[keyindex][currentIndex].rotation, alpha);

                        //Apply new values.
                        p             = new sl.Pose();
                        p.translation = pos;
                        p.rotation    = rot;

                        //Add drift correction, but only if it's on an actual controller and the zedRigRoot position won't be affected.
                        //Guard against a missing zedManager to avoid a NullReferenceException.
                        if ((deviceToTrack == Devices.LeftController || deviceToTrack == Devices.RightController) &&
                            zedManager != null && !zedManager.transform.IsChildOf(transform))
                        {
                            //Compensate for positional drift by measuring the distance between HMD and ZED rig root (the head's center).
                            Vector3 zedhmdposoffset = zedRigRoot.position - UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.Head);
                            p.translation += zedhmdposoffset;
                        }

                        //Removes used elements from the dictionary, keeping the sample just before the target time.
                        poseData[keyindex].RemoveRange(0, currentIndex - 1);
                    }
                    return(p);
                }
            }
        }
        return(p);
    }
Example #5
0
    /// <summary>
    ///     Compute the delayed position and rotation from history.
    ///     Linearly interpolates between the two stored samples that bracket
    ///     the target time (Time.time - timeDelay).
    /// </summary>
    /// <param name="indx">Key identifying which tracked device's pose history to query.</param>
    /// <param name="timeDelay">Delay in seconds applied when sampling the history.</param>
    /// <returns>The interpolated, delayed pose; the newest stored pose if no newer sample exists.</returns>
    private Pose GetValuePosition(int indx, float timeDelay)
    {
        var p = new Pose();

        if (poseData.ContainsKey(indx))
        {
            //Default to the most recent saved position & rotation.
            p.translation = poseData[indx][poseData[indx].Count - 1].position;
            p.rotation    = poseData[indx][poseData[indx].Count - 1].rotation;

            //The timestamp we want to reproduce, timeDelay seconds in the past.
            var idealTS = Time.time - timeDelay;

            for (var i = 0; i < poseData[indx].Count; ++i)
            {
                //Find the first sample newer than the target timestamp.
                if (poseData[indx][i].timestamp > idealTS)
                {
                    var currentIndex = i;
                    if (currentIndex > 0)
                    {
                        //Calculate the time between the pose and the delayed pose.
                        var timeBetween = poseData[indx][currentIndex].timestamp -
                                          poseData[indx][currentIndex - 1].timestamp;
                        var alpha = (Time.time - poseData[indx][currentIndex - 1].timestamp - timeDelay) / timeBetween;
                        //Lerping to the next position based on the time determined above.
                        var pos = Vector3.Lerp(poseData[indx][currentIndex - 1].position,
                                               poseData[indx][currentIndex].position, alpha);
                        var rot = Quaternion.Lerp(poseData[indx][currentIndex - 1].rotation,
                                                  poseData[indx][currentIndex].rotation, alpha);
                        //Applies new values
                        p             = new Pose();
                        p.translation = pos;
                        p.rotation    = rot;
                        //Removes used elements from dictionary, keeping the sample just before the target time.
                        poseData[indx].RemoveRange(0, currentIndex - 1);
                    }

                    return(p);
                }
            }
        }

        return(p);
    }
Example #6
0
    /// <summary>
    /// Compute the delayed position and rotation from the history stored in the poseData dictionary.
    /// Linearly interpolates between the two stored samples that bracket the target
    /// time (Time.time - timeDelay).
    /// </summary>
    /// <param name="keyindex">Key identifying which tracked device's pose history to query.</param>
    /// <param name="timeDelay">Delay in seconds applied when sampling the history.</param>
    /// <returns>The interpolated, delayed pose; the newest stored pose if no newer sample exists.</returns>
    private sl.Pose GetValuePosition(int keyindex, float timeDelay)
    {
        sl.Pose result = new sl.Pose();
        if (!poseData.ContainsKey(keyindex))
        {
            return(result);
        }

        var history = poseData[keyindex];

        //Fall back to the most recent stored sample.
        result.translation = history[history.Count - 1].position;
        result.rotation    = history[history.Count - 1].rotation;

        //The timestamp we want to reproduce, timeDelay seconds in the past.
        float targetTime = Time.time - timeDelay;

        for (int sample = 0; sample < history.Count; ++sample)
        {
            //Skip samples that are older than the target time.
            if (history[sample].timestamp <= targetTime)
            {
                continue;
            }

            if (sample > 0)
            {
                //Interpolation factor between the two samples bracketing the target time.
                float span  = history[sample].timestamp - history[sample - 1].timestamp;
                float blend = ((Time.time - history[sample - 1].timestamp) - timeDelay) / span;

                //Blend position and rotation toward the newer sample.
                Vector3    lerpedPos = Vector3.Lerp(history[sample - 1].position, history[sample].position, blend);
                Quaternion lerpedRot = Quaternion.Lerp(history[sample - 1].rotation, history[sample].rotation, blend);

                result             = new sl.Pose();
                result.translation = lerpedPos;
                result.rotation    = lerpedRot;

                //Discard samples that are now too old to be needed again,
                //keeping the one just before the target time.
                history.RemoveRange(0, sample - 1);
            }
            return(result);
        }

        return(result);
    }
Example #7
0
    /// <summary>
    /// Renders one frame of the top-down tracking view: converts object positions
    /// from camera to world space, redraws the background and scale, then draws
    /// either tracklet trails (tracking enabled) or raw object positions.
    /// </summary>
    /// <param name="objects">Detected objects for the current frame.</param>
    /// <param name="current_camera_pose">Camera pose used to express positions in world space.</param>
    /// <param name="tracking_view">Output image the view is drawn into.</param>
    /// <param name="tracking_enabled">Whether positional tracking is active.</param>
    public void generate_view(ref sl.Objects objects, sl.Pose current_camera_pose, ref OpenCvSharp.Mat tracking_view, bool tracking_enabled)
    {
        // To get position in WORLD reference
        for (int i = 0; i < objects.numObject; i++)
        {
            sl.ObjectData obj = objects.objectData[i];

            Vector3 pos     = obj.position;
            Vector3 new_pos = Vector3.Transform(pos, current_camera_pose.rotation) + current_camera_pose.translation;
            // NOTE(review): if sl.ObjectData is a value type, this assignment mutates only
            // the local copy and objects.objectData[i] is left unchanged — confirm intent.
            obj.position = new_pos;
        }

        // Initialize visualization (background is generated once and reused).
        if (!has_background_ready)
        {
            generateBackground();
        }

        background.CopyTo(tracking_view);
        // Scale
        drawScale(ref tracking_view);

        if (tracking_enabled)
        {
            // First add new points, and remove the ones that are too old
            ulong current_timestamp = objects.timestamp;
            addToTracklets(ref objects);
            detectUnchangedTrack(current_timestamp);
            pruneOldPoints(current_timestamp);

            // Draw all tracklets
            drawTracklets(ref tracking_view, current_camera_pose);
        }
        else
        {
            drawPosition(ref objects, ref tracking_view, current_camera_pose);
        }
    }
Example #8
0
    /// <summary>
    /// Draws each detected object at its current position on the top view:
    /// a filled circle for people, a filled rectangle for vehicles.
    /// Other classes are not visualized.
    /// </summary>
    void drawPosition(ref sl.Objects objects, ref OpenCvSharp.Mat tracking_view, sl.Pose current_camera_pose)
    {
        for (int idx = 0; idx < objects.numObject; idx++)
        {
            sl.ObjectData data  = objects.objectData[idx];
            Scalar        color = Utils.generateColorClass_u((int)data.label);

            // Point = person || Rect = Vehicle
            if (data.label == sl.OBJECT_CLASS.PERSON)
            {
                Cv2.Circle(tracking_view, toCVPoint(data.position, current_camera_pose), 5, color, 5);
            }
            else if (data.label == sl.OBJECT_CLASS.VEHICLE && data.boundingBox.Length > 0)
            {
                //Vehicles are drawn as a filled 20x40 rectangle centered on the object.
                Point center       = toCVPoint(data.position, current_camera_pose);
                int   half_width   = 10;
                Point top_left     = center - new Point(half_width, half_width * 2);
                Point bottom_right = center + new Point(half_width, half_width * 2);

                Cv2.Rectangle(tracking_view, top_left, bottom_right, color, Cv2.FILLED);
            }
        }
    }
Example #9
0
    /// <summary>
    /// Converts a tracked point to 2D top-view image coordinates by delegating
    /// to the Vector3 overload.
    /// </summary>
    Point toCVPoint(TrackPoint position, sl.Pose pose) => toCVPoint(position.toVector3(), pose);
Example #10
0
    /// <summary>
    /// Visualization: draws every tracklet as a polyline of its stored positions,
    /// then marks the current position of each still-alive track
    /// (circle for people, filled rectangle for vehicles).
    /// </summary>
    void drawTracklets(ref OpenCvSharp.Mat tracking_view, sl.Pose current_camera_pose)
    {
        foreach (Tracklet track in tracklets)
        {
            //Skip tracks that are not currently tracked OK.
            if (track.tracking_state != sl.OBJECT_TRACKING_STATE.OK)
            {
                // NOTE(review): debug logging left in — consider removing for release builds.
                Console.WriteLine("not ok");
                continue;
            }
            //Skip tracks that are too short to draw a meaningful trail.
            if (track.positions_to_draw.Count < min_length_to_draw)
            {
                //Console.WriteLine("too small " + track.positions_to_draw.Count);
                continue;
            }

            Scalar clr = Utils.generateColorID_u((int)track.id);

            int        track_size     = track.positions_to_draw.Count;
            TrackPoint start_point    = track.positions_to_draw[0];
            Point      cv_start_point = toCVPoint(start_point, current_camera_pose);
            TrackPoint end_point      = track.positions_to_draw[0];
            for (int point_index = 1; point_index < track_size; ++point_index)
            {
                end_point = track.positions_to_draw[point_index];
                Point cv_end_point = toCVPoint(track.positions_to_draw[point_index], current_camera_pose);

                // Check point status. When a segment is skipped, start_point is not
                // advanced, so the next drawn segment bridges across the OFF gap.
                if (start_point.tracking_state == TrackPointState.OFF || end_point.tracking_state == TrackPointState.OFF)
                {
                    continue;
                }

                Cv2.Line(tracking_view, cv_start_point, cv_end_point, clr, 4);
                start_point    = end_point;
                cv_start_point = cv_end_point;
            }

            // Current position, visualized as a point, only for alived track
            // Point = person || Square = Vehicle
            if (track.is_alive)
            {
                switch (track.object_type)
                {
                case sl.OBJECT_CLASS.PERSON:
                    Cv2.Circle(tracking_view, toCVPoint(track.positions_to_draw[track.positions_to_draw.Count - 1], current_camera_pose), 5, clr, 5);
                    break;

                case sl.OBJECT_CLASS.VEHICLE:
                {
                    //Filled 20x40 rectangle centered on the last stored position.
                    Point rect_center         = toCVPoint(track.positions_to_draw[track.positions_to_draw.Count - 1], current_camera_pose);
                    int   square_size         = 10;
                    Point top_left_corner     = rect_center - new Point(square_size, square_size * 2);
                    Point right_bottom_corner = rect_center + new Point(square_size, square_size * 2);
                    Cv2.Rectangle(tracking_view, top_left_corner, right_bottom_corner, clr, Cv2.FILLED);

                    break;
                }

                case sl.OBJECT_CLASS.LAST:
                    break;

                default:
                    break;
                }
            }
        }
    }