    /// <summary>
    /// Sets the controller's transformation based on the received pose.
    /// </summary>
    /// <param name="pose">Received Tango pose data.</param>
    private void _UpdateTransformationFromPose(TangoPoseData pose)
    {
        // Remember the previous position and rotation so delta motion can be computed.
        m_prevTangoPosition = m_tangoPosition;
        m_prevTangoRotation = m_tangoRotation;

        // The callback pose is for device with respect to start of service pose.
        if (pose.status_code == TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
        {
            Vector3    position;
            Quaternion rotation;
            TangoSupport.TangoPoseToWorldTransform(pose, out position, out rotation);

            m_uwTuc = Matrix4x4.TRS(position, rotation, Vector3.one);
            Matrix4x4 uwOffsetTuc = m_uwOffsetTuw * m_uwTuc;

            m_tangoPosition = uwOffsetTuc.GetColumn(3);
            m_tangoRotation = Quaternion.LookRotation(uwOffsetTuc.GetColumn(2), uwOffsetTuc.GetColumn(1));

            // Other pose data -- Pose count gets reset if pose status just became valid.
            if (pose.status_code != m_poseStatus)
            {
                m_poseCount = 0;
            }

            m_poseCount++;

            // Other pose data -- Pose delta time.
            m_poseDeltaTime = (float)pose.timestamp - m_poseTimestamp;
            m_poseTimestamp = (float)pose.timestamp;
        }

        m_poseStatus = pose.status_code;

        if (m_clutchActive)
        {
            // When clutching, preserve position.
            m_tangoPosition = m_prevTangoPosition;

            // When clutching, preserve yaw; keep changes in pitch and roll.
            Vector3 rotationAngles = m_tangoRotation.eulerAngles;
            rotationAngles.y            = m_prevTangoRotation.eulerAngles.y;
            m_tangoRotation.eulerAngles = rotationAngles;
        }

        // Calculate final position and rotation deltas and apply them.
        Vector3    deltaPosition = m_tangoPosition - m_prevTangoPosition;
        Quaternion deltaRotation = m_tangoRotation * Quaternion.Inverse(m_prevTangoRotation);

        if (m_characterMotion && m_characterController != null)
        {
            m_characterController.Move(deltaPosition);
            transform.rotation = deltaRotation * transform.rotation;
        }
        else
        {
            transform.position = transform.position + deltaPosition;
            transform.rotation = deltaRotation * transform.rotation;
        }
    }
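A minimal sketch of how this method is typically driven: the containing behaviour implements the Tango Unity SDK's ITangoPose interface and forwards the device-with-respect-to-start-of-service pose. The filtering below is an assumption about the surrounding class, not part of the example above.

    public void OnTangoPoseAvailable(TangoPoseData poseData)
    {
        // Only forward the device pose expressed in the start-of-service frame.
        if (poseData.framePair.baseFrame == TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE &&
            poseData.framePair.targetFrame == TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE)
        {
            _UpdateTransformationFromPose(poseData);
        }
    }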
Example #2
    /// <summary>
    /// Estimates the depth of a point on a screen, based on nearest neighbors.
    /// </summary>
    /// <returns>
    /// <c>true</c> if a successful depth estimate was obtained.
    /// </returns>
    /// <param name="cam">The Unity camera.</param>
    /// <param name="pos">The point in pixel coordinates to perform depth estimation.</param>
    /// <param name="colorCameraPoint">
    /// The point (x, y, z), where (x, y) is the back-projection of the UV
    /// coordinates to the color camera space and z is the z coordinate of
    /// the point in the point cloud nearest to the user selection after
    /// projection onto the image plane. If there is not a point cloud point
    /// close to the user selection after projection onto the image plane,
    /// then the point will be set to (0.0, 0.0, 0.0) and isValidPoint will
    /// be set to false.
    /// </param>
    public bool EstimateDepthOnScreen(Camera cam, Vector2 pos, out Vector3 colorCameraPoint)
    {
        // Set up parameters
        Matrix4x4 colorCameraTUnityWorld = m_colorCameraTUnityCamera * cam.transform.worldToLocalMatrix;
        Vector2   normalizedPos          = cam.ScreenToViewportPoint(pos);

        // If the camera has a TangoARScreen attached, it is not displaying the entire color camera image.  Correct
        // the normalized coordinates by taking the clipping into account.
        TangoARScreen arScreen = cam.gameObject.GetComponent <TangoARScreen>();

        if (arScreen != null)
        {
            normalizedPos = arScreen.ViewportPointToCameraImagePoint(normalizedPos);
        }

        bool isValidPoint;
        int  returnType = TangoSupport.ScreenCoordinateToWorldNearestNeighbor(
            m_points,
            m_pointsCount,
            m_depthTimestamp,
            m_colorCameraIntrinsics,
            ref colorCameraTUnityWorld,
            normalizedPos,
            out colorCameraPoint,
            out isValidPoint);

        if (returnType != Common.ErrorType.TANGO_SUCCESS)
        {
            Debug.LogErrorFormat("TangoSupport.ScreenCoordinateToWorldNearestNeighbor failed with error code {0}.",
                                 returnType);
        }

        return((returnType == Common.ErrorType.TANGO_SUCCESS) && isValidPoint);
    }
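A hedged usage sketch: hit-test a touch against the point cloud and read back the estimated depth. The m_pointCloud reference (the component exposing EstimateDepthOnScreen) is an assumption.

    private void Update()
    {
        if (Input.touchCount == 1 && Input.GetTouch(0).phase == TouchPhase.Began)
        {
            Vector3 colorCameraPoint;
            if (m_pointCloud.EstimateDepthOnScreen(Camera.main, Input.GetTouch(0).position, out colorCameraPoint))
            {
                // z is the estimated depth of the touched point, in meters, in color camera space.
                Debug.Log("Estimated depth: " + colorCameraPoint.z + " m");
            }
        }
    }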
    /// <summary>
    /// Given a screen coordinate, find a plane that most closely fits depth values in that area.
    ///
    /// This assumes you are using this in an AR context.
    /// </summary>
    /// <returns><c>true</c>, if plane was found, <c>false</c> otherwise.</returns>
    /// <param name="cam">The Unity camera.</param>
    /// <param name="pos">The point in screen space to perform detection on.</param>
    /// <param name="planeCenter">Filled in with the center of the plane in Unity world space.</param>
    /// <param name="plane">Filled in with a model of the plane in Unity world space.</param>
    public bool FindPlane(Camera cam, Vector2 pos, out Vector3 planeCenter, out Plane plane)
    {
        Matrix4x4 colorCameraTUnityWorld = m_colorCameraTUnityCamera * cam.transform.worldToLocalMatrix;
        Vector2   normalizedPos          = cam.ScreenToViewportPoint(pos);

        // If the camera has a TangoARScreen attached, it is not displaying the entire color camera image.  Correct
        // the normalized coordinates by taking the clipping into account.
        TangoARScreen arScreen = cam.gameObject.GetComponent <TangoARScreen>();

        if (arScreen != null)
        {
            normalizedPos = arScreen.ViewportPointToCameraImagePoint(normalizedPos);
        }

        int returnValue = TangoSupport.FitPlaneModelNearClick(
            m_points, m_pointsCount, m_depthTimestamp, m_colorCameraIntrinsics, ref colorCameraTUnityWorld, normalizedPos,
            out planeCenter, out plane);

        return(returnValue == Common.ErrorType.TANGO_SUCCESS);
    }
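A hedged usage sketch for FindPlane: on a mouse click (or tap), place a marker on the fitted plane. The m_pointCloud and m_marker references are assumptions.

    private void Update()
    {
        if (Input.GetMouseButtonDown(0))
        {
            Vector3 planeCenter;
            Plane   plane;
            if (m_pointCloud.FindPlane(Camera.main, Input.mousePosition, out planeCenter, out plane))
            {
                // Orient the marker so its up axis matches the plane normal.
                m_marker.transform.position = planeCenter;
                m_marker.transform.rotation = Quaternion.FromToRotation(Vector3.up, plane.normal);
            }
        }
    }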
Example #4
 private void StartTango()
 {
     tango = new Tango(this, new Runnable(() =>
     {
         Log.Debug(Tag, "TangoRunnable");
         try
         {
             TangoSupport.Initialize();
             var tangoConfig = GetTangoConfig(tango);
             tango.Connect(tangoConfig);
             TangoAddListeners();
             tango.ConnectTextureId(TangoCameraIntrinsics.TangoCameraColor, -1);
         }
         catch (TangoOutOfDateException e)
         {
             Log.Error(Tag, "TangoOutOfDateException", e);
         }
         catch (TangoErrorException e)
         {
             Log.Error(Tag, "TangoErrorException", e);
         }
         catch (TangoInvalidException e)
         {
             Log.Error(Tag, "TangoInvalidException", e);
         }
     }));
 }
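A sketch of what GetTangoConfig might look like, mirroring the Java Tango API's getConfig()/putBoolean() calls. The C# member names used here (ConfigTypeDefault, KeyBooleanMotiontracking, KeyBooleanColorcamera) are assumptions about the Xamarin binding and may differ in a given project.

 private TangoConfig GetTangoConfig(Tango tango)
 {
     // Assumed binding member names; verify against the Tango Java binding in use.
     var config = tango.GetConfig(TangoConfig.ConfigTypeDefault);
     config.PutBoolean(TangoConfig.KeyBooleanMotiontracking, true);
     config.PutBoolean(TangoConfig.KeyBooleanColorcamera, true);
     return config;
 }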
Example #5
    /// <summary>
    /// TODO: Fix this! It does not output correct values; the input matrix is probably not correct.
    /// Generates the depth map using nearest-neighbor upsampling.
    /// </summary>
    private void GenerateDepthMap_NearestNeighbor(ref TangoUnityDepth tangoUnityDepth)
    {
        TangoPoseData            poseData = new TangoPoseData();
        TangoCoordinateFramePair pair;

        pair.baseFrame   = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        PoseProvider.GetPoseAtTime(poseData, tangoUnityDepth.m_timestamp, pair);
        if (poseData.status_code != TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
        {
            return;
        }
        Vector3    position;
        Quaternion rotation;

        TangoSupport.TangoPoseToWorldTransform(poseData, out position, out rotation);

        Matrix4x4 ccWorld          = Matrix4x4.TRS(position, rotation, Vector3.one);
        bool      isValid          = false;
        Vector3   colorCameraPoint = new Vector3();

        for (int i = 0; i < _depthMapWidth; i++)
        {
            for (int j = 0; j < _depthMapHeight; j++)
            {
                if (TangoSupport.ScreenCoordinateToWorldNearestNeighbor(
                        _PointCloud.m_points, _PointCloud.m_pointsCount,
                        tangoUnityDepth.m_timestamp,
                        _ccIntrinsics,
                        ref ccWorld,
                        new Vector2(i / (float)_depthMapWidth, j / (float)_depthMapHeight),
                        out colorCameraPoint, out isValid) == Common.ErrorType.TANGO_INVALID)
                {
                    _depthTexture.SetPixel(i, j, Color.red);
                    continue;
                }

                if (isValid)
                {
                    float c = 1 - colorCameraPoint.z / 4.5f;
                    _depthTexture.SetPixel(i, j, new Color(c, c, c));
                }
                else
                {
                    _depthTexture.SetPixel(i, j, Color.black);
                }
            }
        }
        _depthTexture.Apply();
        _DepthMapQuad.sharedMaterial.mainTexture = _depthTexture;

        //_debugMessage = "DepthAvailable: " + _waitingForDepth.ToString() + "\n" +
        //    " points: " + _PointCloud.m_pointsCount + "\n" +
        //    " timestamp: " + tangoUnityDepth.m_timestamp.ToString("0.00") + "\n" +
        //    " XYZ:" + colorCameraPoint.ToString();
    }
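The author's note above says the input matrix is probably wrong. The other examples in this listing pass a color-camera-from-Unity-world matrix to ScreenCoordinateToWorldNearestNeighbor rather than the device pose itself; a hedged sketch of that construction (m_colorCameraTUnityCamera and the Camera reference are assumptions borrowed from those examples):

        Matrix4x4 colorCameraTUnityWorld =
            m_colorCameraTUnityCamera * Camera.main.transform.worldToLocalMatrix;
        // ...then pass colorCameraTUnityWorld instead of ccWorld to
        // TangoSupport.ScreenCoordinateToWorldNearestNeighbor.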
Example #6
    public void OnTangoImageAvailableEventHandler(Tango.TangoEnums.TangoCameraId cameraId,
                                                  Tango.TangoUnityImageData imageBuffer)
    {
        TangoSupport.DetectMarkers(imageBuffer, cameraId,
                                   TangoSupport.MarkerType.QRCODE, MARKER_SIZE, markerList);

        if (markerList.Count > 0)
        {
            TangoSupport.Marker marker = markerList[0];

            qrcodePlane.transform.position = marker.m_translation;
            qrcodePlane.transform.rotation = marker.m_orientation;

            var bottomToTop = marker.m_corner3DP3 - marker.m_corner3DP0;
            var leftToRight = marker.m_corner3DP1 - marker.m_corner3DP0;
            plane.transform.localScale = new Vector3(leftToRight.magnitude, 1, bottomToTop.magnitude) * 0.1f;
        }
    }
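A hedged sketch of the wiring this handler assumes: the class implements ITangoVideoOverlay, registers with the TangoApplication so the image callback fires, and owns the marker list. The field names and marker size below are assumptions.

    // Physical side length of the printed QR code, in meters (assumed value).
    private const double MARKER_SIZE = 0.14;
    private List<TangoSupport.Marker> markerList = new List<TangoSupport.Marker>();

    private void Start()
    {
        // Register for color camera image callbacks so
        // OnTangoImageAvailableEventHandler is invoked.
        Tango.TangoApplication tangoApplication = FindObjectOfType<Tango.TangoApplication>();
        tangoApplication.Register(this);
    }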
Example #7
    /// <summary>
    /// Given a screen coordinate, finds a plane that most closely fits the
    /// depth values in that area.
    ///
    /// This function is slow, as it looks at every single point in the point
    /// cloud. Avoid calling this more than once a frame. This also assumes the
    /// Unity camera intrinsics match the device's color camera.
    /// </summary>
    /// <returns><c>true</c>, if a plane was found; <c>false</c> otherwise.</returns>
    /// <param name="cam">The Unity camera.</param>
    /// <param name="pos">The point in screen space to perform detection on.</param>
    /// <param name="planeCenter">Filled in with the center of the plane in Unity world space.</param>
    /// <param name="plane">Filled in with a model of the plane in Unity world space.</param>
    public bool FindPlane(Camera cam, Vector2 pos, out Vector3 planeCenter, out Plane plane)
    {
        if (m_pointsCount == 0)
        {
            // No points to check, maybe not connected to the service yet
            planeCenter = Vector3.zero;
            plane       = new Plane();
            return(false);
        }

        Vector2 normalizedPos = cam.ScreenToViewportPoint(pos);

        // If the camera has a TangoARScreen attached, it is not displaying the entire color camera image.  Correct
        // the normalized coordinates by taking the clipping into account.
        TangoARScreen arScreen = cam.gameObject.GetComponent <TangoARScreen>();

        if (arScreen != null)
        {
            normalizedPos = arScreen.ViewportPointToCameraImagePoint(normalizedPos);
        }

        DVector4 planeModel = new DVector4();

        bool returnValue = TangoSupport.FitPlaneModelNearClick(
            m_mostRecentPointCloud,
            arScreen.m_screenUpdateTime,
            normalizedPos,
            out planeCenter,
            out planeModel);

        planeCenter = m_mostRecentUnityWorldTDepthCamera.MultiplyPoint3x4(planeCenter);
        Vector3 normal = new Vector3((float)planeModel.x,
                                     (float)planeModel.y,
                                     (float)planeModel.z);

        normal = m_mostRecentUnityWorldTDepthCamera.MultiplyVector(normal);

        // Scale the plane offset by the transformed normal's length, then normalize the normal in place.
        float distance = (float)planeModel.w / normal.magnitude;
        normal.Normalize();

        plane = new Plane(normal, distance);

        return(returnValue);
    }
Example #8
    /// <summary>
    /// Given a screen coordinate, find a plane that most closely fits depth values in that area.
    ///
    /// This assumes you are using this in an AR context.
    /// </summary>
    /// <returns><c>true</c>, if plane was found, <c>false</c> otherwise.</returns>
    /// <param name="cam">The Unity camera.</param>
    /// <param name="pos">The point in screen space to perform detection on.</param>
    /// <param name="planeCenter">Filled in with the center of the plane in Unity world space.</param>
    /// <param name="plane">Filled in with a model of the plane in Unity world space.</param>
    public bool FindPlane(Camera cam, Vector2 pos, out Vector3 planeCenter, out Plane plane)
    {
        Matrix4x4 unityWorldTColorCamera = m_unityWorldTStartService * m_startServiceTDevice * Matrix4x4.Inverse(m_imuTDevice) * m_imuTColorCamera;
        Matrix4x4 colorCameraTUnityWorld = unityWorldTColorCamera.inverse;

        Vector2 normalizedPos = cam.ScreenToViewportPoint(pos);

        int returnValue = TangoSupport.FitPlaneModelNearClick(
            m_points, m_pointsCount, m_pointsTimestamp, m_colorCameraIntrinsics, ref colorCameraTUnityWorld, normalizedPos,
            out planeCenter, out plane);

        return(returnValue == Common.ErrorType.TANGO_SUCCESS);
    }
Example #9
    /// <summary>
    /// Detect one or more markers in the input image.
    /// </summary>
    /// <param name="cameraId">
    /// The ID of the camera that produced the image.
    /// </param>
    /// <param name="imageBuffer">
    /// Color camera image buffer.
    /// </param>
    public void OnTangoImageAvailableEventHandler(TangoEnums.TangoCameraId cameraId,
                                                  TangoUnityImageData imageBuffer)
    {
        TangoSupport.DetectMarkers(imageBuffer, cameraId,
                                   TangoSupport.MarkerType.ARTAG, MARKER_SIZE, m_markerList);

        for (int i = 0; i < m_markerList.Count; ++i)
        {
            TangoSupport.Marker marker = m_markerList[i];
            int markerId    = Convert.ToInt32(marker.m_content);
            int planetIndex = (markerId - 1) % m_planets.Length;

            if (m_planets[planetIndex] == null)
            {
                m_planets[planetIndex] = Instantiate <GameObject>(m_planetsPrefabs[planetIndex]);
            }

            m_planets[planetIndex].transform.position = marker.m_translation;
            m_planets[planetIndex].transform.rotation = marker.m_orientation;
        }
    }
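A hedged sketch of the declarations this handler relies on; the types are inferred from the usage above and the concrete values are assumptions.

    // Physical side length of the printed AR tag, in meters (assumed value).
    private const double MARKER_SIZE = 0.14;

    // Reusable list that TangoSupport.DetectMarkers fills on each image callback.
    private List<TangoSupport.Marker> m_markerList = new List<TangoSupport.Marker>();

    // Planet prefabs assigned in the Inspector, and the spawned instances (one per marker id).
    public GameObject[] m_planetsPrefabs;
    private GameObject[] m_planets;

    private void Start()
    {
        m_planets = new GameObject[m_planetsPrefabs.Length];
    }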
Example #10
    /// <summary>
    /// Given a screen coordinate, finds a plane that most closely fits the
    /// depth values in that area.
    ///
    /// This function is slow, as it looks at every single point in the point
    /// cloud. Avoid calling this more than once a frame. This also assumes the
    /// Unity camera intrinsics match the device's color camera.
    /// </summary>
    /// <returns><c>true</c>, if a plane was found; <c>false</c> otherwise.</returns>
    /// <param name="cam">The Unity camera.</param>
    /// <param name="pos">The point in screen space to perform detection on.</param>
    /// <param name="planeCenter">Filled in with the center of the plane in Unity world space.</param>
    /// <param name="plane">Filled in with a model of the plane in Unity world space.</param>
    public bool FindPlane(Camera cam, Vector2 pos, out Vector3 planeCenter, out Plane plane)
    {
        if (m_pointsCount == 0)
        {
            // No points to check, maybe not connected to the service yet
            planeCenter = Vector3.zero;
            plane       = new Plane();
            return(false);
        }

        Matrix4x4 colorCameraTUnityWorld = m_colorCameraTUnityCamera * cam.transform.worldToLocalMatrix;
        Vector2   normalizedPos          = cam.ScreenToViewportPoint(pos);

        // If the camera has a TangoARScreen attached, it is not displaying the entire color camera image.  Correct
        // the normalized coordinates by taking the clipping into account.
        TangoARScreen arScreen = cam.gameObject.GetComponent <TangoARScreen>();

        if (arScreen != null)
        {
            normalizedPos = arScreen.ViewportPointToCameraImagePoint(normalizedPos);
        }

        TangoCameraIntrinsics alignedIntrinsics = new TangoCameraIntrinsics();

        VideoOverlayProvider.GetDeviceOrientationAlignedIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR,
                                                                   alignedIntrinsics);
        int returnValue = TangoSupport.FitPlaneModelNearClick(
            m_points, m_pointsCount, m_depthTimestamp, alignedIntrinsics, ref colorCameraTUnityWorld,
            normalizedPos, out planeCenter, out plane);

        return(returnValue == Common.ErrorType.TANGO_SUCCESS);
    }
Example #11
        protected override void OnResume()
        {
            Log.Debug(Tag, "OnResume");
            base.OnResume();

            _tango = new Tango(this, new Runnable(() =>
            {
                Log.Debug(Tag, "TangoRunnable");
                //try
                //{
                TangoSupport.Initialize();
                _tangoConfig = SetupTangoConfig(_tango);
                _tango.Connect(_tangoConfig);
                startupTango();
                //}
                //catch (TangoOutOfDateException e)
                //{
                //    Log.Error(Tag, GetString(R.string.exception_out_of_date), e);
                //}
                //catch (TangoErrorException e)
                //{
                //    Log.Error(Tag, GetString(R.string.exception_tango_error), e);
                //}
                //catch (TangoInvalidException e)
                //{
                //    Log.Error(Tag, GetString(R.string.exception_tango_invalid), e);
                //}
            }));
        }
Example #12
    /// <summary>
    /// Estimates the depth of a point on a screen, based on nearest neighbors.
    /// </summary>
    /// <returns>
    /// <c>true</c> if a successful depth estimate was obtained.
    /// </returns>
    /// <param name="cam">The Unity camera.</param>
    /// <param name="pos">The point in pixel coordinates to perform depth estimation.</param>
    /// <param name="colorCameraPoint">
    /// The point (x, y, z), where (x, y) is the back-projection of the UV
    /// coordinates to the color camera space and z is the z coordinate of
    /// the point in the point cloud nearest to the user selection after
    /// projection onto the image plane. If there is not a point cloud point
    /// close to the user selection after projection onto the image plane,
    /// then the point will be set to (0.0, 0.0, 0.0) and isValidPoint will
    /// be set to false.
    /// </param>
    public bool EstimateDepthOnScreen(Camera cam, Vector2 pos, out Vector3 colorCameraPoint)
    {
        // Set up parameters
        Matrix4x4 colorCameraTUnityWorld = TangoSupport.COLOR_CAMERA_T_UNITY_CAMERA * cam.transform.worldToLocalMatrix;
        Vector2   normalizedPos          = cam.ScreenToViewportPoint(pos);

        // If the camera has a TangoARScreen attached, it is not displaying the entire color camera image.  Correct
        // the normalized coordinates by taking the clipping into account.
        TangoARScreen arScreen = cam.gameObject.GetComponent <TangoARScreen>();

        if (arScreen != null)
        {
            normalizedPos = arScreen.ViewportPointToCameraImagePoint(normalizedPos);
        }

        bool returnValue = TangoSupport.ScreenCoordinateToWorldNearestNeighbor(
            m_mostRecentPointCloud,
            arScreen.m_screenUpdateTime,
            normalizedPos,
            out colorCameraPoint);

        return(returnValue);
    }
    /// <summary>
    /// Detect one or more markers in the input image.
    /// </summary>
    /// <param name="cameraId">
    /// The ID of the camera that produced the image.
    /// </param>
    /// <param name="imageBuffer">
    /// Color camera image buffer.
    /// </param>
    public void OnTangoImageAvailableEventHandler(TangoEnums.TangoCameraId cameraId,
                                                  TangoUnityImageData imageBuffer)
    {
        TangoSupport.DetectMarkers(imageBuffer, cameraId,
                                   TangoSupport.MarkerType.ARTAG, MARKER_SIZE, m_markerList);

        for (int i = 0; i < m_markerList.Count; ++i)
        {
            TangoSupport.Marker marker = m_markerList[i];

            if (m_markerObjects.ContainsKey(marker.m_content))
            {
                GameObject markerObject = m_markerObjects[marker.m_content];
                markerObject.GetComponent <MarkerVisualizationObject>().SetMarker(marker);
            }
            else
            {
                GameObject markerObject = Instantiate <GameObject>(m_markerPrefab);
                m_markerObjects.Add(marker.m_content, markerObject);
                markerObject.GetComponent <MarkerVisualizationObject_Custom>().SetMarker(marker);
            }
        }
    }
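A hedged sketch of the declarations this handler relies on; marker.m_content (the decoded tag content) is used as the dictionary key, so the map is keyed by string. The concrete values are assumptions.

    // Physical side length of the printed AR tag, in meters (assumed value).
    private const double MARKER_SIZE = 0.14;

    // Prefab carrying the marker visualization component, assigned in the Inspector.
    public GameObject m_markerPrefab;

    private List<TangoSupport.Marker> m_markerList = new List<TangoSupport.Marker>();
    private Dictionary<string, GameObject> m_markerObjects = new Dictionary<string, GameObject>();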
Example #14
        protected override void OnResume()
        {
            base.OnResume();

            _tango = new Tango(this, new Runnable(() =>
            {
                Log.Debug(Tag, "TangoRunnable");
                try
                {
                    TangoSupport.Initialize();
                    _tangoConfig = SetupTangoConfig(_tango);
                    _tango.Connect(_tangoConfig);
                    _isConnected = true;
                    var adfs     = _tango.ListAreaDescriptions();

                    // Get the number of ADFs (area description files).
                    var nr       = adfs.Count;
                    var textView = FindViewById <TextView>(Resource.Id.textView2);

                    RunOnUiThread(() => textView.Text = nr.ToString());
                }
                catch (TangoOutOfDateException e)
                {
                    Log.Error(Tag, "TangoOutOfDateException", e);
                }
                catch (TangoErrorException e)
                {
                    // this exception gets thrown
                    Log.Error(Tag, "TangoErrorException", e);
                }
                catch (TangoInvalidException e)
                {
                    Log.Error(Tag, "TangoInvalidException", e);
                }
            }));
        }
Example #15
    /// <summary>
    /// Update AR screen rendering and attached Camera's projection matrix.
    /// </summary>
    /// <param name="displayRotation">Activity (screen) rotation.</param>
    /// <param name="colorCameraRotation">Color camera sensor rotation.</param>
    private void _SetRenderAndCamera(OrientationManager.Rotation displayRotation,
                                     OrientationManager.Rotation colorCameraRotation)
    {
        float cameraWidth  = (float)Screen.width;
        float cameraHeight = (float)Screen.height;

        #pragma warning disable 0219
        // Determine whether the current display orientation is landscape or portrait.
        // AndroidHelper.GetDefaultOrientation() returns 1 if the device's default orientation is
        // portrait and 2 if it is landscape. Adding the device's default orientation to the amount
        // the display has been rotated from that default gives the current display orientation
        // (landscape vs. portrait).
        bool  isLandscape           = (AndroidHelper.GetDefaultOrientation() + (int)displayRotation) % 2 == 0;
        bool  needToFlipCameraRatio = false;
        float cameraRatio           = (float)Screen.width / (float)Screen.height;
        #pragma warning restore 0219

#if !UNITY_EDITOR
        // In most cases we don't need to flip the camera width and height. However, the Unity camera
        // sometimes only updates a couple of frames after Android's display-changed callback, so in
        // that case we need to flip the width and height.
        //
        // This does not happen in the editor, because the emulated device never rotates.
        needToFlipCameraRatio = (!isLandscape & (cameraRatio > 1.0f)) || (isLandscape & (cameraRatio < 1.0f));

        if (needToFlipCameraRatio)
        {
            cameraRatio = 1.0f / cameraRatio;
            float tmp = cameraWidth;
            cameraWidth  = cameraHeight;
            cameraHeight = tmp;
        }
#endif

        TangoCameraIntrinsics alignedIntrinsics = new TangoCameraIntrinsics();
        TangoCameraIntrinsics intrinsics        = new TangoCameraIntrinsics();
        VideoOverlayProvider.GetDeviceOrientationAlignedIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR,
                                                                   alignedIntrinsics);
        VideoOverlayProvider.GetIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR,
                                           intrinsics);

        if (alignedIntrinsics.width != 0 && alignedIntrinsics.height != 0)
        {
            // The camera to which this script is attached is an Augmented Reality camera.  The color camera
            // image must fill that camera's viewport.  That means we must clip the color camera image to make
            // its ratio the same as the Unity camera.  If we don't do this the color camera image will be
            // stretched non-uniformly, making a circle into an ellipse.
            float widthRatio  = (float)cameraWidth / (float)alignedIntrinsics.width;
            float heightRatio = (float)cameraHeight / (float)alignedIntrinsics.height;

            if (widthRatio >= heightRatio)
            {
                m_uOffset = 0;
                m_vOffset = (1 - (heightRatio / widthRatio)) / 2;
            }
            else
            {
                m_uOffset = (1 - (widthRatio / heightRatio)) / 2;
                m_vOffset = 0;
            }

            // Note that here we are passing in non-inverted intrinsics, because the YUV conversion is still operating
            // on native buffer layout.
            OrientationManager.Rotation rotation = TangoSupport.RotateFromAToB(displayRotation, colorCameraRotation);
            _MaterialUpdateForIntrinsics(m_uOffset, m_vOffset, rotation);
            _CameraUpdateForIntrinsics(m_camera, alignedIntrinsics, m_uOffset, m_vOffset);
            if (m_arCameraPostProcess != null)
            {
                m_arCameraPostProcess.SetupIntrinsic(intrinsics);
            }
        }
        else
        {
            Debug.LogError("AR Camera intrinsic is not valid.");
        }
    }
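As a worked example of the clipping math above: with a 2160x1080 screen (18:9) and 1920x1080 orientation-aligned intrinsics (16:9), widthRatio = 2160 / 1920 = 1.125 and heightRatio = 1080 / 1080 = 1.0. Since widthRatio >= heightRatio, m_uOffset = 0 and m_vOffset = (1 - 1.0 / 1.125) / 2 ≈ 0.056, i.e. roughly 5.6% of the color image is clipped from the top and from the bottom so the remaining image matches the screen's aspect ratio.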
Example #16
    /// <summary>
    /// Minji Kim 2017.09.25
    /// Find Edges wrapper: calls TangoSupport.FindEdgesNearPoint
    /// (TangoSupport_findEdgesNearPoint).
    /// </summary>
    /// <param name="imageBuffer">Color camera image buffer.</param>
    /// <param name="cam">The Unity camera.</param>
    /// <param name="pos">The point in screen space to find edges near.</param>
    /// <param name="edges">Filled in with the edges found near the point.</param>
    /// <param name="num_edges">Filled in with the number of edges found.</param>
    /// <returns><c>true</c> if edges were found; <c>false</c> otherwise.</returns>
    public bool FindEdges(TangoUnityImageData imageBuffer,
                          Camera cam, Vector2 pos, out TangoSupport.TangoSupportEdge[] edges, out int num_edges)
    //out Vector3[] end_points, out Vector3 nearest_on_edge)
    {
        if (m_pointsCount == 0)
        {
            // No points to check, maybe not connected to the service yet
            edges     = new TangoSupport.TangoSupportEdge[1];
            num_edges = 0;
            return(false);
        }

        Vector2 normalizedPos = cam.ScreenToViewportPoint(pos);

        // If the camera has a TangoARScreen attached, it is not displaying the entire color camera image.  Correct
        // the normalized coordinates by taking the clipping into account.
        TangoARScreen arScreen = cam.gameObject.GetComponent <TangoARScreen>();

        if (arScreen != null)
        {
            normalizedPos = arScreen.ViewportPointToCameraImagePoint(normalizedPos);
        }

        // if the image data has not been updated, update it manually
        if (imageBuffer.data == null)
        {
            GetTangoUnityImageData(ref imageBuffer);
        }

        bool returnValue = TangoSupport.FindEdgesNearPoint(
            imageBuffer,
            m_mostRecentPointCloud,
            arScreen.m_screenUpdateTime,
            normalizedPos,
            out edges,
            out num_edges
            );

        Vector3[] start = new Vector3[num_edges];
        Vector3[] end   = new Vector3[num_edges];
        Vector3[] near  = new Vector3[num_edges];
        TangoSupport.TangoSupportEdge[] n_edges = new TangoSupport.TangoSupportEdge[num_edges];
        for (int j = 0; j < num_edges; j++)
        {
            start[j] = new Vector3(edges[j].end_points_x1, edges[j].end_points_y1, edges[j].end_points_z1);
            end[j]   = new Vector3(edges[j].end_points_x2, edges[j].end_points_y2, edges[j].end_points_z2);
            near[j]  = new Vector3(edges[j].closest_point_on_edge_x, edges[j].closest_point_on_edge_y,
                                   edges[j].closest_point_on_edge_z);
            start[j] = m_mostRecentUnityWorldTDepthCamera.MultiplyVector(start[j]);
            end[j]   = m_mostRecentUnityWorldTDepthCamera.MultiplyVector(end[j]);
            near[j]  = m_mostRecentUnityWorldTDepthCamera.MultiplyVector(near[j]);
            Vector3.Normalize(start[j]);
            Vector3.Normalize(end[j]);
            Vector3.Normalize(near[j]);
            n_edges[j].end_points_x1           = start[j][0];
            n_edges[j].end_points_y1           = start[j][1];
            n_edges[j].end_points_z1           = start[j][2];
            n_edges[j].end_points_x2           = end[j][0];
            n_edges[j].end_points_y2           = end[j][1];
            n_edges[j].end_points_z2           = end[j][2];
            n_edges[j].closest_point_on_edge_x = near[j][0];
            n_edges[j].closest_point_on_edge_y = near[j][1];
            n_edges[j].closest_point_on_edge_z = near[j][2];
        }
        edges = n_edges;

        return(returnValue);
    }
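A hedged usage sketch for FindEdges: on a click, draw the returned edges for debugging. The m_pointCloud and m_lastImageBuffer references are assumptions; the field names on TangoSupportEdge match the code above.

    private void Update()
    {
        if (!Input.GetMouseButtonDown(0))
        {
            return;
        }

        TangoSupport.TangoSupportEdge[] edges;
        int numEdges;
        if (m_pointCloud.FindEdges(m_lastImageBuffer, Camera.main, Input.mousePosition, out edges, out numEdges))
        {
            for (int i = 0; i < numEdges; i++)
            {
                Vector3 p1 = new Vector3(edges[i].end_points_x1, edges[i].end_points_y1, edges[i].end_points_z1);
                Vector3 p2 = new Vector3(edges[i].end_points_x2, edges[i].end_points_y2, edges[i].end_points_z2);

                // Draw each edge segment in the scene view for one second.
                Debug.DrawLine(p1, p2, Color.green, 1.0f);
            }
        }
    }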