/// <summary>
/// Get the device pose.
/// </summary>
/// <returns><c>true</c> if the device pose is valid, <c>false</c> otherwise.</returns>
/// <param name="timestamp">The timestamp to query the pose at.</param>
/// <param name="poseData">The pose data returned from the function.</param>
private bool _GetDevicePose(double timestamp, out TangoPoseData poseData)
{
    TangoCoordinateFramePair pair;
    poseData = new TangoPoseData();
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    if (m_useAreaDescriptionPose)
    {
        if (m_tangoApplication.m_enableCloudADF)
        {
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_GLOBAL_WGS84;
        }
        else
        {
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION;
        }
    }
    else
    {
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    }

    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    return poseData.status_code == TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID;
}
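// For illustration only (not from the original source): a minimal caller sketch for
// _GetDevicePose(), assuming the TangoPoseData.ToMatrix4x4() extension used by other
// snippets in this section. The helper name _ExampleConsumeDevicePose is hypothetical.
private void _ExampleConsumeDevicePose(double timestamp)
{
    TangoPoseData poseData;
    if (!_GetDevicePose(timestamp, out poseData))
    {
        // The pose was invalid at this timestamp; skip this frame.
        return;
    }

    // Convert the valid pose into a matrix for further composition,
    // e.g. base_T_device for the frame pair chosen in _GetDevicePose().
    Matrix4x4 baseTDevice = poseData.ToMatrix4x4();
}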
/// <summary>
/// Queries the camera extrinsics, for example the transformation between the IMU frame and the
/// device frame. These extrinsics are used to transform poses from the color camera frame to the
/// device frame. Because the extrinsics are queried through GetPoseAtTime() with the desired
/// frame pair, they can only be queried after ConnectToService() has been called.
///
/// The device frame with respect to the color camera frame is not directly queryable from the
/// API, so the IMU frame is used as an intermediate frame for both transforms.
/// </summary>
private void _SetCameraExtrinsics()
{
    double timestamp = 0.0;
    TangoCoordinateFramePair pair;
    TangoPoseData poseData = new TangoPoseData();

    // Get the transformation of the device frame with respect to the IMU frame.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    Matrix4x4 imuTd = poseData.ToMatrix4x4();

    // Get the transformation of the color camera frame with respect to the IMU frame.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    Matrix4x4 imuTc = poseData.ToMatrix4x4();

    // Constant transform of the Unity camera frame with respect to the color camera frame:
    // the two differ only by a Y-axis flip (Unity cameras are Y-up, the camera frame is Y-down).
    Matrix4x4 cTuc = new Matrix4x4();
    cTuc.SetColumn(0, new Vector4(1.0f, 0.0f, 0.0f, 0.0f));
    cTuc.SetColumn(1, new Vector4(0.0f, -1.0f, 0.0f, 0.0f));
    cTuc.SetColumn(2, new Vector4(0.0f, 0.0f, 1.0f, 0.0f));
    cTuc.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));

    // Compose: device_T_unityCamera = (imu_T_device)^-1 * imu_T_color * color_T_unityCamera.
    m_dTuc = Matrix4x4.Inverse(imuTd) * imuTc * cTuc;
}
/// <summary>
/// Correct saved markers when a loop closure happens.
///
/// When the Tango service is in learning mode, drift accumulates over time, but when the system
/// recognizes a previously seen area it corrects the previously saved poses. This operation is
/// called loop closure. When a loop closure happens, we need to re-query the positions of
/// previously saved markers to get the best result.
/// </summary>
private void _UpdateUnitsForLoopClosures()
{
    // Adjust marker positions each time a loop closure event occurs.
    foreach (SceneUnit unit in MapSceneManager.Instance.GetAllUnit())
    {
        if (unit.m_timestamp != -1.0f)
        {
            TangoCoordinateFramePair pair;
            TangoPoseData relocalizedPose = new TangoPoseData();

            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
            PoseProvider.GetPoseAtTime(relocalizedPose, unit.m_timestamp, pair);

            Matrix4x4 uwTDevice = m_poseController.m_uwTss
                                  * relocalizedPose.ToMatrix4x4()
                                  * m_poseController.m_dTuc;
            Matrix4x4 uwTMarker = uwTDevice * unit.m_deviceT;

            unit.thisT.position = uwTMarker.GetColumn(3);
            unit.thisT.rotation = Quaternion.LookRotation(uwTMarker.GetColumn(2), uwTMarker.GetColumn(1));
        }
    }
}
/// <summary>
/// Sets up extrinsic matrices and camera intrinsics for this hardware.
/// </summary>
private void _SetUpCameraData()
{
    if (m_cameraDataSetUp)
    {
        return;
    }

    double timestamp = 0.0;
    TangoCoordinateFramePair pair;
    TangoPoseData poseData = new TangoPoseData();

    // Query the extrinsics between the IMU and device frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    m_imuTDevice = DMatrix4x4.FromMatrix4x4(poseData.ToMatrix4x4());

    // Query the extrinsics between the IMU and depth camera frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_DEPTH;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    m_imuTDepthCamera = DMatrix4x4.FromMatrix4x4(poseData.ToMatrix4x4());

    m_deviceTDepthCamera = m_imuTDevice.Inverse * m_imuTDepthCamera;

    // Also get the color camera intrinsics.
    m_colorCameraIntrinsics = new TangoCameraIntrinsics();
    VideoOverlayProvider.GetIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_colorCameraIntrinsics);
    m_cameraDataSetUp = true;
}
/// <summary>
/// Sets up extrinsic matrices for this hardware.
/// </summary>
private void _SetUpExtrinsics()
{
    double timestamp = 0.0;
    TangoCoordinateFramePair pair;
    TangoPoseData poseData = new TangoPoseData();

    // Query the extrinsics between the IMU and device frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    Vector3 position = new Vector3((float)poseData.translation[0],
                                   (float)poseData.translation[1],
                                   (float)poseData.translation[2]);
    Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                     (float)poseData.orientation[1],
                                     (float)poseData.orientation[2],
                                     (float)poseData.orientation[3]);
    m_imuTDevice = Matrix4x4.TRS(position, quat, Vector3.one);

    // Query the extrinsics between the IMU and depth camera frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_DEPTH;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    position = new Vector3((float)poseData.translation[0],
                           (float)poseData.translation[1],
                           (float)poseData.translation[2]);
    quat = new Quaternion((float)poseData.orientation[0],
                          (float)poseData.orientation[1],
                          (float)poseData.orientation[2],
                          (float)poseData.orientation[3]);
    m_imuTDepthCamera = Matrix4x4.TRS(position, quat, Vector3.one);
}
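// The translation/orientation-to-matrix boilerplate above recurs in most of the snippets
// below. A small consolidation sketch (hypothetical helper, not in the original source),
// assuming TangoPoseData exposes translation and orientation as double arrays as used above:
private static Matrix4x4 _PoseToMatrix4x4(TangoPoseData poseData)
{
    Vector3 position = new Vector3((float)poseData.translation[0],
                                   (float)poseData.translation[1],
                                   (float)poseData.translation[2]);
    Quaternion rotation = new Quaternion((float)poseData.orientation[0],
                                         (float)poseData.orientation[1],
                                         (float)poseData.orientation[2],
                                         (float)poseData.orientation[3]);
    return Matrix4x4.TRS(position, rotation, Vector3.one);
}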
/// <summary>
/// Correct all saved markers when a loop closure happens.
///
/// When the Tango service is in learning mode, drift accumulates over time, but when the system
/// sees a preexisting area it performs an operation that corrects all previously saved poses
/// (the poses you can query with GetPoseAtTime). This operation is called loop closure. When a
/// loop closure happens, we need to re-query all previously saved marker positions to achieve
/// the best result. This function performs that query based on each marker's timestamp.
/// </summary>
private void _UpdateMarkersForLoopClosures()
{
    // Adjust each marker's position every time a loop closure is detected.
    foreach (GameObject obj in m_markerList)
    {
        ARMarker tempMarker = obj.GetComponent<ARMarker>();
        if (tempMarker.m_timestamp != -1.0f)
        {
            TangoCoordinateFramePair pair;
            TangoPoseData relocalizedPose = new TangoPoseData();

            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
            PoseProvider.GetPoseAtTime(relocalizedPose, tempMarker.m_timestamp, pair);

            Matrix4x4 uwTDevice = m_poseController.m_uwTss
                                  * relocalizedPose.ToMatrix4x4()
                                  * m_poseController.m_dTuc;
            Matrix4x4 uwTMarker = uwTDevice * tempMarker.m_deviceTMarker;

            obj.transform.position = uwTMarker.GetColumn(3);
            obj.transform.rotation = Quaternion.LookRotation(uwTMarker.GetColumn(2), uwTMarker.GetColumn(1));
        }
    }
}
/// <summary>
/// Queries the camera extrinsics, for example the transformation between the IMU frame and the
/// device frame. These extrinsics are used to transform poses from the color camera frame to the
/// device frame. Because the extrinsics are queried through GetPoseAtTime() with the desired
/// frame pair, they can only be queried after ConnectToService() has been called.
///
/// The device frame with respect to the color camera frame is not directly queryable from the
/// API, so the IMU frame is used as an intermediate frame for both transforms.
/// </summary>
private void _SetCameraExtrinsics()
{
    double timestamp = 0.0;
    TangoCoordinateFramePair pair;
    TangoPoseData poseData = new TangoPoseData();

    // Get the transformation of the device frame with respect to the IMU frame.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    Vector3 position = new Vector3((float)poseData.translation[0],
                                   (float)poseData.translation[1],
                                   (float)poseData.translation[2]);
    Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                     (float)poseData.orientation[1],
                                     (float)poseData.orientation[2],
                                     (float)poseData.orientation[3]);
    m_imuTd = Matrix4x4.TRS(position, quat, Vector3.one);

    // Get the transformation of the color camera frame with respect to the IMU frame.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    position = new Vector3((float)poseData.translation[0],
                           (float)poseData.translation[1],
                           (float)poseData.translation[2]);
    quat = new Quaternion((float)poseData.orientation[0],
                          (float)poseData.orientation[1],
                          (float)poseData.orientation[2],
                          (float)poseData.orientation[3]);
    m_imuTc = Matrix4x4.TRS(position, quat, Vector3.one);

    // Compose: device_T_unityCamera = (imu_T_device)^-1 * imu_T_color * color_T_unityCamera.
    m_dTuc = Matrix4x4.Inverse(m_imuTd) * m_imuTc * m_cTuc;
}
/// <summary>
/// Sets up extrinsic matrices and camera intrinsics for this hardware.
/// </summary>
private void _SetUpCameraData()
{
    if (m_cameraDataSetUp)
    {
        return;
    }

    double timestamp = 0.0;
    TangoCoordinateFramePair pair;
    TangoPoseData poseData = new TangoPoseData();

    // Query the extrinsics between the IMU and device frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    Vector3 position = new Vector3((float)poseData.translation[0],
                                   (float)poseData.translation[1],
                                   (float)poseData.translation[2]);
    Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                     (float)poseData.orientation[1],
                                     (float)poseData.orientation[2],
                                     (float)poseData.orientation[3]);
    m_imuTDevice = Matrix4x4.TRS(position, quat, Vector3.one);

    // Query the extrinsics between the IMU and color camera frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    position = new Vector3((float)poseData.translation[0],
                           (float)poseData.translation[1],
                           (float)poseData.translation[2]);
    quat = new Quaternion((float)poseData.orientation[0],
                          (float)poseData.orientation[1],
                          (float)poseData.orientation[2],
                          (float)poseData.orientation[3]);
    m_imuTColorCamera = Matrix4x4.TRS(position, quat, Vector3.one);

    // Query the extrinsics between the IMU and depth camera frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_DEPTH;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    position = new Vector3((float)poseData.translation[0],
                           (float)poseData.translation[1],
                           (float)poseData.translation[2]);
    quat = new Quaternion((float)poseData.orientation[0],
                          (float)poseData.orientation[1],
                          (float)poseData.orientation[2],
                          (float)poseData.orientation[3]);
    m_imuTDepthCamera = Matrix4x4.TRS(position, quat, Vector3.one);

    // Also get the color camera intrinsics.
    m_colorCameraIntrinsics = new TangoCameraIntrinsics();
    VideoOverlayProvider.GetIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_colorCameraIntrinsics);
    m_cameraDataSetUp = true;
}
/// <summary>
/// Fix this! Doesn't output correct values. Input matrix probably not correct.
/// Generates the depth map using nearest neighbor upsampling.
/// </summary>
private void GenerateDepthMap_NearestNeighbor(ref TangoUnityDepth tangoUnityDepth)
{
    TangoPoseData poseData = new TangoPoseData();
    TangoCoordinateFramePair pair;
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    PoseProvider.GetPoseAtTime(poseData, tangoUnityDepth.m_timestamp, pair);
    if (poseData.status_code != TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
    {
        return;
    }

    Vector3 position;
    Quaternion rotation;
    TangoSupport.TangoPoseToWorldTransform(poseData, out position, out rotation);
    Matrix4x4 ccWorld = Matrix4x4.TRS(position, rotation, Vector3.one);

    bool isValid = false;
    Vector3 colorCameraPoint = new Vector3();
    for (int i = 0; i < _depthMapWidth; i++)
    {
        for (int j = 0; j < _depthMapHeight; j++)
        {
            if (TangoSupport.ScreenCoordinateToWorldNearestNeighbor(
                    _PointCloud.m_points, _PointCloud.m_pointsCount,
                    tangoUnityDepth.m_timestamp, _ccIntrinsics, ref ccWorld,
                    new Vector2(i / (float)_depthMapWidth, j / (float)_depthMapHeight),
                    out colorCameraPoint, out isValid) == Common.ErrorType.TANGO_INVALID)
            {
                _depthTexture.SetPixel(i, j, Color.red);
                continue;
            }

            if (isValid)
            {
                float c = 1 - (colorCameraPoint.z / 4.5f);
                _depthTexture.SetPixel(i, j, new Color(c, c, c));
            }
            else
            {
                _depthTexture.SetPixel(i, j, Color.black);
            }
        }
    }

    _depthTexture.Apply();
    _DepthMapQuad.sharedMaterial.mainTexture = _depthTexture;

    // _debugMessage = "DepthAvailable: " + _waitingForDepth.ToString() + "\n" +
    //                 " points: " + _PointCloud.m_pointsCount + "\n" +
    //                 " timestamp: " + tangoUnityDepth.m_timestamp.ToString("0.00") + "\n" +
    //                 " XYZ:" + colorCameraPoint.ToString();
}
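// A hedged guess at the "input matrix probably not correct" note above (untested): the pose
// queried is start-of-service T device, so ccWorld describes the device frame, while the
// nearest-neighbor lookup presumably wants the color camera's world transform. If the IMU
// extrinsics were cached as in the _SetUpCameraData variants elsewhere in this section
// (m_imuTDevice and m_imuTColorCamera assumed populated), the color camera's world transform
// could be composed like this:
Matrix4x4 deviceTColorCamera = Matrix4x4.Inverse(m_imuTDevice) * m_imuTColorCamera;
Matrix4x4 ccWorldColor = ccWorld * deviceTColorCamera;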
/// <summary>
/// Update the transformation to the pose for that timestamp.
/// </summary>
/// <param name="timestamp">Time in seconds to update the transformation to.</param>
private void _UpdateTransformation(double timestamp)
{
    TangoPoseData pose = new TangoPoseData();
    TangoCoordinateFramePair pair;
    if (!m_useAreaDescriptionPose)
    {
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    }
    else
    {
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    }

    PoseProvider.GetPoseAtTime(pose, timestamp, pair);

    // The queried pose is the device pose with respect to the chosen base frame.
    if (pose.status_code == TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
    {
        // Construct the matrix of the device frame with respect to the base frame from the pose.
        Vector3 posePosition = new Vector3((float)pose.translation[0],
                                           (float)pose.translation[1],
                                           (float)pose.translation[2]);
        Quaternion poseRotation = new Quaternion((float)pose.orientation[0],
                                                 (float)pose.orientation[1],
                                                 (float)pose.orientation[2],
                                                 (float)pose.orientation[3]);
        Matrix4x4 ssTd = Matrix4x4.TRS(posePosition, poseRotation, Vector3.one);

        // Calculate the matrix for the camera in the Unity world, taking offsets into account.
        Matrix4x4 uwTuc = m_uwTss * ssTd * m_dTuc;

        // Extract final position and rotation.
        m_tangoPosition = uwTuc.GetColumn(3);
        m_tangoRotation = Quaternion.LookRotation(uwTuc.GetColumn(2), uwTuc.GetColumn(1));

        // Other pose data -- the pose count gets reset if the pose status just became valid.
        if (pose.status_code != m_poseStatus)
        {
            m_poseCount = 0;
        }

        m_poseCount++;

        // Other pose data -- pose time.
        m_poseTimestamp = timestamp;
    }

    m_poseStatus = pose.status_code;

    // Apply final position and rotation.
    transform.position = m_tangoPosition;
    transform.rotation = m_tangoRotation;
}
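// Extracting a position from column 3 and a rotation from the forward/up columns recurs in
// nearly every snippet here. A consolidation sketch (hypothetical helper, not in the
// original source):
private static void _MatrixToPose(Matrix4x4 m, out Vector3 position, out Quaternion rotation)
{
    // Column 3 holds the translation; columns 2 and 1 are the forward (+Z) and up (+Y) axes.
    position = m.GetColumn(3);
    rotation = Quaternion.LookRotation(m.GetColumn(2), m.GetColumn(1));
}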
/// <summary>
/// Callback invoked after the user approved or declined the permission to use Motion Tracking.
/// </summary>
private void _OnTangoApplicationPermissionsEvent(bool permissionsGranted)
{
    if (permissionsGranted)
    {
        m_tangoApplication.InitApplication();
        m_tangoApplication.InitProviders(string.Empty);
        m_tangoApplication.ConnectToService();

        // Ask ARScreen to query the camera intrinsics from the Tango service.
        m_arScreen.SetCameraIntrinsics();

        // The following code queries the camera extrinsics, for example the transformation
        // between the IMU and device frames. These extrinsics are used to transform poses from
        // the device frame to the color camera frame. Because the extrinsics are queried through
        // GetPoseAtTime() with the desired frame pair, they can only be queried after
        // ConnectToService() has been called.
        //
        // The device frame with respect to the color camera frame is not directly queryable from
        // the API, so the IMU frame is used as an intermediate frame for both transforms.
        double timestamp = 0.0;
        TangoCoordinateFramePair pair;
        TangoPoseData poseData = new TangoPoseData();

        // Get the transformation of the device frame with respect to the IMU frame.
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        Vector3 position = new Vector3((float)poseData.translation[0],
                                       (float)poseData.translation[1],
                                       (float)poseData.translation[2]);
        Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                         (float)poseData.orientation[1],
                                         (float)poseData.orientation[2],
                                         (float)poseData.orientation[3]);
        m_imuTd = Matrix4x4.TRS(position, quat, Vector3.one);

        // Get the transformation of the color camera frame with respect to the IMU frame.
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        position = new Vector3((float)poseData.translation[0],
                               (float)poseData.translation[1],
                               (float)poseData.translation[2]);
        quat = new Quaternion((float)poseData.orientation[0],
                              (float)poseData.orientation[1],
                              (float)poseData.orientation[2],
                              (float)poseData.orientation[3]);
        m_imuTc = Matrix4x4.TRS(position, quat, Vector3.one);
    }
    else
    {
        AndroidHelper.ShowAndroidToastMessage("Motion Tracking Permissions Needed", true);
    }
}
/// <summary>
/// Apply any needed changes to the pose.
/// </summary>
private void Update()
{
    if (m_shouldInitTango)
    {
        m_tangoApplication.InitApplication();
        m_tangoApplication.InitProviders(string.Empty);
        m_tangoApplication.ConnectToService();
        m_shouldInitTango = false;

        double timestamp = 0.0;
        TangoCoordinateFramePair pair;
        TangoPoseData poseData = new TangoPoseData();

        // Query the extrinsics between the IMU and device frames.
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        Vector3 position = new Vector3((float)poseData.translation[0],
                                       (float)poseData.translation[1],
                                       (float)poseData.translation[2]);
        Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                         (float)poseData.orientation[1],
                                         (float)poseData.orientation[2],
                                         (float)poseData.orientation[3]);
        m_imuTd = Matrix4x4.TRS(position, quat, Vector3.one);

        // Query the extrinsics between the IMU and color camera frames.
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        position = new Vector3((float)poseData.translation[0],
                               (float)poseData.translation[1],
                               (float)poseData.translation[2]);
        quat = new Quaternion((float)poseData.orientation[0],
                              (float)poseData.orientation[1],
                              (float)poseData.orientation[2],
                              (float)poseData.orientation[3]);
        m_imuTc = Matrix4x4.TRS(position, quat, Vector3.one);
    }

    if (m_isDirty)
    {
        Matrix4x4 ssTd = Matrix4x4.TRS(m_tangoPosition, m_tangoRotation, Vector3.one);
        Matrix4x4 uwTuc = m_uwTss * ssTd * Matrix4x4.Inverse(m_imuTd) * m_imuTc * m_cTuc;

        // Extract the new local position.
        transform.position = uwTuc.GetColumn(3);

        // Extract the new local rotation.
        transform.rotation = Quaternion.LookRotation(uwTuc.GetColumn(2), uwTuc.GetColumn(1));
    }

    if (Input.GetKeyDown(KeyCode.Escape))
    {
        if (m_tangoApplication != null)
        {
            m_tangoApplication.Shutdown();
        }

        // This is a temporary fix for a lifecycle issue where calling
        // Application.Quit() here, and restarting the application immediately,
        // results in a hard crash.
        AndroidHelper.AndroidQuit();
    }
}
/// <summary>
/// Callback invoked after the user approved or declined the permission to use Motion Tracking.
/// </summary>
private void _OnTangoApplicationPermissionsEvent(bool permissionsGranted)
{
    if (permissionsGranted && !m_alreadyInitialized)
    {
        Debug.Log("SampleController._OnApplicationPermissionsEvent()");
        m_tangoApplication.InitApplication();

        if (m_useADF)
        {
            // Query the full ADF list.
            PoseProvider.RefreshADFList();

            // Load the most recently recorded ADF.
            string uuid = PoseProvider.GetLatestADFUUID().GetStringDataUUID();
            m_tangoApplication.InitProviders(uuid);
        }
        else
        {
            m_tangoApplication.InitProviders(string.Empty);
        }

        m_tangoApplication.ConnectToService();

        // Query the constant extrinsic transformations.
        TangoPoseData poseData = new TangoPoseData();
        double timestamp = 0.0;
        TangoCoordinateFramePair pair;
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        Vector3 position = new Vector3((float)poseData.translation[0],
                                       (float)poseData.translation[1],
                                       (float)poseData.translation[2]);
        Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                         (float)poseData.orientation[1],
                                         (float)poseData.orientation[2],
                                         (float)poseData.orientation[3]);
        m_deviceToIMUMatrix = Matrix4x4.TRS(position, quat, Vector3.one);

        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        position = new Vector3((float)poseData.translation[0],
                               (float)poseData.translation[1],
                               (float)poseData.translation[2]);
        quat = new Quaternion((float)poseData.orientation[0],
                              (float)poseData.orientation[1],
                              (float)poseData.orientation[2],
                              (float)poseData.orientation[3]);
        m_cameraToIMUMatrix = Matrix4x4.TRS(position, quat, Vector3.one);

        m_alreadyInitialized = true;
    }
    else if (!permissionsGranted)
    {
        AndroidHelper.ShowAndroidToastMessage("Motion Tracking Permissions Needed", true);
    }
}
/// <summary>
/// Sets up extrinsic matrices and camera intrinsics for this hardware.
/// </summary>
private void _SetUpCameraData()
{
    if (m_cameraDataSetUp)
    {
        return;
    }

    double timestamp = 0.0;
    TangoCoordinateFramePair pair;
    TangoPoseData poseData = new TangoPoseData();

#if UNITY_EDITOR
    // Constant matrices representing just the coordinate convention swap.
    m_imuTDevice.SetColumn(0, new Vector4(0.0f, 1.0f, 0.0f, 0.0f));
    m_imuTDevice.SetColumn(1, new Vector4(-1.0f, 0.0f, 0.0f, 0.0f));
    m_imuTDevice.SetColumn(2, new Vector4(0.0f, 0.0f, 1.0f, 0.0f));
    m_imuTDevice.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));

    m_imuTDepthCamera.SetColumn(0, new Vector4(0.0f, 1.0f, 0.0f, 0.0f));
    m_imuTDepthCamera.SetColumn(1, new Vector4(1.0f, 0.0f, 0.0f, 0.0f));
    m_imuTDepthCamera.SetColumn(2, new Vector4(0.0f, 0.0f, -1.0f, 0.0f));
    m_imuTDepthCamera.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
#else
    // Query the extrinsics between the IMU and device frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    m_imuTDevice = poseData.ToMatrix4x4();

    // Query the extrinsics between the IMU and depth camera frames.
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_DEPTH;
    PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
    m_imuTDepthCamera = poseData.ToMatrix4x4();
#endif

    // Also get the color camera intrinsics.
    m_colorCameraIntrinsics = new TangoCameraIntrinsics();
    VideoOverlayProvider.GetIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_colorCameraIntrinsics);
    m_cameraDataSetUp = true;
}
/// <summary>
/// An event notifying when new depth data is available. OnTangoDepthAvailable events are thread safe.
/// </summary>
/// <param name="tangoDepth">Depth data that we get from the API.</param>
public void OnTangoDepthAvailable(TangoUnityDepth tangoDepth)
{
    // Fill in the data to draw the point cloud.
    if (tangoDepth != null)
    {
        m_occupancyManager.depthPointCount = tangoDepth.m_pointCount;

        // Copy one XYZ triple per point. (Iterating per point; the original loop stepped the
        // index by 3, which skipped two out of every three points.)
        for (int i = 0; i < tangoDepth.m_pointCount; i++)
        {
            m_depthPoints[(3 * i) + 0] = tangoDepth.m_points[(i * 3) + 0];
            m_depthPoints[(3 * i) + 1] = tangoDepth.m_points[(i * 3) + 1];
            m_depthPoints[(3 * i) + 2] = tangoDepth.m_points[(i * 3) + 2];
        }

        m_currTangoDepth.m_timestamp = tangoDepth.m_timestamp;
        m_currTangoDepth.m_pointCount = tangoDepth.m_pointCount;
        PoseProvider.GetPoseAtTime(m_poseAtDepthTimestamp, m_currTangoDepth.m_timestamp, m_coordinatePair);
        m_isDirty = true;
    }
}
/// <summary>
/// Callback that gets called when depth is available from the Tango Service.
/// DO NOT USE THE UNITY API FROM INSIDE THIS FUNCTION!
/// </summary>
/// <param name="tangoDepth">Depth data from the Tango service.</param>
public void OnTangoDepthAvailable(TangoUnityDepth tangoDepth)
{
    // Fill in the data to draw the point cloud.
    if (tangoDepth != null)
    {
        occupancyManager.depthPointCount = tangoDepth.m_pointCount;

        // Copy one vertex per point. (Iterating per point; the original loop stepped the
        // index by 3, which skipped two out of every three points.)
        for (int i = 0; i < tangoDepth.m_pointCount; i++)
        {
            depthPoints[(3 * i) + 0] = tangoDepth.m_vertices[i].x;
            depthPoints[(3 * i) + 1] = tangoDepth.m_vertices[i].y;
            depthPoints[(3 * i) + 2] = tangoDepth.m_vertices[i].z;
        }

        currXYZij.timestamp = tangoDepth.m_timestamp;
        currXYZij.xyz_count = tangoDepth.m_pointCount;
        PoseProvider.GetPoseAtTime(poseAtDepthTimestamp, currXYZij.timestamp, coordinatePair);

        // Minimize the work happening in the callback.
        isDirty = true;
    }
}
/// <summary>
/// Update the camera GameObject's transformation to the pose at the given timestamp.
/// </summary>
private void _UpdateTransformation(double timestamp)
{
    TangoPoseData pose = new TangoPoseData();
    TangoCoordinateFramePair pair;
    pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    PoseProvider.GetPoseAtTime(pose, timestamp, pair);

    m_status = pose.status_code;
    if (pose.status_code == TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
    {
        Vector3 tangoPosition = new Vector3((float)pose.translation[0],
                                            (float)pose.translation[1],
                                            (float)pose.translation[2]);
        Quaternion tangoRotation = new Quaternion((float)pose.orientation[0],
                                                  (float)pose.orientation[1],
                                                  (float)pose.orientation[2],
                                                  (float)pose.orientation[3]);
        Matrix4x4 ssTd = Matrix4x4.TRS(tangoPosition, tangoRotation, Vector3.one);

        // Here we get the pose of the Unity camera frame with respect to the Unity world.
        // This is the transformation of our current pose within the Unity coordinate frame.
        Matrix4x4 uwTuc = m_uwTss * ssTd * m_dTuc;

        // Extract the new local position.
        m_renderCamera.transform.position = uwTuc.GetColumn(3);

        // Extract the new local rotation.
        m_renderCamera.transform.rotation = Quaternion.LookRotation(uwTuc.GetColumn(2), uwTuc.GetColumn(1));
        m_frameCount++;
    }
    else
    {
        m_frameCount = 0;
    }
}
/// <summary>
/// An event notifying when new depth data is available. OnTangoDepthAvailable events are thread safe.
/// </summary>
/// <param name="tangoDepth">Depth data that we get from the API.</param>
public void OnTangoDepthAvailable(TangoUnityDepth tangoDepth)
{
    // Fill in the data to draw the point cloud.
    if (tangoDepth != null)
    {
        if (tangoDepth.m_points == null)
        {
            Debug.Log("Depth points are null");
            return;
        }

        // Grow the buffer with 1.5x headroom if the incoming frame has more points than fit.
        // (The buffer holds 3 floats per point, so compare against 3 * pointCount.)
        if (3 * tangoDepth.m_pointCount > m_currTangoDepth.m_points.Length)
        {
            m_currTangoDepth.m_points = new float[3 * (int)(1.5f * tangoDepth.m_pointCount)];
        }

        // Copy one XYZ triple per point. (Iterating per point; the original loop stepped the
        // index by 3, which skipped two out of every three points.)
        for (int i = 0; i < tangoDepth.m_pointCount; i++)
        {
            m_currTangoDepth.m_points[(3 * i) + 0] = tangoDepth.m_points[(i * 3) + 0];
            m_currTangoDepth.m_points[(3 * i) + 1] = tangoDepth.m_points[(i * 3) + 1];
            m_currTangoDepth.m_points[(3 * i) + 2] = tangoDepth.m_points[(i * 3) + 2];
        }

        m_currTangoDepth.m_timestamp = tangoDepth.m_timestamp;
        m_currTangoDepth.m_pointCount = tangoDepth.m_pointCount;

        PoseProvider.GetPoseAtTime(m_poseAtDepthTimestamp, m_currTangoDepth.m_timestamp, m_coordinatePair);
        if (m_poseAtDepthTimestamp.status_code != TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
        {
            return;
        }

        m_isDirty = true;
    }
}
/// <summary>
/// Update is called once per frame.
/// </summary>
private void Update()
{
    if (m_isDirty)
    {
        // Query the pose to transform the point cloud to world coordinates.
        TangoPoseData poseData = new TangoPoseData();
        TangoCoordinateFramePair pair;
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        PoseProvider.GetPoseAtTime(poseData, m_previousDepthDeltaTime, pair);
        Vector3 position = new Vector3((float)poseData.translation[0],
                                       (float)poseData.translation[1],
                                       (float)poseData.translation[2]);
        Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                         (float)poseData.orientation[1],
                                         (float)poseData.orientation[2],
                                         (float)poseData.orientation[3]);
        m_deviceToStartServiceMatrix = Matrix4x4.TRS(position, quat, Vector3.one);

        Matrix4x4 pointcloudTRS = m_startServiceToUnityWorldMatrix
                                  * m_deviceToStartServiceMatrix
                                  * Matrix4x4.Inverse(m_poseController.m_deviceToIMUMatrix)
                                  * m_poseController.m_cameraToIMUMatrix
                                  * m_unityCameraToCameraMatrix;
        renderer.material.SetMatrix("local_transformation", pointcloudTRS);

        // Update the point cloud.
        _UpdateMesh();
        m_isDirty = false;
    }
}
/// <summary>
/// Callback that gets called when depth is available from the Tango Service.
/// </summary>
/// <param name="tangoDepth">Depth information from Tango.</param>
public void OnTangoDepthAvailable(TangoUnityDepth tangoDepth)
{
    // Calculate the time since the last successful depth data collection.
    if (m_previousDepthDeltaTime == 0.0)
    {
        m_previousDepthDeltaTime = tangoDepth.m_timestamp;
    }
    else
    {
        m_depthDeltaTime = (float)((tangoDepth.m_timestamp - m_previousDepthDeltaTime) * 1000.0);
        m_previousDepthDeltaTime = tangoDepth.m_timestamp;
    }

    // Fill in the data to draw the point cloud.
    if (tangoDepth != null && tangoDepth.m_points != null)
    {
        m_pointsCount = tangoDepth.m_pointCount;
        if (m_pointsCount > 0)
        {
            _SetUpExtrinsics();

            TangoCoordinateFramePair pair;
            TangoPoseData poseData = new TangoPoseData();

            // Query the pose to transform the point cloud to world coordinates, using the
            // timestamp that we get from the depth data.
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
            PoseProvider.GetPoseAtTime(poseData, m_previousDepthDeltaTime, pair);
            if (poseData.status_code != TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
            {
                return;
            }

            Vector3 position = new Vector3((float)poseData.translation[0],
                                           (float)poseData.translation[1],
                                           (float)poseData.translation[2]);
            Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                             (float)poseData.orientation[1],
                                             (float)poseData.orientation[2],
                                             (float)poseData.orientation[3]);
            m_startServiceTDevice = Matrix4x4.TRS(position, quat, Vector3.one);

            // The transformation matrix that represents the point cloud's pose.
            // The point cloud, which is in the depth camera's frame, is brought into the Unity
            // world's coordinate system. The points are transformed directly, so this
            // GameObject's transform is reset to identity.
            Matrix4x4 unityWorldTDepthCamera = m_unityWorldTStartService * m_startServiceTDevice
                                               * Matrix4x4.Inverse(m_imuTDevice) * m_imuTDepthCamera;
            transform.position = Vector3.zero;
            transform.rotation = Quaternion.identity;

            // Convert the points array to world space.
            m_overallZ = 0;
            for (int i = 0; i < m_pointsCount; ++i)
            {
                float x = tangoDepth.m_points[(i * 3) + 0];
                float y = tangoDepth.m_points[(i * 3) + 1];
                float z = tangoDepth.m_points[(i * 3) + 2];

                m_points[i] = unityWorldTDepthCamera.MultiplyPoint(new Vector3(x, y, z));
                m_overallZ += z;
            }

            m_overallZ = m_overallZ / m_pointsCount;

            if (m_updatePointsMesh)
            {
                // Need to update indices too!
                int[] indices = new int[m_pointsCount];
                for (int i = 0; i < m_pointsCount; ++i)
                {
                    indices[i] = i;
                }

                m_mesh.Clear();
                m_mesh.vertices = m_points;
                m_mesh.SetIndices(indices, MeshTopology.Points, 0);
            }

            // The color should be pose relative; we need to store enough info to go back to pose values.
            m_renderer.material.SetMatrix("depthCameraTUnityWorld", unityWorldTDepthCamera.inverse);
        }
        else
        {
            m_overallZ = 0;
        }
    }
}
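// Aside on the per-point transform above: Matrix4x4.MultiplyPoint performs a full projective
// transform including the perspective divide, while MultiplyPoint3x4 assumes an affine matrix
// and skips the divide. Since unityWorldTDepthCamera is a rigid transform, the cheaper call
// (which a later variant of this method does use) would be equally correct here:
m_points[i] = unityWorldTDepthCamera.MultiplyPoint3x4(new Vector3(x, y, z));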
/// <summary>
/// Callback that gets called when depth is available from the Tango Service.
/// DO NOT USE THE UNITY API FROM INSIDE THIS FUNCTION!
/// </summary>
/// <param name="tangoDepth">Depth data from the Tango service.</param>
public void OnTangoDepthAvailable(TangoUnityDepth tangoDepth)
{
    // Calculate the time since the last successful depth data collection.
    if (m_previousDepthDeltaTime == 0.0)
    {
        m_previousDepthDeltaTime = tangoDepth.m_timestamp;
    }
    else
    {
        m_depthDeltaTime = (float)((tangoDepth.m_timestamp - m_previousDepthDeltaTime) * 1000.0);
        m_previousDepthDeltaTime = tangoDepth.m_timestamp;
    }

    // Fill in the data to draw the point cloud.
    if (tangoDepth != null && tangoDepth.m_points != null)
    {
        int numberOfActiveVertices = tangoDepth.m_pointCount;
        m_pointsCount = numberOfActiveVertices;
        float validPointCount = 0;
        if (numberOfActiveVertices > 0)
        {
            _SetUpExtrinsics();

            TangoCoordinateFramePair pair;
            TangoPoseData poseData = new TangoPoseData();

            // Query the pose to transform the point cloud to world coordinates, using the
            // timestamp that we get from the depth data.
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
            PoseProvider.GetPoseAtTime(poseData, m_previousDepthDeltaTime, pair);
            Vector3 position = new Vector3((float)poseData.translation[0],
                                           (float)poseData.translation[1],
                                           (float)poseData.translation[2]);
            Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                             (float)poseData.orientation[1],
                                             (float)poseData.orientation[2],
                                             (float)poseData.orientation[3]);
            m_ssTd = Matrix4x4.TRS(position, quat, Vector3.one);

            // The transformation matrix that represents the point cloud's pose.
            // The point cloud, which is in the RGB camera's frame, is placed in the Unity
            // world's coordinate system. We then extract the position and rotation from the
            // uwTuc matrix and apply them to the point cloud's transform.
            Matrix4x4 uwTuc = m_uwTss * m_ssTd * Matrix4x4.Inverse(m_imuTd) * m_imuTc * m_cTuc;
            transform.position = uwTuc.GetColumn(3);
            transform.rotation = Quaternion.LookRotation(uwTuc.GetColumn(2), uwTuc.GetColumn(1));

            Vector3[] pointCloudVertices = new Vector3[VERT_COUNT];

            // Convert the points array to a vector array for Unity to create a mesh.
            // Reset the running Z accumulator before summing this frame's points.
            m_overallZ = 0;
            for (int i = 0; i < numberOfActiveVertices; ++i)
            {
                pointCloudVertices[i] = new Vector3(tangoDepth.m_points[i * 3],
                                                    tangoDepth.m_points[(i * 3) + 1],
                                                    tangoDepth.m_points[(i * 3) + 2]);
                m_overallZ += pointCloudVertices[i].z;
                ++validPointCount;
            }

            m_mesh.Clear();
            m_mesh.vertices = pointCloudVertices;
            m_mesh.triangles = m_triangles;
            m_mesh.SetIndices(m_triangles, MeshTopology.Points, 0);
        }

        // Don't divide by zero!
        if (validPointCount != 0)
        {
            m_overallZ = m_overallZ / validPointCount;
        }
        else
        {
            m_overallZ = 0;
        }
    }
}
/// @endcond
/// <summary>
/// Updates the transformation to the pose for that timestamp.
/// </summary>
/// <param name="timestamp">Time in seconds to update the transformation to.</param>
private void _UpdateTransformation(double timestamp)
{
    TangoPoseData pose = new TangoPoseData();
    TangoCoordinateFramePair pair;

    // Choose the proper pair according to the properties of this controller.
    if (m_useAreaDescriptionPose)
    {
        if (m_tangoApplication.m_enableCloudADF)
        {
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_GLOBAL_WGS84;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        }
        else
        {
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        }
    }
    else
    {
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    }

    PoseProvider.GetPoseAtTime(pose, timestamp, pair);

    // Update properties from the pose.
    if (pose.status_code == TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
    {
        DMatrix4x4 globalTLocal;
        bool success = m_tangoApplication.GetGlobalTLocal(out globalTLocal);
        if (!success)
        {
            Debug.LogError("Unable to obtain GlobalTLocal from TangoApplication.");
            return;
        }

        DMatrix4x4 uwTDevice = DMatrix4x4.FromMatrix4x4(TangoSupport.UNITY_WORLD_T_START_SERVICE)
                               * globalTLocal.Inverse
                               * DMatrix4x4.TR(pose.translation, pose.orientation);

        // Calculate the matrix for the camera in the Unity world, taking offsets into account.
        Matrix4x4 uwTuc = uwTDevice.ToMatrix4x4() * m_dTuc * TangoSupport.m_colorCameraPoseRotation;

        // Extract final position and rotation.
        m_tangoPosition = uwTuc.GetColumn(3);
        m_tangoRotation = Quaternion.LookRotation(uwTuc.GetColumn(2), uwTuc.GetColumn(1));

        // Other pose data -- the pose count gets reset if the pose status just became valid.
        if (pose.status_code != m_poseStatus)
        {
            m_poseCount = 0;
        }

        m_poseCount++;

        // Other pose data -- pose time.
        m_poseTimestamp = timestamp;
    }

    m_poseStatus = pose.status_code;

    // Apply final position and rotation.
    transform.position = m_tangoPosition;
    transform.rotation = m_tangoRotation;
}
/// <summary>
/// Apply any needed changes to the pose.
/// </summary>
private void Update()
{
#if UNITY_ANDROID && !UNITY_EDITOR
    if (m_shouldInitTango)
    {
        m_tangoApplication.InitApplication();

        if (m_useADF)
        {
            // Query the full ADF list.
            PoseProvider.RefreshADFList();

            // Load the most recently recorded ADF.
            string uuid = PoseProvider.GetLatestADFUUID().GetStringDataUUID();
            m_tangoApplication.InitProviders(uuid);
        }
        else
        {
            m_tangoApplication.InitProviders(string.Empty);
        }

        // Query the constant extrinsic transformations.
        TangoPoseData poseData = new TangoPoseData();
        double timestamp = 0.0;
        TangoCoordinateFramePair pair;
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        Vector3 position = new Vector3((float)poseData.translation[0],
                                       (float)poseData.translation[1],
                                       (float)poseData.translation[2]);
        Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                         (float)poseData.orientation[1],
                                         (float)poseData.orientation[2],
                                         (float)poseData.orientation[3]);
        m_deviceToIMUMatrix = Matrix4x4.TRS(position, quat, Vector3.one);

        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        position = new Vector3((float)poseData.translation[0],
                               (float)poseData.translation[1],
                               (float)poseData.translation[2]);
        quat = new Quaternion((float)poseData.orientation[0],
                              (float)poseData.orientation[1],
                              (float)poseData.orientation[2],
                              (float)poseData.orientation[3]);
        m_cameraToIMUMatrix = Matrix4x4.TRS(position, quat, Vector3.one);

        m_alreadyInitialized = true;
        m_shouldInitTango = false;
        m_tangoApplication.ConnectToService();
    }

    if (m_isDirty)
    {
        // This rotation needs to be put into Unity coordinate space.
        Quaternion rotationFix = Quaternion.Euler(90.0f, 0.0f, 0.0f);
        if (!m_isRelocalized)
        {
            Quaternion axisFix = Quaternion.Euler(-m_tangoRotation[0].eulerAngles.x,
                                                  -m_tangoRotation[0].eulerAngles.z,
                                                  m_tangoRotation[0].eulerAngles.y);
            transform.rotation = m_startingRotation * (rotationFix * axisFix);
            transform.position = (m_startingRotation * (m_tangoPosition[0] * m_movementScale)) + m_startingOffset;
        }
        else
        {
            Quaternion axisFix = Quaternion.Euler(-m_tangoRotation[1].eulerAngles.x,
                                                  -m_tangoRotation[1].eulerAngles.z,
                                                  m_tangoRotation[1].eulerAngles.y);
            transform.rotation = m_startingRotation * (rotationFix * axisFix);
            transform.position = (m_startingRotation * (m_tangoPosition[1] * m_movementScale)) + m_startingOffset;
        }

        m_isDirty = false;
    }

    if (Input.GetKeyDown(KeyCode.Escape))
    {
        if (m_tangoApplication != null)
        {
            m_tangoApplication.Shutdown();
        }

        // This is a temporary fix for a lifecycle issue where calling
        // Application.Quit() here, and restarting the application immediately,
        // results in a hard crash.
        AndroidHelper.AndroidQuit();
    }
#else
    Vector3 tempPosition = transform.position;
    Quaternion tempRotation = transform.rotation;
    PoseProvider.GetMouseEmulation(ref tempPosition, ref tempRotation);
    transform.rotation = tempRotation;
    transform.position = tempPosition;
#endif
}
/// <summary>
/// Updates the transformation to the latest pose.
/// </summary>
private void _UpdatePose()
{
    // Query a new pose.
    TangoPoseData pose = new TangoPoseData();
    double queryTimestamp = IsTargetingColorCamera ? m_tangoARScreen.m_screenUpdateTime : 0.0;
    PoseProvider.GetPoseAtTime(pose, queryTimestamp, _GetFramePair());

    // Do not update with invalid poses.
    if (pose.status_code != TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
    {
        return;
    }

    // Do not update if the last update was for the same timestamp.
    if (pose.timestamp == LastPoseTimestamp)
    {
        return;
    }

    LastPoseTimestamp = pose.timestamp;

    DMatrix4x4 globalTLocal;
    if (!m_tangoApplication.GetGlobalTLocal(out globalTLocal))
    {
        Debug.LogError("Unable to obtain GlobalTLocal from TangoApplication.");
        return;
    }

    DMatrix4x4 unityWorld_T_device = DMatrix4x4.FromMatrix4x4(TangoSupport.UNITY_WORLD_T_START_SERVICE)
                                     * globalTLocal.Inverse
                                     * DMatrix4x4.TR(pose.translation, pose.orientation);

    // Calculate the matrix for the camera in the Unity world.
    if (IsTargetingColorCamera)
    {
        m_unityWorld_T_unityCamera = unityWorld_T_device.ToMatrix4x4()
                                     * TangoSupport.COLOR_CAMERA_T_UNITY_CAMERA
                                     * TangoSupport.m_colorCameraPoseRotation;
    }
    else
    {
        m_unityWorld_T_unityCamera = unityWorld_T_device.ToMatrix4x4()
                                     * TangoSupport.DEVICE_T_UNITY_CAMERA
                                     * TangoSupport.m_devicePoseRotation;
    }

    // Extract the final position and rotation from the matrix.
    Matrix4x4 unityWorldOffset_T_unityCamera = m_unityWorldTransformOffset_T_unityWorld * m_unityWorld_T_unityCamera;
    Vector3 finalPosition = unityWorldOffset_T_unityCamera.GetColumn(3);
    Quaternion finalRotation = Quaternion.LookRotation(unityWorldOffset_T_unityCamera.GetColumn(2),
                                                       unityWorldOffset_T_unityCamera.GetColumn(1));

    // Filter out yaw if the clutch is enabled.
    if (m_clutchEnabled)
    {
        finalPosition = transform.position;
        finalRotation = Quaternion.Euler(finalRotation.eulerAngles.x, transform.eulerAngles.y, finalRotation.eulerAngles.z);
    }

    // Apply the final position.
    if (m_characterController)
    {
        m_characterController.Move(finalPosition - transform.position);
    }
    else
    {
        transform.position = finalPosition;
    }

    transform.rotation = finalRotation;
}
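// _GetFramePair() is referenced above but not shown in this snippet. A plausible sketch
// (assumption only), mirroring the frame-pair selection in the _UpdateTransformation
// variants earlier in this section; m_useAreaDescriptionPose is assumed to exist:
private TangoCoordinateFramePair _GetFramePair()
{
    TangoCoordinateFramePair pair;
    pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    pair.baseFrame = m_useAreaDescriptionPose
        ? TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION
        : TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    return pair;
}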
/// <summary>
/// Update is called once per frame.
/// </summary>
private void Update()
{
    if (m_isDirty)
    {
        double timestamp = 0.0;
        TangoCoordinateFramePair pair;
        TangoPoseData poseData = new TangoPoseData();

        // Query the extrinsics between the IMU and device frames.
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        Vector3 position = new Vector3((float)poseData.translation[0],
                                       (float)poseData.translation[1],
                                       (float)poseData.translation[2]);
        Quaternion quat = new Quaternion((float)poseData.orientation[0],
                                         (float)poseData.orientation[1],
                                         (float)poseData.orientation[2],
                                         (float)poseData.orientation[3]);
        m_imuTd = Matrix4x4.TRS(position, quat, Vector3.one);

        // Query the extrinsics between the IMU and color camera frames.
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
        PoseProvider.GetPoseAtTime(poseData, timestamp, pair);
        position = new Vector3((float)poseData.translation[0],
                               (float)poseData.translation[1],
                               (float)poseData.translation[2]);
        quat = new Quaternion((float)poseData.orientation[0],
                              (float)poseData.orientation[1],
                              (float)poseData.orientation[2],
                              (float)poseData.orientation[3]);
        m_imuTc = Matrix4x4.TRS(position, quat, Vector3.one);

        // Query the pose to transform the point cloud to world coordinates, using the timestamp
        // that we get from the depth data.
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        PoseProvider.GetPoseAtTime(poseData, m_previousDepthDeltaTime, pair);
        position = new Vector3((float)poseData.translation[0],
                               (float)poseData.translation[1],
                               (float)poseData.translation[2]);
        quat = new Quaternion((float)poseData.orientation[0],
                              (float)poseData.orientation[1],
                              (float)poseData.orientation[2],
                              (float)poseData.orientation[3]);
        m_ssTd = Matrix4x4.TRS(position, quat, Vector3.one);

        // The transformation matrix that represents the point cloud's pose.
        Matrix4x4 uwTuc = m_uwTss * m_ssTd * Matrix4x4.Inverse(m_imuTd) * m_imuTc * m_cTuc;
        transform.position = uwTuc.GetColumn(3);
        transform.rotation = Quaternion.LookRotation(uwTuc.GetColumn(2), uwTuc.GetColumn(1));

        m_mesh.Clear();
        m_mesh.vertices = m_vertices;
        m_mesh.triangles = m_triangles;
        m_mesh.SetIndices(m_triangles, MeshTopology.Points, 0);
        m_isDirty = false;
    }
}
/// <summary>
/// Callback that gets called when depth is available from the Tango Service.
/// </summary>
/// <param name="pointCloud">Depth information from Tango.</param>
public void OnTangoPointCloudAvailable(TangoPointCloudData pointCloud)
{
    // Calculate the time since the last successful depth data collection.
    if (m_depthTimestamp != 0.0)
    {
        m_depthDeltaTime = (float)((pointCloud.m_timestamp - m_depthTimestamp) * 1000.0);
    }

    // Fill in the data to draw the point cloud.
    m_pointsCount = pointCloud.m_numPoints;
    if (m_pointsCount > 0)
    {
        _SetUpCameraData();

        TangoCoordinateFramePair pair;
        TangoPoseData poseData = new TangoPoseData();

        // Query the pose to transform the point cloud to world coordinates, using the timestamp
        // that we get from the depth data.
        if (m_useAreaDescriptionPose)
        {
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        }
        else
        {
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        }

        PoseProvider.GetPoseAtTime(poseData, pointCloud.m_timestamp, pair);
        if (poseData.status_code != TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
        {
            return;
        }

        Matrix4x4 startServiceTDevice = poseData.ToMatrix4x4();

        // The transformation matrix that represents the point cloud's pose.
        // The point cloud, which is in the depth camera's frame, is brought into the Unity
        // world's coordinate system. The points are transformed directly, so this GameObject's
        // transform is reset to identity.
        Matrix4x4 unityWorldTDepthCamera = m_unityWorldTStartService * startServiceTDevice
                                           * Matrix4x4.Inverse(m_imuTDevice) * m_imuTDepthCamera;
        transform.position = Vector3.zero;
        transform.rotation = Quaternion.identity;

        // Add an offset to the point cloud depending on the offset from TangoDeltaPoseController.
        Matrix4x4 unityWorldOffsetTDepthCamera;
        if (m_tangoDeltaPoseController != null)
        {
            unityWorldOffsetTDepthCamera = m_tangoDeltaPoseController.UnityWorldOffset * unityWorldTDepthCamera;
        }
        else
        {
            unityWorldOffsetTDepthCamera = unityWorldTDepthCamera;
        }

        // Convert the points array to world space.
        m_overallZ = 0;
        for (int i = 0; i < m_pointsCount; ++i)
        {
            Vector3 point = pointCloud[i];
            m_points[i] = unityWorldOffsetTDepthCamera.MultiplyPoint3x4(point);
            m_overallZ += point.z;
        }

        m_overallZ = m_overallZ / m_pointsCount;
        m_depthTimestamp = pointCloud.m_timestamp;

        if (m_updatePointsMesh)
        {
            // Need to update indices too!
            int[] indices = new int[m_pointsCount];
            for (int i = 0; i < m_pointsCount; ++i)
            {
                indices[i] = i;
            }

            m_mesh.Clear();
            m_mesh.vertices = m_points;
            m_mesh.SetIndices(indices, MeshTopology.Points, 0);
        }

        // The color should be pose relative; we need to store enough info to go back to pose values.
        m_renderer.material.SetMatrix("depthCameraTUnityWorld", unityWorldOffsetTDepthCamera.inverse);

        // Try to find the floor using this set of depth points if requested.
        if (m_findFloorWithDepth)
        {
            _FindFloorWithDepth();
        }
    }
    else
    {
        m_overallZ = 0;
    }
}
void Update()
{
    if (!vieTangoInfo.UseADF)
    {
        framePair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    }

    ArgPose pose = new ArgPose();
    pose.label = "Tcam";

    // Query the latest pose (timestamp 0) and record whether it is valid.
    TangoPoseData p = new TangoPoseData();
    PoseProvider.GetPoseAtTime(p, 0, framePair);
    poseOk = p.status_code == TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID;
    vieTangoInfo.PoseOk = poseOk;

    if (EnableCam && poseOk)
    {
        pose.p.x = (float)p.translation.x;
        pose.p.y = (float)p.translation.y;
        pose.p.z = (float)p.translation.z;
        pose.r.x = (float)p.orientation.x;
        pose.r.y = (float)p.orientation.y;
        pose.r.z = (float)p.orientation.z;
        pose.r.w = (float)p.orientation.w;

        // Debug.Log("camera ROLL" + pose.r.ToString());
        SendPose(pose);
    }

    Robo.transform.position = new Vector3(roboPose.p.x, roboPose.p.y, roboPose.p.z);
    LocalGoal.transform.position = new Vector3(localGoalPose.p.x, localGoalPose.p.y, localGoalPose.p.z);

    // Robo.transform.Translate(pose.p.x, pose.p.y, pose.p.z);
    // Debug.Log(Robo.transform.position);
    // Debug.Log(roboPose.p.x.ToString() + roboPose.p.y.ToString() + roboPose.p.z.ToString());
    // Robo.rotation.Set(pose.r.x, pose.r.y, pose.r.z, pose.r.w);

    if (newSense)
    {
        newSense = false;
        sensCount++;
        Debug.Log("SENS:" + sensData.v.ToString());
        if (sensCount % 2 == 0)
        {
            // Spawn a marker at the robot's pose, colored by the (normalized) sensor value.
            GameObject so = Instantiate(SensObj, Robo.transform.position, Robo.transform.rotation);
            float v = sensData.v / 1000.0f;
            v = Mathf.Pow(v, 0.2f);
            if (v > 1)
            {
                v = 1;
            }

            Color col = Color.HSVToRGB(0.75f - (v * 0.75f), 1, 1);
            col.a = 0.4f;
            so.GetComponent<Renderer>().material.color = col;
            Destroy(so, 60.0f);
        }
    }
}