/**
 * Converts {@link DMatrix4x4} into {@link DMatrixRMaj}.
 *
 * @param input Input matrix.
 * @param output Output matrix. If null, a new matrix will be declared.
 * @return Converted matrix.
 */
public static DMatrixRMaj convert(DMatrix4x4 input, DMatrixRMaj output)
{
    if (output == null)
    {
        output = new DMatrixRMaj(4, 4);
    }

    if (input.getNumRows() != output.getNumRows())
    {
        throw new ArgumentException("Number of rows do not match");
    }

    if (input.getNumCols() != output.getNumCols())
    {
        throw new ArgumentException("Number of columns do not match");
    }

    output.data[0] = input.a11;
    output.data[1] = input.a12;
    output.data[2] = input.a13;
    output.data[3] = input.a14;
    output.data[4] = input.a21;
    output.data[5] = input.a22;
    output.data[6] = input.a23;
    output.data[7] = input.a24;
    output.data[8] = input.a31;
    output.data[9] = input.a32;
    output.data[10] = input.a33;
    output.data[11] = input.a34;
    output.data[12] = input.a41;
    output.data[13] = input.a42;
    output.data[14] = input.a43;
    output.data[15] = input.a44;

    return output;
}
/**
 * Converts {@link DMatrix4x4} into {@link DMatrixRMaj}.
 *
 * @param input Input matrix.
 * @param output Output matrix. If null, a new matrix will be declared.
 * @return Converted matrix.
 */
public static DMatrixRMaj convert(DMatrix4x4 input, DMatrixRMaj output)
{
    if (output == null)
    {
        output = new DMatrixRMaj(4, 4);
    }

    // Unlike the checked variant above, this version reshapes the output
    // to match the input instead of throwing on a dimension mismatch.
    output.reshape(input.NumRows, input.NumCols);

    output.data[0] = input.a11;
    output.data[1] = input.a12;
    output.data[2] = input.a13;
    output.data[3] = input.a14;
    output.data[4] = input.a21;
    output.data[5] = input.a22;
    output.data[6] = input.a23;
    output.data[7] = input.a24;
    output.data[8] = input.a31;
    output.data[9] = input.a32;
    output.data[10] = input.a33;
    output.data[11] = input.a34;
    output.data[12] = input.a41;
    output.data[13] = input.a42;
    output.data[14] = input.a43;
    output.data[15] = input.a44;

    return output;
}
/// <summary> /// Sets up extrinsic matrixes and camera intrinsics for this hardware. /// </summary> private void _SetUpCameraData() { if (m_cameraDataSetUp) { return; } double timestamp = 0.0; TangoCoordinateFramePair pair; TangoPoseData poseData = new TangoPoseData(); // Query the extrinsics between IMU and device frame. pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU; pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE; PoseProvider.GetPoseAtTime(poseData, timestamp, pair); m_imuTDevice = DMatrix4x4.FromMatrix4x4(poseData.ToMatrix4x4()); // Query the extrinsics between IMU and depth camera frame. pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU; pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_DEPTH; PoseProvider.GetPoseAtTime(poseData, timestamp, pair); m_imuTDepthCamera = DMatrix4x4.FromMatrix4x4(poseData.ToMatrix4x4()); m_deviceTDepthCamera = m_imuTDevice.Inverse * m_imuTDepthCamera; // Also get the camera intrinsics m_colorCameraIntrinsics = new TangoCameraIntrinsics(); VideoOverlayProvider.GetIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_colorCameraIntrinsics); m_cameraDataSetUp = true; }
/**
 * Converts {@link DMatrixRMaj} into {@link DMatrix4x4}.
 *
 * @param input Input matrix.
 * @param output Output matrix. If null, a new matrix will be declared.
 * @return Converted matrix.
 */
public static DMatrix4x4 convert(DMatrixRMaj input, DMatrix4x4 output)
{
    if (output == null)
    {
        output = new DMatrix4x4();
    }

    if (input.getNumRows() != output.getNumRows())
    {
        throw new ArgumentException("Number of rows do not match");
    }

    if (input.getNumCols() != output.getNumCols())
    {
        throw new ArgumentException("Number of columns do not match");
    }

    output.a11 = input.data[0];
    output.a12 = input.data[1];
    output.a13 = input.data[2];
    output.a14 = input.data[3];
    output.a21 = input.data[4];
    output.a22 = input.data[5];
    output.a23 = input.data[6];
    output.a24 = input.data[7];
    output.a31 = input.data[8];
    output.a32 = input.data[9];
    output.a33 = input.data[10];
    output.a34 = input.data[11];
    output.a41 = input.data[12];
    output.a42 = input.data[13];
    output.a43 = input.data[14];
    output.a44 = input.data[15];

    return output;
}
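Taken together, the convert overloads above go in both directions. A minimal round-trip sketch, assuming only the names visible in the snippets and calling the methods unqualified as if from the same class (the values are illustrative):

// Round-trip sketch: fixed-size -> row-major -> fixed-size.
DMatrix4x4 fixedSize = new DMatrix4x4();
fixedSize.a11 = 1.0; fixedSize.a22 = 2.0; fixedSize.a33 = 3.0; fixedSize.a44 = 4.0;

// Passing null allocates a fresh 4x4 DMatrixRMaj.
DMatrixRMaj rowMajor = convert(fixedSize, null);

// Reusing an existing output avoids the allocation.
DMatrix4x4 roundTrip = convert(rowMajor, new DMatrix4x4());
// roundTrip.a11 == 1.0 and roundTrip.a44 == 4.0 hold again.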
/// @cond
/// <summary>
/// Use this for initialization.
/// </summary>
public void Start()
{
    m_tangoApplication = FindObjectOfType<TangoApplication>();
    m_tangoApplication.Register(this);
    m_tangoDeltaPoseController = FindObjectOfType<TangoDeltaPoseController>();

    // Constant matrix converting the Unity camera frame to the color camera
    // frame (the two differ by a flip of the Y axis).
    m_colorCameraTUnityCamera = new DMatrix4x4(1.0, 0.0, 0.0, 0.0,
                                               0.0, -1.0, 0.0, 0.0,
                                               0.0, 0.0, 1.0, 0.0,
                                               0.0, 0.0, 0.0, 1.0);

    // Assign triangles; note: this is just for visualizing points in the mesh data.
    m_points = new Vector3[MAX_POINT_COUNT];

    m_mesh = GetComponent<MeshFilter>().mesh;
    m_mesh.Clear();

    // Points used for finding the floor plane.
    m_numPointsAtY = new Dictionary<float, int>();
    m_nonNoiseBuckets = new List<float>();

    m_renderer = GetComponent<Renderer>();
}
// Frobenius norm without the overflow guard used by normF below: faster,
// but the squared terms can overflow for very large entries.
public static double fastNormF(DMatrix4x4 M)
{
    double sum = 0;

    sum += M.a11 * M.a11 + M.a12 * M.a12 + M.a13 * M.a13 + M.a14 * M.a14;
    sum += M.a21 * M.a21 + M.a22 * M.a22 + M.a23 * M.a23 + M.a24 * M.a24;
    sum += M.a31 * M.a31 + M.a32 * M.a32 + M.a33 * M.a33 + M.a34 * M.a34;
    sum += M.a41 * M.a41 + M.a42 * M.a42 + M.a43 * M.a43 + M.a44 * M.a44;

    return Math.Sqrt(sum);
}
public static void convert(FMatrix4x4 src, DMatrix4x4 dst)
{
    dst.a11 = src.a11;
    dst.a12 = src.a12;
    dst.a13 = src.a13;
    dst.a14 = src.a14;
    dst.a21 = src.a21;
    dst.a22 = src.a22;
    dst.a23 = src.a23;
    dst.a24 = src.a24;
    dst.a31 = src.a31;
    dst.a32 = src.a32;
    dst.a33 = src.a33;
    dst.a34 = src.a34;
    dst.a41 = src.a41;
    dst.a42 = src.a42;
    dst.a43 = src.a43;
    dst.a44 = src.a44;
}
public static bool hasUncountable(DMatrix4x4 a)
{
    // If any element in a row is NaN or infinite, the row's sum is also
    // NaN or infinite, so each row needs only a single check.
    if (UtilEjml.isUncountable(a.a11 + a.a12 + a.a13 + a.a14))
    {
        return true;
    }

    if (UtilEjml.isUncountable(a.a21 + a.a22 + a.a23 + a.a24))
    {
        return true;
    }

    if (UtilEjml.isUncountable(a.a31 + a.a32 + a.a33 + a.a34))
    {
        return true;
    }

    if (UtilEjml.isUncountable(a.a41 + a.a42 + a.a43 + a.a44))
    {
        return true;
    }

    return false;
}
public static double normF(DMatrix4x4 M)
{
    // Scale by the largest absolute element so the squared terms cannot
    // overflow or underflow.
    double scale = CommonOps_DDF4.elementMaxAbs(M);

    if (scale == 0.0)
    {
        return 0.0;
    }

    double a11 = M.a11 / scale, a12 = M.a12 / scale, a13 = M.a13 / scale, a14 = M.a14 / scale;
    double a21 = M.a21 / scale, a22 = M.a22 / scale, a23 = M.a23 / scale, a24 = M.a24 / scale;
    double a31 = M.a31 / scale, a32 = M.a32 / scale, a33 = M.a33 / scale, a34 = M.a34 / scale;
    double a41 = M.a41 / scale, a42 = M.a42 / scale, a43 = M.a43 / scale, a44 = M.a44 / scale;

    double sum = 0;
    sum += a11 * a11 + a12 * a12 + a13 * a13 + a14 * a14;
    sum += a21 * a21 + a22 * a22 + a23 * a23 + a24 * a24;
    sum += a31 * a31 + a32 * a32 + a33 * a33 + a34 * a34;
    sum += a41 * a41 + a42 * a42 + a43 * a43 + a44 * a44;

    return scale * Math.Sqrt(sum);
}
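A small sketch of why the scaled version is worth the extra divisions, assuming both methods are visible in scope; the magnitudes are only illustrative:

// With entries around 1e200, the squared terms in fastNormF overflow to
// infinity, while normF's pre-scaling keeps everything representable.
DMatrix4x4 big = new DMatrix4x4();
big.a11 = 1e200;
big.a22 = 1e200;

double unsafeNorm = fastNormF(big); // double.IsInfinity(unsafeNorm) == true
double safeNorm = normF(big);       // approximately 1.414e200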
/// <summary> /// Callback that gets called when depth is available from the Tango Service. /// </summary> /// <param name="pointCloud">Depth information from Tango.</param> public void OnTangoPointCloudAvailable(TangoPointCloudData pointCloud) { m_mostRecentPointCloud = pointCloud; // Calculate the time since the last successful depth data // collection. if (m_depthTimestamp != 0.0) { m_depthDeltaTime = (float)((pointCloud.m_timestamp - m_depthTimestamp) * 1000.0); } // Fill in the data to draw the point cloud. m_pointsCount = pointCloud.m_numPoints; if (m_pointsCount > 0) { _SetUpCameraData(); DMatrix4x4 globalTLocal; bool globalTLocalSuccess = m_tangoApplication.GetGlobalTLocal(out globalTLocal); if (!globalTLocalSuccess) { return; } DMatrix4x4 unityWorldTGlobal = DMatrix4x4.FromMatrix4x4(TangoSupport.UNITY_WORLD_T_START_SERVICE) * globalTLocal.Inverse; TangoPoseData poseData; // Query pose to transform point cloud to world coordinates, here we are using the timestamp that we get from depth. bool poseSuccess = _GetDevicePose(pointCloud.m_timestamp, out poseData); if (!poseSuccess) { return; } DMatrix4x4 unityWorldTDevice = unityWorldTGlobal * DMatrix4x4.TR(poseData.translation, poseData.orientation); // The transformation matrix that represents the point cloud's pose. // Explanation: // The point cloud, which is in Depth camera's frame, is put in Unity world's // coordinate system(wrt Unity world). // Then we are extracting the position and rotation from uwTuc matrix and applying it to // the point cloud's transform. DMatrix4x4 unityWorldTDepthCamera = unityWorldTDevice * m_deviceTDepthCamera; transform.position = Vector3.zero; transform.rotation = Quaternion.identity; // Add offset to the point cloud depending on the offset from TangoDeltaPoseController. if (m_tangoDeltaPoseController != null) { m_mostRecentUnityWorldTDepthCamera = m_tangoDeltaPoseController.UnityWorldOffset * unityWorldTDepthCamera.ToMatrix4x4(); } else { m_mostRecentUnityWorldTDepthCamera = unityWorldTDepthCamera.ToMatrix4x4(); } // Converting points array to world space. m_overallZ = 0; for (int i = 0; i < m_pointsCount; ++i) { Vector3 point = pointCloud[i]; m_points[i] = m_mostRecentUnityWorldTDepthCamera.MultiplyPoint3x4(point); m_overallZ += point.z; } m_overallZ = m_overallZ / m_pointsCount; m_depthTimestamp = pointCloud.m_timestamp; if (m_updatePointsMesh) { // Need to update indices too! int[] indices = new int[m_pointsCount]; for (int i = 0; i < m_pointsCount; ++i) { indices[i] = i; } m_mesh.Clear(); m_mesh.vertices = m_points; m_mesh.SetIndices(indices, MeshTopology.Points, 0); } // The color should be pose relative; we need to store enough info to go back to pose values. m_renderer.material.SetMatrix("depthCameraTUnityWorld", m_mostRecentUnityWorldTDepthCamera.inverse); // Try to find the floor using this set of depth points if requested. if (m_findFloorWithDepth) { _FindFloorWithDepth(); } } else { m_overallZ = 0; } }
public static int TangoSupport_getDepthAtPointNearestNeighborMatrixTransform(
    TangoXYZij pointCloud, TangoCameraIntrinsics cameraIntrinsics,
    ref DMatrix4x4 matrix, ref Vector2 uvCoordinates,
    out Vector3 colorCameraPoint, out int isValidPoint)
{
    // Stub: reports a valid point at the origin and returns success
    // without doing any work.
    colorCameraPoint = Vector3.zero;
    isValidPoint = 1;
    return Common.ErrorType.TANGO_SUCCESS;
}
public static int TangoSupport_fitPlaneModelNearPointMatrixTransform(
    TangoXYZij pointCloud, TangoCameraIntrinsics cameraIntrinsics,
    ref DMatrix4x4 matrix, ref Vector2 uvCoordinates,
    out DVector3 intersectionPoint, double[] planeModel)
{
    // Stub: returns success with a default intersection point and leaves
    // the plane model untouched.
    intersectionPoint = new DVector3();
    return Common.ErrorType.TANGO_SUCCESS;
}
// P/Invoke binding: as an extern method this must carry a [DllImport]
// attribute pointing at the native Tango support library (omitted in this
// snippet).
public static extern int TangoSupport_getDepthAtPointNearestNeighborMatrixTransform(
    TangoXYZij pointCloud, TangoCameraIntrinsics cameraIntrinsics,
    ref DMatrix4x4 matrix, ref Vector2 uvCoordinates,
    out Vector3 colorCameraPoint,
    [Out, MarshalAs(UnmanagedType.I4)] out int isValidPoint);
/// <summary> /// Fits a plane to a point cloud near a user-specified location. This /// occurs in two passes. First, all points in cloud within /// <c>maxPixelDistance</c> to <c>uvCoordinates</c> after projection are kept. Then a /// plane is fit to the subset cloud using RANSAC. After the initial fit /// all inliers from the original cloud are used to refine the plane /// model. /// </summary> /// <returns> /// Common.ErrorType.TANGO_SUCCESS on success, /// Common.ErrorType.TANGO_INVALID on invalid input, and /// Common.ErrorType.TANGO_ERROR on failure. /// </returns> /// <param name="pointCloud"> /// The point cloud. Cannot be null and must have at least three points. /// </param> /// <param name="pointCount"> /// The number of points to read from the point cloud. /// </param> /// <param name="timestamp">The timestamp of the point cloud.</param> /// <param name="cameraIntrinsics"> /// The camera intrinsics for the color camera. Cannot be null. /// </param> /// <param name="matrix"> /// Transformation matrix of the color camera with respect to the Unity /// World frame. /// </param> /// <param name="uvCoordinates"> /// The UV coordinates for the user selection. This is expected to be /// between (0.0, 0.0) and (1.0, 1.0). /// </param> /// <param name="intersectionPoint"> /// The output point in depth camera coordinates that the user selected. /// </param> /// <param name="plane">The plane fit.</param> public static int FitPlaneModelNearClick( Vector3[] pointCloud, int pointCount, double timestamp, TangoCameraIntrinsics cameraIntrinsics, ref Matrix4x4 matrix, Vector2 uvCoordinates, out Vector3 intersectionPoint, out Plane plane) { GCHandle pointCloudHandle = GCHandle.Alloc(pointCloud, GCHandleType.Pinned); TangoXYZij pointCloudXyzIj = new TangoXYZij(); pointCloudXyzIj.timestamp = timestamp; pointCloudXyzIj.xyz_count = pointCount; pointCloudXyzIj.xyz = pointCloudHandle.AddrOfPinnedObject(); DMatrix4x4 doubleMatrix = new DMatrix4x4(matrix); // Unity has Y pointing screen up; Tango camera has Y pointing // screen down. Vector2 uvCoordinatesTango = new Vector2(uvCoordinates.x, 1.0f - uvCoordinates.y); DVector3 doubleIntersectionPoint = new DVector3(); double[] planeArray = new double[4]; int returnValue = TangoSupportAPI.TangoSupport_fitPlaneModelNearPointMatrixTransform( pointCloudXyzIj, cameraIntrinsics, ref doubleMatrix, ref uvCoordinatesTango, out doubleIntersectionPoint, planeArray); if (returnValue != Common.ErrorType.TANGO_SUCCESS) { intersectionPoint = new Vector3(0.0f, 0.0f, 0.0f); plane = new Plane(new Vector3(0.0f, 0.0f, 0.0f), 0.0f); } else { intersectionPoint = doubleIntersectionPoint.ToVector3(); Vector3 normal = new Vector3((float)planeArray[0], (float)planeArray[1], (float)planeArray[2]); float distance = (float)planeArray[3] / normal.magnitude; plane = new Plane(normal, distance); } pointCloudHandle.Free(); return returnValue; }
/// <summary> /// Updates the transformation to the latest pose. /// </summary> private void _UpdatePose() { // Query a new pose. TangoPoseData pose = new TangoPoseData(); double queryTimestamp = IsTargetingColorCamera ? m_tangoARScreen.m_screenUpdateTime : 0.0f; PoseProvider.GetPoseAtTime(pose, queryTimestamp, _GetFramePair()); // Do not update with invalide poses. if (pose.status_code != TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID) { return; } // Do not update if the last update was for the same timestamp. if (pose.timestamp == LastPoseTimestamp) { return; } LastPoseTimestamp = pose.timestamp; DMatrix4x4 globalTLocal; if (!m_tangoApplication.GetGlobalTLocal(out globalTLocal)) { Debug.LogError("Unable to obtain GlobalTLocal from TangoApplication."); return; } DMatrix4x4 unityWorld_T_device = DMatrix4x4.FromMatrix4x4(TangoSupport.UNITY_WORLD_T_START_SERVICE) * globalTLocal.Inverse * DMatrix4x4.TR(pose.translation, pose.orientation); // Calculate matrix for the camera in the Unity world if (IsTargetingColorCamera) { m_unityWorld_T_unityCamera = unityWorld_T_device.ToMatrix4x4() * TangoSupport.COLOR_CAMERA_T_UNITY_CAMERA * TangoSupport.m_colorCameraPoseRotation; } else { m_unityWorld_T_unityCamera = unityWorld_T_device.ToMatrix4x4() * TangoSupport.DEVICE_T_UNITY_CAMERA * TangoSupport.m_devicePoseRotation; } // Extract final position and rotation from matrix. Matrix4x4 unityWorldOffset_T_unityCamera = m_unityWorldTransformOffset_T_unityWorld * m_unityWorld_T_unityCamera; Vector3 finalPosition = unityWorldOffset_T_unityCamera.GetColumn(3); Quaternion finalRotation = Quaternion.LookRotation(unityWorldOffset_T_unityCamera.GetColumn(2), unityWorldOffset_T_unityCamera.GetColumn(1)); // Filter out yaw if the clutch is enabled. if (m_clutchEnabled) { finalPosition = transform.position; finalRotation = Quaternion.Euler(finalRotation.eulerAngles.x, transform.eulerAngles.y, finalRotation.eulerAngles.z); } // Apply the final position. if (m_characterController) { m_characterController.Move(finalPosition - transform.position); } else { transform.position = finalPosition; } transform.rotation = finalRotation; }
/// @endcond
/// <summary>
/// Updates the transformation to the pose for that timestamp.
/// </summary>
/// <param name="timestamp">Time in seconds to update the transformation to.</param>
private void _UpdateTransformation(double timestamp)
{
    TangoPoseData pose = new TangoPoseData();
    TangoCoordinateFramePair pair;

    // Choose the proper pair according to the properties of this controller.
    if (m_useAreaDescriptionPose)
    {
        if (m_tangoApplication.m_enableCloudADF)
        {
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_GLOBAL_WGS84;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        }
        else
        {
            pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION;
            pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
        }
    }
    else
    {
        pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE;
        pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
    }

    PoseProvider.GetPoseAtTime(pose, timestamp, pair);

    // Update the properties from the pose.
    if (pose.status_code == TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID)
    {
        DMatrix4x4 globalTLocal;
        bool success = m_tangoApplication.GetGlobalTLocal(out globalTLocal);
        if (!success)
        {
            Debug.LogError("Unable to obtain GlobalTLocal from TangoApplication.");
            return;
        }

        DMatrix4x4 uwTDevice =
            DMatrix4x4.FromMatrix4x4(TangoSupport.UNITY_WORLD_T_START_SERVICE)
            * globalTLocal.Inverse
            * DMatrix4x4.TR(pose.translation, pose.orientation);

        // Calculate the matrix for the camera in the Unity world, taking
        // offsets into account.
        Matrix4x4 uwTuc = uwTDevice.ToMatrix4x4() * m_dTuc * TangoSupport.m_colorCameraPoseRotation;

        // Extract the final position and rotation.
        m_tangoPosition = uwTuc.GetColumn(3);
        m_tangoRotation = Quaternion.LookRotation(uwTuc.GetColumn(2), uwTuc.GetColumn(1));

        // Other pose data -- the pose count gets reset if the pose status
        // just became valid.
        if (pose.status_code != m_poseStatus)
        {
            m_poseCount = 0;
        }

        m_poseCount++;

        // Other pose data -- pose time.
        m_poseTimestamp = timestamp;
    }

    m_poseStatus = pose.status_code;

    // Apply the final position and rotation.
    transform.position = m_tangoPosition;
    transform.rotation = m_tangoRotation;
}
public static bool isIdentical(DMatrix4x4 a, DMatrix4x4 b, double tol)
{
    if (!MatrixFeatures_DDRM.isIdentical(a.a11, b.a11, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a12, b.a12, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a13, b.a13, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a14, b.a14, tol)) { return false; }

    if (!MatrixFeatures_DDRM.isIdentical(a.a21, b.a21, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a22, b.a22, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a23, b.a23, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a24, b.a24, tol)) { return false; }

    if (!MatrixFeatures_DDRM.isIdentical(a.a31, b.a31, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a32, b.a32, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a33, b.a33, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a34, b.a34, tol)) { return false; }

    if (!MatrixFeatures_DDRM.isIdentical(a.a41, b.a41, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a42, b.a42, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a43, b.a43, tol)) { return false; }
    if (!MatrixFeatures_DDRM.isIdentical(a.a44, b.a44, tol)) { return false; }

    return true;
}
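A small sketch of the tolerance semantics, assuming the scalar MatrixFeatures_DDRM.isIdentical accepts values within tol of each other, as in EJML:

DMatrix4x4 a = new DMatrix4x4();
DMatrix4x4 b = new DMatrix4x4();
a.a11 = 1.0;
b.a11 = 1.0 + 1e-9;

bool close = isIdentical(a, b, 1e-8); // true: every element pair is within tol
bool strict = isIdentical(a, b, 0.0); // false: an exact match is required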
/// <summary> /// Calculates the depth in the color camera space at a user-specified /// location using bilateral filtering weighted by both spatial distance /// from the user coordinate and by intensity similarity. /// </summary> /// <returns> /// Common.ErrorType.TANGO_SUCCESS on success, /// Common.ErrorType.TANGO_INVALID on invalid input, and /// Common.ErrorType.TANGO_ERROR on failure. /// </returns> /// <param name="pointCloud"> /// The point cloud. Cannot be null and must have at least one point. /// </param> /// <param name="pointCount"> /// The number of points to read from the point cloud. /// </param> /// <param name="timestamp">The timestamp of the depth points.</param> /// <param name="cameraIntrinsics"> /// The camera intrinsics for the color camera. Cannot be null. /// </param> /// <param name="colorImage"> /// The color image buffer. Cannot be null. /// </param> /// <param name="matrix"> /// Transformation matrix of the color camera with respect to the Unity /// World frame. /// </param> /// <param name="uvCoordinates"> /// The UV coordinates for the user selection. This is expected to be /// between (0.0, 0.0) and (1.0, 1.0). /// </param> /// <param name="colorCameraPoint"> /// The point (x, y, z), where (x, y) is the back-projection of the UV /// coordinates to the color camera space and z is the z coordinate of /// the point in the point cloud nearest to the user selection after /// projection onto the image plane. If there is not a point cloud point /// close to the user selection after projection onto the image plane, /// then the point will be set to (0.0, 0.0, 0.0) and isValidPoint will /// be set to false. /// </param> /// <param name="isValidPoint"> /// A flag valued true if there is a point cloud point close to the user /// selection after projection onto the image plane and valued false /// otherwise. /// </param> public static int ScreenCoordinateToWorldBilateral( Vector3[] pointCloud, int pointCount, double timestamp, TangoCameraIntrinsics cameraIntrinsics, TangoImageBuffer colorImage, ref Matrix4x4 matrix, Vector2 uvCoordinates, out Vector3 colorCameraPoint, out bool isValidPoint) { GCHandle pointCloudHandle = GCHandle.Alloc(pointCloud, GCHandleType.Pinned); TangoXYZij pointCloudXyzIj = new TangoXYZij(); pointCloudXyzIj.timestamp = timestamp; pointCloudXyzIj.xyz_count = pointCount; pointCloudXyzIj.xyz = pointCloudHandle.AddrOfPinnedObject(); DMatrix4x4 doubleMatrix = new DMatrix4x4(matrix); // Unity has Y pointing screen up; Tango camera has Y pointing // screen down. Vector2 uvCoordinatesTango = new Vector2(uvCoordinates.x, 1.0f - uvCoordinates.y); int isValidPointInteger; int returnValue = TangoSupportAPI.TangoSupport_getDepthAtPointBilateralCameraIntrinsicsMatrixTransform( pointCloudXyzIj, cameraIntrinsics, colorImage, ref doubleMatrix, ref uvCoordinatesTango, out colorCameraPoint, out isValidPointInteger); isValidPoint = isValidPointInteger != 0; pointCloudHandle.Free(); return returnValue; }
public static void normalizeF(DMatrix4x4 M)
{
    // Note: normF of the zero matrix is 0, so dividing would fill M with
    // NaN; callers are expected to pass a nonzero matrix.
    double val = normF(M);
    CommonOps_DDF4.divide(M, val);
}
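A short usage sketch, calling the methods unqualified as if from the same class; the zero-matrix guard is added here for illustration and is not part of the library:

DMatrix4x4 direction = new DMatrix4x4();
direction.a11 = 3.0;
direction.a21 = 4.0;

// normalizeF divides by normF(M), so a zero matrix must be rejected first.
if (normF(direction) != 0.0)
{
    normalizeF(direction);
}

// normF(direction) is now 1.0 (a11 == 0.6, a21 == 0.8).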
/// <summary> /// Set controller's transformation based on received pose. /// </summary> /// <param name="pose">Received Tango pose data.</param> private void _UpdateTransformationFromPose(TangoPoseData pose) { // Remember the previous position so you can do delta motion. m_prevTangoPosition = m_tangoPosition; m_prevTangoRotation = m_tangoRotation; // The callback pose is for device with respect to start of service pose. if (pose.status_code == TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID) { DMatrix4x4 globalTLocal; bool success = m_tangoApplication.GetGlobalTLocal(out globalTLocal); if (!success) { Debug.LogError("Unable to obtain GlobalTLocal from Tango application."); return; } DMatrix4x4 startOfServiceTDevice = globalTLocal.Inverse * DMatrix4x4.TR(pose.translation, pose.orientation); m_uwTuc = TangoSupport.UNITY_WORLD_T_START_SERVICE * startOfServiceTDevice.ToMatrix4x4() * TangoSupport.DEVICE_T_UNITY_CAMERA * TangoSupport.m_devicePoseRotation; Matrix4x4 uwOffsetTuc = m_uwOffsetTuw * m_uwTuc; m_tangoPosition = uwOffsetTuc.GetColumn(3); // apply scale for Daydream m_tangoPosition.x *= m_motionMappingScaleXZ; m_tangoPosition.z *= m_motionMappingScaleXZ; m_tangoPosition.y *= m_motionMappingScaleY; m_tangoRotation = Quaternion.LookRotation(uwOffsetTuc.GetColumn(2), uwOffsetTuc.GetColumn(1)); // Other pose data: pose count gets reset if pose status just became valid. if (pose.status_code != m_poseStatus) { m_poseCount = 0; } m_poseCount++; // Other pose data: pose delta time. m_poseDeltaTime = (float)pose.timestamp - m_poseTimestamp; m_poseTimestamp = (float)pose.timestamp; } m_poseStatus = pose.status_code; if (m_clutchActive) { // When clutching, preserve position. m_tangoPosition = m_prevTangoPosition; // When clutching, preserve yaw, keep changes in pitch, roll. Vector3 rotationAngles = m_tangoRotation.eulerAngles; rotationAngles.y = m_prevTangoRotation.eulerAngles.y; m_tangoRotation.eulerAngles = rotationAngles; } // edit: following https://blogs.unity3d.com/jp/2017/10/18/mobile-inside-out-vr-tracking-now-readily-available-on-your-phone-with-unity/ // Calculate final position and rotation deltas and apply them. Vector3 deltaPosition = m_tangoPosition - m_prevTangoPosition; Quaternion deltaRotation = m_tangoRotation * Quaternion.Inverse(m_prevTangoRotation); if (m_characterMotion && m_characterController != null) { m_characterController.Move(deltaPosition); //transform.rotation = deltaRotation * transform.rotation; } else { transform.position = transform.position + deltaPosition; //transform.rotation = deltaRotation * transform.rotation; } }
// Companion P/Invoke binding; like the declaration above, it requires a
// [DllImport] attribute (omitted in this snippet).
public static extern int TangoSupport_fitPlaneModelNearPointMatrixTransform(
    TangoXYZij pointCloud, TangoCameraIntrinsics cameraIntrinsics,
    ref DMatrix4x4 matrix, ref Vector2 uvCoordinates,
    out DVector3 intersectionPoint,
    [Out, MarshalAs(UnmanagedType.LPArray, SizeConst = 4)] double[] planeModel);