/// <summary>
/// Calculates the depth in the color camera space at a user-specified
/// location using nearest-neighbor interpolation.
/// </summary>
/// <returns>
/// Common.ErrorType.TANGO_SUCCESS on success and
/// Common.ErrorType.TANGO_INVALID on invalid input.
/// </returns>
/// <param name="pointCloud">
/// The point cloud. Cannot be null and must have at least one point.
/// </param>
/// <param name="pointCount">
/// The number of points to read from the point cloud.
/// </param>
/// <param name="timestamp">The timestamp of the depth points.</param>
/// <param name="cameraIntrinsics">
/// The camera intrinsics for the color camera. Cannot be null.
/// </param>
/// <param name="matrix">
/// Transformation matrix of the color camera with respect to the Unity
/// World frame.
/// </param>
/// <param name="uvCoordinates">
/// The UV coordinates for the user selection. This is expected to be
/// between (0.0, 0.0) and (1.0, 1.0).
/// </param>
/// <param name="colorCameraPoint">
/// The point (x, y, z), where (x, y) is the back-projection of the UV
/// coordinates to the color camera space and z is the z coordinate of
/// the point in the point cloud nearest to the user selection after
/// projection onto the image plane. If there is not a point cloud point
/// close to the user selection after projection onto the image plane,
/// then the point will be set to (0.0, 0.0, 0.0) and isValidPoint will
/// be set to false.
/// </param>
/// <param name="isValidPoint">
/// A flag valued true if there is a point cloud point close to the user
/// selection after projection onto the image plane and valued false
/// otherwise.
/// </param>
public static int GetDepthAtPointNearestNeighbor(
    Vector3[] pointCloud, int pointCount, double timestamp,
    TangoCameraIntrinsics cameraIntrinsics, ref Matrix4x4 matrix,
    Vector2 uvCoordinates, out Vector3 colorCameraPoint,
    out bool isValidPoint)
{
    // Pin the managed array so the native API can read it in place.
    GCHandle pointCloudHandle = GCHandle.Alloc(pointCloud,
                                               GCHandleType.Pinned);
    try
    {
        TangoXYZij pointCloudXyzIj = new TangoXYZij();
        pointCloudXyzIj.timestamp = timestamp;
        pointCloudXyzIj.xyz_count = pointCount;
        pointCloudXyzIj.xyz = pointCloudHandle.AddrOfPinnedObject();

        DMatrix4x4 doubleMatrix = new DMatrix4x4(matrix);

        // Unity has Y pointing screen up; Tango camera has Y pointing
        // screen down.
        Vector2 uvCoordinatesTango = new Vector2(
            uvCoordinates.x, 1.0f - uvCoordinates.y);

        int isValidPointInteger;
        int returnValue =
            TangoSupportAPI.TangoSupport_getDepthAtPointNearestNeighborMatrixTransform(
                pointCloudXyzIj, cameraIntrinsics, ref doubleMatrix,
                ref uvCoordinatesTango, out colorCameraPoint,
                out isValidPointInteger);
        isValidPoint = isValidPointInteger != 0;
        return returnValue;
    }
    finally
    {
        // Always unpin the point cloud, even if the native call throws;
        // a pinned GCHandle that is never freed leaks and blocks the GC.
        pointCloudHandle.Free();
    }
}
/// <summary>
/// Calculates the depth in the color camera space at a user-specified
/// location using bilateral filtering weighted by both spatial distance
/// from the user coordinate and by intensity similarity.
/// </summary>
/// <returns>
/// Common.ErrorType.TANGO_SUCCESS on success,
/// Common.ErrorType.TANGO_INVALID on invalid input, and
/// Common.ErrorType.TANGO_ERROR on failure.
/// </returns>
/// <param name="pointCloud">
/// The point cloud. Cannot be null and must have at least one point.
/// </param>
/// <param name="pointCount">
/// The number of points to read from the point cloud.
/// </param>
/// <param name="timestamp">The timestamp of the depth points.</param>
/// <param name="cameraIntrinsics">
/// The camera intrinsics for the color camera. Cannot be null.
/// </param>
/// <param name="colorImage">
/// The color image buffer. Cannot be null.
/// </param>
/// <param name="matrix">
/// Transformation matrix of the color camera with respect to the Unity
/// World frame.
/// </param>
/// <param name="uvCoordinates">
/// The UV coordinates for the user selection. This is expected to be
/// between (0.0, 0.0) and (1.0, 1.0).
/// </param>
/// <param name="colorCameraPoint">
/// The point (x, y, z), where (x, y) is the back-projection of the UV
/// coordinates to the color camera space and z is the z coordinate of
/// the point in the point cloud nearest to the user selection after
/// projection onto the image plane. If there is not a point cloud point
/// close to the user selection after projection onto the image plane,
/// then the point will be set to (0.0, 0.0, 0.0) and isValidPoint will
/// be set to false.
/// </param>
/// <param name="isValidPoint">
/// A flag valued true if there is a point cloud point close to the user
/// selection after projection onto the image plane and valued false
/// otherwise.
/// </param>
public static int ScreenCoordinateToWorldBilateral(
    Vector3[] pointCloud, int pointCount, double timestamp,
    TangoCameraIntrinsics cameraIntrinsics, TangoImageBuffer colorImage,
    ref Matrix4x4 matrix, Vector2 uvCoordinates,
    out Vector3 colorCameraPoint, out bool isValidPoint)
{
    // Pin the managed array so the native API can read it in place.
    GCHandle pointCloudHandle = GCHandle.Alloc(pointCloud,
                                               GCHandleType.Pinned);
    try
    {
        TangoXYZij pointCloudXyzIj = new TangoXYZij();
        pointCloudXyzIj.timestamp = timestamp;
        pointCloudXyzIj.xyz_count = pointCount;
        pointCloudXyzIj.xyz = pointCloudHandle.AddrOfPinnedObject();

        DMatrix4x4 doubleMatrix = new DMatrix4x4(matrix);

        // Unity has Y pointing screen up; Tango camera has Y pointing
        // screen down.
        Vector2 uvCoordinatesTango = new Vector2(
            uvCoordinates.x, 1.0f - uvCoordinates.y);

        int isValidPointInteger;
        int returnValue =
            TangoSupportAPI.TangoSupport_getDepthAtPointBilateralCameraIntrinsicsMatrixTransform(
                pointCloudXyzIj, cameraIntrinsics, colorImage,
                ref doubleMatrix, ref uvCoordinatesTango,
                out colorCameraPoint, out isValidPointInteger);
        isValidPoint = isValidPointInteger != 0;
        return returnValue;
    }
    finally
    {
        // Always unpin the point cloud, even if the native call throws;
        // a pinned GCHandle that is never freed leaks and blocks the GC.
        pointCloudHandle.Free();
    }
}
/// <summary>
/// No-op fallback for the native plane-fitting entry point (presumably
/// used when the native Tango support library is unavailable, e.g. in
/// the Unity editor — confirm against the surrounding class).
/// Reports success without inspecting the inputs.
/// </summary>
/// <returns>Always Common.ErrorType.TANGO_SUCCESS.</returns>
/// <param name="pointCloud">Ignored.</param>
/// <param name="cameraIntrinsics">Ignored.</param>
/// <param name="matrix">Ignored.</param>
/// <param name="uvCoordinates">Ignored.</param>
/// <param name="intersectionPoint">
/// Set to a default-constructed DVector3.
/// </param>
/// <param name="planeModel">Left untouched.</param>
public static int TangoSupport_fitPlaneModelNearPointMatrixTransform(
    TangoXYZij pointCloud, TangoCameraIntrinsics cameraIntrinsics,
    ref DMatrix4x4 matrix, ref Vector2 uvCoordinates,
    out DVector3 intersectionPoint, double[] planeModel)
{
    // Out parameter must still be assigned even though no work is done.
    intersectionPoint = new DVector3();

    int status = Common.ErrorType.TANGO_SUCCESS;
    return status;
}
/// <summary>
/// No-op fallback for the native nearest-neighbor depth lookup
/// (presumably used when the native Tango support library is
/// unavailable, e.g. in the Unity editor — confirm against the
/// surrounding class). Reports success without inspecting the inputs.
/// </summary>
/// <returns>Always Common.ErrorType.TANGO_SUCCESS.</returns>
/// <param name="pointCloud">Ignored.</param>
/// <param name="cameraIntrinsics">Ignored.</param>
/// <param name="matrix">Ignored.</param>
/// <param name="uvCoordinates">Ignored.</param>
/// <param name="colorCameraPoint">Set to Vector3.zero.</param>
/// <param name="isValidPoint">Always set to 1 (valid).</param>
public static int TangoSupport_getDepthAtPointNearestNeighborMatrixTransform(
    TangoXYZij pointCloud, TangoCameraIntrinsics cameraIntrinsics,
    ref DMatrix4x4 matrix, ref Vector2 uvCoordinates,
    out Vector3 colorCameraPoint, out int isValidPoint)
{
    // Out parameters must still be assigned even though no work is done.
    colorCameraPoint = Vector3.zero;
    isValidPoint = 1;

    int status = Common.ErrorType.TANGO_SUCCESS;
    return status;
}
/// <summary>
/// Converts a single-precision Matrix4x4 into a double-precision
/// DMatrix4x4, element by element.
/// </summary>
/// <param name="m">The float matrix to convert.</param>
/// <returns>A DMatrix4x4 with the same element values.</returns>
public static DMatrix4x4 FromMatrix4x4(Matrix4x4 m)
{
    // Each element is widened explicitly to double; the explicit casts
    // keep overload resolution unambiguous. Rows are laid out to mirror
    // the mNN naming.
    return new DMatrix4x4(
        (double)m.m00, (double)m.m01, (double)m.m02, (double)m.m03,
        (double)m.m10, (double)m.m11, (double)m.m12, (double)m.m13,
        (double)m.m20, (double)m.m21, (double)m.m22, (double)m.m23,
        (double)m.m30, (double)m.m31, (double)m.m32, (double)m.m33);
}
/// <summary>
/// Fits a plane to a point cloud near a user-specified location. This
/// occurs in two passes. First, all points in cloud within
/// <c>maxPixelDistance</c> to <c>uvCoordinates</c> after projection are kept. Then a
/// plane is fit to the subset cloud using RANSAC. After the initial fit
/// all inliers from the original cloud are used to refine the plane
/// model.
/// </summary>
/// <returns>
/// Common.ErrorType.TANGO_SUCCESS on success,
/// Common.ErrorType.TANGO_INVALID on invalid input, and
/// Common.ErrorType.TANGO_ERROR on failure.
/// </returns>
/// <param name="pointCloud">
/// The point cloud. Cannot be null and must have at least three points.
/// </param>
/// <param name="pointCount">
/// The number of points to read from the point cloud.
/// </param>
/// <param name="timestamp">The timestamp of the point cloud.</param>
/// <param name="cameraIntrinsics">
/// The camera intrinsics for the color camera. Cannot be null.
/// </param>
/// <param name="matrix">
/// Transformation matrix of the color camera with respect to the Unity
/// World frame.
/// </param>
/// <param name="uvCoordinates">
/// The UV coordinates for the user selection. This is expected to be
/// between (0.0, 0.0) and (1.0, 1.0).
/// </param>
/// <param name="intersectionPoint">
/// The output point in depth camera coordinates that the user selected.
/// </param>
/// <param name="plane">The plane fit.</param>
public static int FitPlaneModelNearClick(
    Vector3[] pointCloud, int pointCount, double timestamp,
    TangoCameraIntrinsics cameraIntrinsics, ref Matrix4x4 matrix,
    Vector2 uvCoordinates, out Vector3 intersectionPoint,
    out Plane plane)
{
    // Pin the managed array so the native API can read it in place.
    GCHandle pointCloudHandle = GCHandle.Alloc(pointCloud,
                                               GCHandleType.Pinned);
    try
    {
        TangoXYZij pointCloudXyzIj = new TangoXYZij();
        pointCloudXyzIj.timestamp = timestamp;
        pointCloudXyzIj.xyz_count = pointCount;
        pointCloudXyzIj.xyz = pointCloudHandle.AddrOfPinnedObject();

        DMatrix4x4 doubleMatrix = new DMatrix4x4(matrix);

        // Unity has Y pointing screen up; Tango camera has Y pointing
        // screen down.
        Vector2 uvCoordinatesTango = new Vector2(
            uvCoordinates.x, 1.0f - uvCoordinates.y);

        DVector3 doubleIntersectionPoint = new DVector3();

        // Plane model as (a, b, c, d) of ax + by + cz + d = 0, filled by
        // the native call.
        double[] planeArray = new double[4];
        int returnValue =
            TangoSupportAPI.TangoSupport_fitPlaneModelNearPointMatrixTransform(
                pointCloudXyzIj, cameraIntrinsics, ref doubleMatrix,
                ref uvCoordinatesTango, out doubleIntersectionPoint,
                planeArray);

        if (returnValue != Common.ErrorType.TANGO_SUCCESS)
        {
            // Out parameters must be assigned on every path.
            intersectionPoint = new Vector3(0.0f, 0.0f, 0.0f);
            plane = new Plane(new Vector3(0.0f, 0.0f, 0.0f), 0.0f);
        }
        else
        {
            intersectionPoint = doubleIntersectionPoint.ToVector3();
            Vector3 normal = new Vector3(
                (float)planeArray[0], (float)planeArray[1],
                (float)planeArray[2]);

            // The native plane model is not necessarily normalized;
            // dividing d by |n| keeps the distance consistent with the
            // normalized normal Unity's Plane stores.
            float distance = (float)planeArray[3] / normal.magnitude;
            plane = new Plane(normal, distance);
        }

        return returnValue;
    }
    finally
    {
        // Always unpin the point cloud, even if the native call throws;
        // a pinned GCHandle that is never freed leaks and blocks the GC.
        pointCloudHandle.Free();
    }
}
/// <summary>
/// Creates a combined translation and rotation matrix.
/// </summary>
/// <param name="translation">Translation as 3 doubles in a DVector3 struct.</param>
/// <param name="orientation">Orientation as 4 doubles in a DVector4 struct.</param>
/// <returns>Double matrix encoding the given translation and rotation.</returns>
public static DMatrix4x4 TR(DVector3 translation, DVector4 orientation)
{
    // Repack the structs into the flat arrays the private overload expects.
    double[] translationArray =
    {
        translation.x, translation.y, translation.z
    };
    double[] orientationArray =
    {
        orientation.x, orientation.y, orientation.z, orientation.w
    };
    return DMatrix4x4.TR(translationArray, orientationArray);
}
/// <summary>
/// Creates a combined translation and rotation matrix.
/// </summary>
/// <param name="translation">Translation as 3 doubles in a TangoTranslation struct.</param>
/// <param name="orientation">Orientation as 4 doubles in a TangoOrientation struct.</param>
/// <returns>Double matrix encoding the given translation and rotation.</returns>
public static DMatrix4x4 TR(Tango.TangoTranslation translation, Tango.TangoOrientation orientation)
{
    // Repack the structs into the flat arrays the private overload expects.
    double[] translationArray =
    {
        translation.x, translation.y, translation.z
    };
    double[] orientationArray =
    {
        orientation.x, orientation.y, orientation.z, orientation.w
    };
    return DMatrix4x4.TR(translationArray, orientationArray);
}
/// <summary>
/// Creates a combined translation and rotation matrix from a translation
/// vector and a unit quaternion, using the standard quaternion-to-
/// rotation-matrix expansion.
/// </summary>
/// <param name="translation">Translation in a double array: x, y, z.</param>
/// <param name="orientation">Orientation quaternion in a double array: x, y, z, w.</param>
/// <returns>Double matrix encoding the given translation and rotation.</returns>
private static DMatrix4x4 TR(double[] translation, double[] orientation)
{
    DMatrix4x4 dmat = new DMatrix4x4();

    // Squared quaternion components; orientation is (x, y, z, w).
    double sqw = orientation[3] * orientation[3];
    double sqx = orientation[0] * orientation[0];
    double sqy = orientation[1] * orientation[1];
    double sqz = orientation[2] * orientation[2];

    // invs (inverse square length) is only required if quaternion is not already normalised
    double invs = 1 / (sqx + sqy + sqz + sqw);

    // Diagonal of the rotation block.
    dmat.m00 = (sqx - sqy - sqz + sqw) * invs;
    dmat.m11 = (-sqx + sqy - sqz + sqw) * invs;
    dmat.m22 = (-sqx - sqy + sqz + sqw) * invs;

    // Off-diagonal terms; tmp1/tmp2 are reused across the three pairs,
    // so the assignment order below matters.
    double tmp1 = orientation[0] * orientation[1];
    double tmp2 = orientation[2] * orientation[3];
    dmat.m10 = 2.0 * (tmp1 + tmp2) * invs;
    dmat.m01 = 2.0 * (tmp1 - tmp2) * invs;
    tmp1 = orientation[0] * orientation[2];
    tmp2 = orientation[1] * orientation[3];
    dmat.m20 = 2.0 * (tmp1 - tmp2) * invs;
    dmat.m02 = 2.0 * (tmp1 + tmp2) * invs;
    tmp1 = orientation[1] * orientation[2];
    tmp2 = orientation[0] * orientation[3];
    dmat.m21 = 2.0 * (tmp1 + tmp2) * invs;
    dmat.m12 = 2.0 * (tmp1 - tmp2) * invs;

    // Translation goes in the last column; m33 completes the homogeneous
    // matrix. m30/m31/m32 are left at their freshly constructed value
    // (presumably 0 for DMatrix4x4 — verify against its definition).
    dmat.m03 = translation[0];
    dmat.m13 = translation[1];
    dmat.m23 = translation[2];
    dmat.m33 = 1.0;
    return(dmat);
}
/// <summary>
/// P/Invoke declaration for the native nearest-neighbor depth lookup.
/// Marshals the UV coordinates and transform by reference and receives
/// the nearest color-camera-space point plus a validity flag.
/// </summary>
/// <param name="pointCloud">Point cloud wrapper passed to the native call.</param>
/// <param name="cameraIntrinsics">Color camera intrinsics.</param>
/// <param name="matrix">Camera-to-world transform, passed by reference.</param>
/// <param name="uvCoordinates">User-selected UV coordinates, passed by reference.</param>
/// <param name="colorCameraPoint">Receives the matched point in color camera space.</param>
/// <param name="isValidPoint">
/// Receives the validity flag, marshaled as a native 32-bit int
/// (nonzero means a nearby point was found).
/// </param>
public static extern int TangoSupport_getDepthAtPointNearestNeighborMatrixTransform( TangoXYZij pointCloud, TangoCameraIntrinsics cameraIntrinsics, ref DMatrix4x4 matrix, ref Vector2 uvCoordinates, out Vector3 colorCameraPoint, [Out, MarshalAs(UnmanagedType.I4)] out int isValidPoint);
/// <summary>
/// P/Invoke declaration for the native plane-fitting routine. The plane
/// model is marshaled out as a fixed-size array of 4 doubles
/// (coefficients a, b, c, d — presumably of ax + by + cz + d = 0;
/// confirm against the native header).
/// </summary>
/// <param name="pointCloud">Point cloud wrapper passed to the native call.</param>
/// <param name="cameraIntrinsics">Color camera intrinsics.</param>
/// <param name="matrix">Camera-to-world transform, passed by reference.</param>
/// <param name="uvCoordinates">User-selected UV coordinates, passed by reference.</param>
/// <param name="intersectionPoint">Receives the selected point in depth camera coordinates.</param>
/// <param name="planeModel">Receives the 4 plane coefficients.</param>
public static extern int TangoSupport_fitPlaneModelNearPointMatrixTransform( TangoXYZij pointCloud, TangoCameraIntrinsics cameraIntrinsics, ref DMatrix4x4 matrix, ref Vector2 uvCoordinates, out DVector3 intersectionPoint, [Out, MarshalAs(UnmanagedType.LPArray, SizeConst = 4)] double[] planeModel);