/// <summary>Transform a 2D pixel coordinate with an associated depth value of the source camera into a 3D point of the target coordinate system.</summary>
/// <param name="sourcePoint2D">The 2D pixel coordinates in <paramref name="sourceCamera"/> coordinates.</param>
/// <param name="sourceDepthMm">The depth of <paramref name="sourcePoint2D"/> in millimeters. Must be positive.</param>
/// <param name="sourceCamera">The current camera (<see cref="CalibrationGeometry.Depth"/> or <see cref="CalibrationGeometry.Color"/>).</param>
/// <param name="targetCamera">The target camera (<see cref="CalibrationGeometry.Depth"/> or <see cref="CalibrationGeometry.Color"/>).</param>
/// <returns>
/// The 3D coordinates of the input pixel in the coordinate system of <paramref name="targetCamera"/> in millimeters,
/// or <see langword="null"/> if the native transformation reports the result as invalid
/// (the point is outside the range of valid calibration).
/// </returns>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="sourceDepthMm"/> is non-positive,
/// or <paramref name="sourceCamera"/> is not a camera,
/// or <paramref name="targetCamera"/> is not a camera.
/// </exception>
/// <exception cref="InvalidOperationException">Cannot perform transformation. Most likely, calibration data is invalid.</exception>
public Float3? Convert2DTo3D(Float2 sourcePoint2D, float sourceDepthMm, CalibrationGeometry sourceCamera, CalibrationGeometry targetCamera)
{
    if (sourceDepthMm <= float.Epsilon)
    {
        throw new ArgumentOutOfRangeException(nameof(sourceDepthMm));
    }
    if (!sourceCamera.IsCamera())
    {
        throw new ArgumentOutOfRangeException(nameof(sourceCamera));
    }
    if (!targetCamera.IsCamera())
    {
        throw new ArgumentOutOfRangeException(nameof(targetCamera));
    }
    var res = NativeApi.Calibration2DTo3D(ref this, ref sourcePoint2D, sourceDepthMm, sourceCamera, targetCamera, out var targetPoint3DMm, out var valid);
    // Treat anything other than Succeeded as failure, consistent with Convert3DTo3D and
    // DepthImageToPointCloud (the original tested `== Failed`, which would silently accept
    // any other non-success result code).
    if (res != NativeCallResults.Result.Succeeded)
    {
        throw new InvalidOperationException("Cannot transform 2D point to 3D point: invalid calibration data.");
    }
    // Native call succeeded but flagged the result invalid: not an error, just no meaningful point.
    if (!valid)
    {
        return null;
    }
    return targetPoint3DMm;
}
/// <summary>Transforms the depth image into 3 planar images representing X, Y and Z-coordinates of corresponding 3D points.</summary>
/// <param name="depthImage">Input depth image to be transformed to point cloud. Not <see langword="null"/>. Must have resolution of <paramref name="camera"/> camera.</param>
/// <param name="camera">Geometry in which depth map was computed (<see cref="CalibrationGeometry.Depth"/> or <see cref="CalibrationGeometry.Color"/>).</param>
/// <param name="xyzImage">Output XYZ image for point cloud data. Not <see langword="null"/>. Must have resolution of <paramref name="camera"/> camera.</param>
/// <remarks><para>
/// <paramref name="depthImage"/> must be of format <see cref="ImageFormat.Depth16"/>.
/// </para><para>
/// The <paramref name="camera"/> parameter tells the function what the perspective of the <paramref name="depthImage"/> is.
/// If the <paramref name="depthImage"/> was captured directly from the depth camera, the value should be <see cref="CalibrationGeometry.Depth"/>.
/// If the <paramref name="depthImage"/> is the result of a transformation into the color camera's coordinate space using
/// <see cref="DepthImageToColorCamera(Image, Image)"/>, the value should be <see cref="CalibrationGeometry.Color"/>.
/// </para><para>
/// The format of <paramref name="xyzImage"/> must be <see cref="ImageFormat.Custom"/>. The width and height of <paramref name="xyzImage"/> must match the
/// width and height of <paramref name="depthImage"/>. <paramref name="xyzImage"/> must have a stride in bytes of at least 6 times its width in pixels.
/// </para><para>
/// Each pixel of the <paramref name="xyzImage"/> consists of three <see cref="short"/> values, totaling 6 bytes. The three <see cref="short"/> values are the
/// X, Y, and Z values of the point.
/// </para></remarks>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="camera"/> is not a camera.</exception>
/// <exception cref="ArgumentNullException"><paramref name="depthImage"/> is <see langword="null"/> or <paramref name="xyzImage"/> is <see langword="null"/>.</exception>
/// <exception cref="ArgumentException"><paramref name="depthImage"/> or <paramref name="xyzImage"/> has invalid format or resolution.</exception>
/// <exception cref="InvalidOperationException">Failed to transform specified depth image to point cloud.</exception>
/// <exception cref="ObjectDisposedException">This method cannot be called for disposed object.</exception>
public void DepthImageToPointCloud(Image depthImage, CalibrationGeometry camera, Image xyzImage)
{
    // Expected resolution of both images depends on which camera's perspective the depth map is in.
    if (camera == CalibrationGeometry.Depth)
    {
        CheckImageParameter(nameof(depthImage), depthImage, ImageFormat.Depth16, DepthMode);
        CheckImageParameter(nameof(xyzImage), xyzImage, ImageFormat.Custom, DepthMode);
    }
    else if (camera == CalibrationGeometry.Color)
    {
        CheckImageParameter(nameof(depthImage), depthImage, ImageFormat.Depth16, ColorResolution);
        CheckImageParameter(nameof(xyzImage), xyzImage, ImageFormat.Custom, ColorResolution);
    }
    else
    {
        throw new ArgumentOutOfRangeException(nameof(camera));
    }
    // Each output pixel is three shorts (X, Y, Z) = 6 bytes, so the stride must cover that.
    if (xyzImage.StrideBytes < 3 * sizeof(short) * xyzImage.WidthPixels)
    {
        // Fix: report the parameter name rather than interpolating the Image object itself
        // (the original `$"{xyzImage} ..."` printed the object's ToString()).
        throw new ArgumentException($"{nameof(xyzImage)} must have a stride in bytes of at least 6 times its width in pixels.");
    }
    var res = NativeApi.TransformationDepthImageToPointCloud(handle.ValueNotDisposed, Image.ToHandle(depthImage), camera, Image.ToHandle(xyzImage));
    if (res != NativeCallResults.Result.Succeeded)
    {
        throw new InvalidOperationException($"Failed to transform specified depth image to point cloud in coordinates of {camera} camera.");
    }
}
/// <summary>Transform a 3D point of a source coordinate system into a 3D point of the target coordinate system.</summary>
/// <param name="sourcePoint3DMm">The 3D coordinates in millimeters representing a point in <paramref name="sourceCameraOrSensor"/>.</param>
/// <param name="sourceCameraOrSensor">The current coordinate system of camera or IMU sensor.</param>
/// <param name="targetCameraOrSensor">The target coordinate system of camera or IMU sensor.</param>
/// <returns>The new 3D coordinates of the input point in the coordinate space <paramref name="targetCameraOrSensor"/> in millimeters.</returns>
/// <remarks>
/// This function is used to transform 3D points between depth and color camera coordinate systems. The function uses the
/// extrinsic camera calibration. It computes the output via multiplication with a precomputed matrix encoding a 3D
/// rotation and a 3D translation. If <paramref name="sourceCameraOrSensor"/> and <paramref name="targetCameraOrSensor"/> are the same, then result will
/// be identical to <paramref name="sourcePoint3DMm"/>.
/// </remarks>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="sourceCameraOrSensor"/> or <paramref name="targetCameraOrSensor"/> is neither a camera nor an IMU sensor.
/// </exception>
/// <exception cref="InvalidOperationException">
/// Cannot perform transformation. Most likely, calibration data is invalid.
/// </exception>
public Float3 Convert3DTo3D(Float3 sourcePoint3DMm, CalibrationGeometry sourceCameraOrSensor, CalibrationGeometry targetCameraOrSensor)
{
    if (!sourceCameraOrSensor.IsCamera() && !sourceCameraOrSensor.IsImuPart())
    {
        throw new ArgumentOutOfRangeException(nameof(sourceCameraOrSensor));
    }
    if (!targetCameraOrSensor.IsCamera() && !targetCameraOrSensor.IsImuPart())
    {
        throw new ArgumentOutOfRangeException(nameof(targetCameraOrSensor));
    }
    var res = NativeApi.Calibration3DTo3D(ref this, ref sourcePoint3DMm, sourceCameraOrSensor, targetCameraOrSensor, out var targetPoint3DMm);
    if (res != NativeCallResults.Result.Succeeded)
    {
        throw new InvalidOperationException("Cannot transform 3D point to 3D point: invalid calibration data.");
    }
    return targetPoint3DMm;
}
/// <summary>Helper method to get mutual extrinsics parameters for a given couple of sensors in Azure Kinect device.</summary>
/// <param name="sourceSensor">Source coordinate system for transformation.</param>
/// <param name="targetSensor">Destination coordinate system for transformation.</param>
/// <returns>Extracted parameters of transformation from <paramref name="sourceSensor"/> to <paramref name="targetSensor"/>.</returns>
/// <seealso cref="SetExtrinsics(CalibrationGeometry, CalibrationGeometry, CalibrationExtrinsics)"/>
/// <seealso cref="Extrinsics"/>
public CalibrationExtrinsics GetExtrinsics(CalibrationGeometry sourceSensor, CalibrationGeometry targetSensor)
{
    // Extrinsics is a flattened Count x Count matrix indexed as [source, target].
    var index = (int)sourceSensor * (int)CalibrationGeometry.Count + (int)targetSensor;
    return Extrinsics![index];
}
/// <summary>Helper method to set mutual extrinsics parameters for a given couple of sensors in Azure Kinect device.</summary>
/// <param name="sourceSensor">Source coordinate system for transformation.</param>
/// <param name="targetSensor">Destination coordinate system for transformation.</param>
/// <param name="extrinsics">Parameters of transformation from <paramref name="sourceSensor"/> to <paramref name="targetSensor"/>.</param>
/// <seealso cref="GetExtrinsics(CalibrationGeometry, CalibrationGeometry)"/>
/// <seealso cref="Extrinsics"/>
public void SetExtrinsics(CalibrationGeometry sourceSensor, CalibrationGeometry targetSensor, CalibrationExtrinsics extrinsics)
    // Extrinsics is a flattened Count x Count matrix indexed as [source, target].
    => Extrinsics[(int)sourceSensor * (int)CalibrationGeometry.Count + (int)targetSensor] = extrinsics;
/// <summary>Checks whether the specified geometry denotes a part of the IMU (gyroscope or accelerometer).</summary>
/// <param name="geometry">Coordinate system to check.</param>
/// <returns><see langword="true"/> for <see cref="CalibrationGeometry.Gyro"/> and <see cref="CalibrationGeometry.Accel"/>; <see langword="false"/> otherwise.</returns>
public static bool IsImuPart(this CalibrationGeometry geometry)
{
    switch (geometry)
    {
        case CalibrationGeometry.Gyro:
        case CalibrationGeometry.Accel:
            return true;
        default:
            return false;
    }
}
/// <summary>Checks whether the specified geometry denotes one of the device cameras (depth or color).</summary>
/// <param name="geometry">Coordinate system to check.</param>
/// <returns><see langword="true"/> for <see cref="CalibrationGeometry.Depth"/> and <see cref="CalibrationGeometry.Color"/>; <see langword="false"/> otherwise.</returns>
public static bool IsCamera(this CalibrationGeometry geometry)
{
    switch (geometry)
    {
        case CalibrationGeometry.Depth:
        case CalibrationGeometry.Color:
            return true;
        default:
            return false;
    }
}