/// <inheritdoc/>
protected override void DisposeObject()
{
    // Nothing to release if the native object was never created (or already freed).
    if (_sharedPtr == IntPtr.Zero)
    {
        return;
    }

    // The native release takes _sharedPtr by ref and is expected to clear it.
    StructuredLightInvoke.cveSinusoidalPatternRelease(ref _sharedPtr);

    // Invalidate the derived/base interface pointers so no stale handle survives disposal.
    _ptr = IntPtr.Zero;
    _structuredLightPatternPtr = IntPtr.Zero;
    _algorithmPtr = IntPtr.Zero;
}
/// <summary>
/// Create a new GrayCodePattern.
/// </summary>
/// <param name="width">The width of the projector.</param>
/// <param name="height">The height of the projector.</param>
public GrayCodePattern(
    int width = 1024,
    int height = 768)
{
    // The native factory also hands back the shared pointer plus the
    // StructuredLightPattern and Algorithm base-interface pointers.
    _ptr = StructuredLightInvoke.cveGrayCodePatternCreate(
        width,
        height,
        ref _sharedPtr,
        ref _structuredLightPatternPtr,
        ref _algorithmPtr);
}
/// <summary>
/// For a (x,y) pixel of the camera, returns the corresponding projector pixel.
/// Each pixel in the camera-acquired pattern images is decoded into the decimal
/// numbers representing the projector's column and row, giving a mapping between
/// camera and projector pixels.
/// </summary>
/// <param name="patternImages">The pattern images acquired by the camera, stored in a grayscale VectorOfMat.</param>
/// <param name="x">x coordinate of the image pixel.</param>
/// <param name="y">y coordinate of the image pixel.</param>
/// <returns>
/// The projector pixel whose image coordinates correspond to the camera pixel being
/// decoded, or null if the projector pixel could not be computed.
/// </returns>
public Point? GetProjPixel(IInputArray patternImages, int x, int y)
{
    using (InputArray iaPatternImages = patternImages.GetInputArray())
    {
        Point projectorPixel = new Point();
        bool decoded = StructuredLightInvoke.cveGrayCodePatternGetProjPixel(
            _ptr,
            iaPatternImages,
            x,
            y,
            ref projectorPixel);
        // Signal decode failure to the caller via a null result.
        return decoded ? projectorPixel : (Point?)null;
    }
}
/// <summary>
/// Unwrap the wrapped phase map to remove phase ambiguities.
/// </summary>
/// <param name="wrappedPhaseMap">The wrapped phase map computed from the pattern.</param>
/// <param name="unwrappedPhaseMap">The unwrapped phase map used to find correspondences between the two devices.</param>
/// <param name="camSize">Resolution of the camera.</param>
/// <param name="shadowMask">Mask used to discard shadow regions; pass null to omit.</param>
public void UnwrapPhaseMap(
    IInputArray wrappedPhaseMap,
    IOutputArray unwrappedPhaseMap,
    Size camSize,
    IInputArray shadowMask = null)
{
    // An empty InputArray stands in for an omitted shadow mask on the native side.
    InputArray iaMask = shadowMask == null ? InputArray.GetEmpty() : shadowMask.GetInputArray();

    using (iaMask)
    using (InputArray iaWrapped = wrappedPhaseMap.GetInputArray())
    using (OutputArray oaUnwrapped = unwrappedPhaseMap.GetOutputArray())
    {
        StructuredLightInvoke.cveSinusoidalPatternUnwrapPhaseMap(
            _ptr,
            iaWrapped,
            oaUnwrapped,
            ref camSize,
            iaMask);
    }
}
/// <summary>
/// Compute a wrapped phase map from sinusoidal patterns.
/// </summary>
/// <param name="patternImages">Input data used to compute the wrapped phase map.</param>
/// <param name="wrappedPhaseMap">Wrapped phase map obtained through one of the three methods.</param>
/// <param name="shadowMask">Mask used to discard shadow regions; pass null to omit.</param>
/// <param name="fundamental">Fundamental matrix used to compute epipolar lines and ease the matching step; pass null to omit.</param>
public void ComputePhaseMap(
    IInputArrayOfArrays patternImages,
    IOutputArray wrappedPhaseMap,
    IOutputArray shadowMask = null,
    IInputArray fundamental = null)
{
    // Optional arguments map to empty arrays on the native side when omitted.
    OutputArray oaMask = shadowMask == null ? OutputArray.GetEmpty() : shadowMask.GetOutputArray();
    InputArray iaFundamental = fundamental == null ? InputArray.GetEmpty() : fundamental.GetInputArray();

    using (oaMask)
    using (iaFundamental)
    using (InputArray iaPatterns = patternImages.GetInputArray())
    using (OutputArray oaWrapped = wrappedPhaseMap.GetOutputArray())
    {
        StructuredLightInvoke.cveSinusoidalPatternComputePhaseMap(
            _ptr,
            iaPatterns,
            oaWrapped,
            oaMask,
            iaFundamental);
    }
}