/// <summary>
/// Draw keypoints.
/// </summary>
/// <param name="image">Source image.</param>
/// <param name="keypoints">Keypoints from the source image.</param>
/// <param name="outImage">Output image. Its content depends on the flags value defining what is drawn in the output image. See possible flags bit values below.</param>
/// <param name="color">Color of keypoints.</param>
/// <param name="flags">Flags setting drawing features. Possible flags bit values are defined by DrawMatchesFlags.</param>
public static void DrawKeypoints(
    InputArray image,
    IEnumerable<KeyPoint> keypoints,
    InputOutputArray outImage,
    Scalar? color = null,
    DrawMatchesFlags flags = DrawMatchesFlags.Default)
{
    if (image == null)
    {
        throw new ArgumentNullException(nameof(image));
    }
    if (outImage == null)
    {
        throw new ArgumentNullException(nameof(outImage));
    }
    if (keypoints == null)
    {
        throw new ArgumentNullException(nameof(keypoints));
    }
    image.ThrowIfDisposed();
    outImage.ThrowIfDisposed();

    var keypointsArray = keypoints as KeyPoint[] ?? keypoints.ToArray();
    var color0 = color.GetValueOrDefault(Scalar.All(-1));

    NativeMethods.HandleException(
        NativeMethods.features2d_drawKeypoints(
            image.CvPtr, keypointsArray, keypointsArray.Length, outImage.CvPtr, color0, (int)flags));

    GC.KeepAlive(image);
    GC.KeepAlive(outImage);
}
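// Usage sketch (illustrative, not part of the original source): detects ORB keypoints and renders them
// with DrawKeypoints. The file names and the method name DrawKeypointsExample are hypothetical.
private static void DrawKeypointsExample()
{
    using var src = Cv2.ImRead("input.png", ImreadModes.Grayscale);
    using var orb = ORB.Create();
    var keypoints = orb.Detect(src);

    using var dst = new Mat();
    Cv2.DrawKeypoints(src, keypoints, dst, Scalar.All(-1), DrawMatchesFlags.DrawRichKeypoints);
    Cv2.ImWrite("keypoints.png", dst);
}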
/// <summary>
/// Finds the geometric transform (warp) between two images in terms of the ECC criterion @cite EP08 .
/// </summary>
/// <param name="templateImage">single-channel template image; CV_8U or CV_32F array.</param>
/// <param name="inputImage">single-channel input image which should be warped with the final warpMatrix in
/// order to provide an image similar to templateImage, same type as templateImage.</param>
/// <param name="warpMatrix">floating-point \f$2\times 3\f$ or \f$3\times 3\f$ mapping matrix (warp).</param>
/// <param name="motionType">parameter, specifying the type of motion</param>
/// <param name="criteria">parameter, specifying the termination criteria of the ECC algorithm;
/// criteria.epsilon defines the threshold of the increment in the correlation coefficient between two
/// iterations (a negative criteria.epsilon makes criteria.maxcount the only termination criterion).
/// Default values are shown in the declaration above.</param>
/// <param name="inputMask">An optional mask to indicate valid values of inputImage.</param>
/// <returns>The final enhanced correlation coefficient, that is the correlation coefficient between the template image and the final warped input image.</returns>
public static double FindTransformECC(
    InputArray templateImage,
    InputArray inputImage,
    InputOutputArray warpMatrix,
    MotionTypes motionType = MotionTypes.Affine,
    TermCriteria? criteria = null,
    InputArray? inputMask = null)
{
    if (templateImage == null)
    {
        throw new ArgumentNullException(nameof(templateImage));
    }
    if (inputImage == null)
    {
        throw new ArgumentNullException(nameof(inputImage));
    }
    if (warpMatrix == null)
    {
        throw new ArgumentNullException(nameof(warpMatrix));
    }
    templateImage.ThrowIfDisposed();
    inputImage.ThrowIfDisposed();
    warpMatrix.ThrowIfDisposed();
    inputMask?.ThrowIfDisposed();

    var criteriaValue = criteria.GetValueOrDefault(new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 50, 0.001));

    NativeMethods.HandleException(
        NativeMethods.video_findTransformECC2(
            templateImage.CvPtr, inputImage.CvPtr, warpMatrix.CvPtr, (int)motionType, criteriaValue,
            inputMask?.CvPtr ?? IntPtr.Zero, out var ret));

    GC.KeepAlive(templateImage);
    GC.KeepAlive(inputImage);
    GC.KeepAlive(warpMatrix);
    GC.KeepAlive(inputMask);
    return ret;
}
/// <summary>
/// Finds the geometric transform (warp) between two images in terms of the ECC criterion @cite EP08 .
/// </summary>
/// <param name="templateImage">single-channel template image; CV_8U or CV_32F array.</param>
/// <param name="inputImage">single-channel input image which should be warped with the final warpMatrix in
/// order to provide an image similar to templateImage, same type as templateImage.</param>
/// <param name="warpMatrix">floating-point \f$2\times 3\f$ or \f$3\times 3\f$ mapping matrix (warp).</param>
/// <param name="motionType">parameter, specifying the type of motion</param>
/// <param name="criteria">parameter, specifying the termination criteria of the ECC algorithm;
/// criteria.epsilon defines the threshold of the increment in the correlation coefficient between two
/// iterations (a negative criteria.epsilon makes criteria.maxcount the only termination criterion).
/// Default values are shown in the declaration above.</param>
/// <param name="inputMask">An optional mask to indicate valid values of inputImage.</param>
/// <param name="gaussFiltSize">An optional value indicating size of gaussian blur filter; (DEFAULT: 5)</param>
/// <returns>The final enhanced correlation coefficient, that is the correlation coefficient between the template image and the final warped input image.</returns>
public static double FindTransformECC(
    InputArray templateImage,
    InputArray inputImage,
    InputOutputArray warpMatrix,
    MotionTypes motionType,
    TermCriteria criteria,
    InputArray? inputMask = null,
    int gaussFiltSize = 5)
{
    if (templateImage == null)
    {
        throw new ArgumentNullException(nameof(templateImage));
    }
    if (inputImage == null)
    {
        throw new ArgumentNullException(nameof(inputImage));
    }
    if (warpMatrix == null)
    {
        throw new ArgumentNullException(nameof(warpMatrix));
    }
    templateImage.ThrowIfDisposed();
    inputImage.ThrowIfDisposed();
    warpMatrix.ThrowIfDisposed();
    inputMask?.ThrowIfDisposed();

    NativeMethods.HandleException(
        NativeMethods.video_findTransformECC1(
            templateImage.CvPtr, inputImage.CvPtr, warpMatrix.CvPtr, (int)motionType, criteria,
            inputMask?.CvPtr ?? IntPtr.Zero, gaussFiltSize, out var ret));

    GC.KeepAlive(templateImage);
    GC.KeepAlive(inputImage);
    GC.KeepAlive(warpMatrix);
    GC.KeepAlive(inputMask);
    return ret;
}
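// Usage sketch (illustrative, not part of the original source): estimates a 2x3 affine warp that aligns
// "frame.png" to "template.png" with the gaussFiltSize overload above. Both file names and the method
// name are hypothetical; the images are assumed to be single-channel (CV_8U), as findTransformECC requires.
private static void FindTransformEccExample()
{
    using var templ = Cv2.ImRead("template.png", ImreadModes.Grayscale);
    using var input = Cv2.ImRead("frame.png", ImreadModes.Grayscale);

    // Identity 2x3 warp as the initial guess; refined in place by the ECC optimization.
    using var warp = Mat.Eye(2, 3, MatType.CV_32FC1).ToMat();
    var criteria = new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 50, 1e-4);

    double cc = Cv2.FindTransformECC(templ, input, warp, MotionTypes.Affine, criteria, null, 5);

    // Warp the input toward the template using the estimated transform.
    using var aligned = new Mat();
    Cv2.WarpAffine(input, aligned, warp, templ.Size(),
        InterpolationFlags.Linear | InterpolationFlags.WarpInverseMap);
}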
/// <summary>
/// Clusters the input data using the k-means algorithm.
/// </summary>
/// <param name="data">Data for clustering. An array of N-dimensional points with float coordinates, one row per sample.</param>
/// <param name="k">Number of clusters to split the set by.</param>
/// <param name="bestLabels">Input/output integer array that stores the cluster index for every sample.</param>
/// <param name="criteria">The algorithm termination criteria, that is, the maximum number of iterations and/or the desired accuracy.</param>
/// <param name="attempts">The number of times the algorithm is executed using different initial labellings. The best (most compact) result is returned.</param>
/// <param name="flags">Flag that can take KMeansFlags values, specifying how the initial centers are chosen.</param>
/// <param name="centers">Output matrix of the cluster centers, one row per cluster center.</param>
/// <returns>The compactness measure, i.e. the sum of squared distances from each sample to its cluster center.</returns>
public static double Kmeans(InputArray data, int k, InputOutputArray bestLabels,
    TermCriteria criteria, int attempts, KMeansFlags flags, OutputArray centers = null)
{
    if (data == null)
        throw new ArgumentNullException(nameof(data));
    if (bestLabels == null)
        throw new ArgumentNullException(nameof(bestLabels));
    data.ThrowIfDisposed();
    bestLabels.ThrowIfDisposed();

    double ret = NativeMethods.core_kmeans(data.CvPtr, k, bestLabels.CvPtr, criteria, attempts, (int)flags, ToPtr(centers));

    bestLabels.Fix();
    if (centers != null)
        centers.Fix();
    GC.KeepAlive(data);
    return ret;
}
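// Usage sketch (illustrative, not part of the original source): clusters 100 random 2D points into
// three groups. The random data stands in for real samples; the method name is hypothetical.
private static void KmeansExample()
{
    // 100 random 2D sample points, one point per row (N x 2, CV_32FC1).
    using var data = new Mat(100, 2, MatType.CV_32FC1);
    Cv2.Randu(data, Scalar.All(0), Scalar.All(10));

    using var labels = new Mat();
    using var centers = new Mat();
    var criteria = new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 10, 1.0);

    // labels receives one cluster index per row; centers receives one row per cluster center.
    double compactness = Cv2.Kmeans(data, 3, labels, criteria, 3, KMeansFlags.PpCenters, centers);
}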
/// <summary>
/// Draws one or more polygonal curves.
/// </summary>
/// <param name="img">Image.</param>
/// <param name="pts">Array of polygonal curves.</param>
/// <param name="isClosed">Flag indicating whether the drawn polylines are closed or not. If they are closed, the function draws a line from the last vertex of each curve to its first vertex.</param>
/// <param name="color">Polyline color.</param>
/// <param name="thickness">Thickness of the polyline edges.</param>
/// <param name="lineType">Type of the line segments.</param>
/// <param name="shift">Number of fractional bits in the vertex coordinates.</param>
public static void Polylines(
    InputOutputArray img, InputArray pts, bool isClosed, Scalar color,
    int thickness = 1, LineTypes lineType = LineTypes.Link8, int shift = 0)
{
    if (img == null)
        throw new ArgumentNullException(nameof(img));
    if (pts == null)
        throw new ArgumentNullException(nameof(pts));
    img.ThrowIfDisposed();
    pts.ThrowIfDisposed();

    NativeMethods.imgproc_polylines_InputOutputArray(
        img.CvPtr, pts.CvPtr, isClosed ? 1 : 0, color, thickness, (int)lineType, shift);

    img.Fix();
    GC.KeepAlive(pts);
}
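// Usage sketch (illustrative, not part of the original source): outlines a closed quadrilateral on a
// blank canvas. For simplicity this calls the companion Cv2.Polylines overload that accepts managed
// point lists rather than an InputArray; the method name is hypothetical.
private static void PolylinesExample()
{
    using var canvas = new Mat(240, 320, MatType.CV_8UC3, Scalar.Black);
    var quad = new[] { new Point(40, 40), new Point(280, 60), new Point(260, 200), new Point(60, 180) };
    Cv2.Polylines(canvas, new[] { quad }, true, Scalar.Red, 2, LineTypes.AntiAlias);
}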
/// <summary>
/// Renders a text string in the image.
/// </summary>
/// <param name="img">Image.</param>
/// <param name="text">Text string to be drawn.</param>
/// <param name="org">Bottom-left corner of the text string in the image.</param>
/// <param name="fontFace">Font type.</param>
/// <param name="fontScale">Font scale factor that is multiplied by the font-specific base size.</param>
/// <param name="color">Text color.</param>
/// <param name="thickness">Thickness of the lines used to draw the text.</param>
/// <param name="lineType">Line type.</param>
/// <param name="bottomLeftOrigin">When true, the image data origin is at the bottom-left corner. Otherwise it is at the top-left corner.</param>
public static void PutText(InputOutputArray img, string text, Point org,
    HersheyFonts fontFace, double fontScale, Scalar color,
    int thickness = 1, LineTypes lineType = LineTypes.Link8, bool bottomLeftOrigin = false)
{
    if (img == null)
        throw new ArgumentNullException(nameof(img));
    if (string.IsNullOrEmpty(text))
        throw new ArgumentNullException(nameof(text));
    img.ThrowIfDisposed();

    NativeMethods.core_putText(img.CvPtr, text, org, (int)fontFace, fontScale, color,
        thickness, (int)lineType, bottomLeftOrigin ? 1 : 0);

    img.Fix();
}
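// Usage sketch (illustrative, not part of the original source): draws white anti-aliased text onto a
// blank canvas. The method name is hypothetical.
private static void PutTextExample()
{
    using var canvas = new Mat(240, 320, MatType.CV_8UC3, Scalar.Black);
    Cv2.PutText(canvas, "Hello OpenCvSharp", new Point(10, 120),
        HersheyFonts.HersheySimplex, 0.8, Scalar.White, 2, LineTypes.AntiAlias);
}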
#if LANG_JP
/// <summary>
/// 1つ,または複数のポリゴンで区切られた領域を塗りつぶします.
/// </summary>
/// <param name="img">画像</param>
/// <param name="pts">ポリゴンの配列.各要素は,点の配列で表現されます.</param>
/// <param name="color">ポリゴンの色.</param>
/// <param name="lineType">ポリゴンの枠線の種類.</param>
/// <param name="shift">ポリゴンの頂点座標において,小数点以下の桁を表すビット数.</param>
/// <param name="offset"></param>
#else
/// <summary>
/// Fills the area bounded by one or more polygons
/// </summary>
/// <param name="img">Image</param>
/// <param name="pts">Array of polygons, each represented as an array of points</param>
/// <param name="color">Polygon color</param>
/// <param name="lineType">Type of the polygon boundaries</param>
/// <param name="shift">The number of fractional bits in the vertex coordinates</param>
/// <param name="offset">Optional offset of all points of the contours</param>
#endif
public static void FillPoly(
    InputOutputArray img, InputArray pts, Scalar color,
    LineTypes lineType = LineTypes.Link8, int shift = 0, Point? offset = null)
{
    if (img == null)
        throw new ArgumentNullException(nameof(img));
    if (pts == null)
        throw new ArgumentNullException(nameof(pts));
    img.ThrowIfDisposed();
    pts.ThrowIfDisposed();

    Point offset0 = offset.GetValueOrDefault(new Point());

    NativeMethods.imgproc_fillPoly_InputOutputArray(
        img.CvPtr, pts.CvPtr, color, (int)lineType, shift, offset0);

    GC.KeepAlive(pts);
    img.Fix();
}
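// Usage sketch (illustrative, not part of the original source): fills a quadrilateral on a blank canvas.
// For simplicity this calls the companion Cv2.FillPoly overload that accepts managed point lists rather
// than an InputArray; the method name is hypothetical.
private static void FillPolyExample()
{
    using var canvas = new Mat(240, 320, MatType.CV_8UC3, Scalar.Black);
    var quad = new[] { new Point(40, 40), new Point(280, 60), new Point(260, 200), new Point(60, 180) };
    Cv2.FillPoly(canvas, new[] { quad }, Scalar.Green, LineTypes.AntiAlias);
}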
#if LANG_JP
/// <summary>
/// 塗りつぶされた凸ポリゴンを描きます.
/// </summary>
/// <param name="img">画像</param>
/// <param name="pts">ポリゴンの頂点.</param>
/// <param name="color">ポリゴンの色.</param>
/// <param name="lineType">ポリゴンの枠線の種類.</param>
/// <param name="shift">ポリゴンの頂点座標において,小数点以下の桁を表すビット数.</param>
#else
/// <summary>
/// Fills a convex polygon.
/// </summary>
/// <param name="img">Image</param>
/// <param name="pts">The polygon vertices</param>
/// <param name="color">Polygon color</param>
/// <param name="lineType">Type of the polygon boundaries</param>
/// <param name="shift">The number of fractional bits in the vertex coordinates</param>
#endif
public static void FillConvexPoly(InputOutputArray img, InputArray pts, Scalar color,
    LineTypes lineType = LineTypes.Link8, int shift = 0)
{
    if (img == null)
        throw new ArgumentNullException(nameof(img));
    if (pts == null)
        throw new ArgumentNullException(nameof(pts));
    img.ThrowIfDisposed();
    pts.ThrowIfDisposed();

    NativeMethods.imgproc_fillConvexPoly_InputOutputArray(
        img.CvPtr, pts.CvPtr, color, (int)lineType, shift);

    GC.KeepAlive(img);
    GC.KeepAlive(pts);
}
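// Usage sketch (illustrative, not part of the original source): fills a triangle on a blank canvas.
// InputArray.Create is used here as one way to wrap a managed Point array as the pts argument; the
// method name is hypothetical.
private static void FillConvexPolyExample()
{
    using var canvas = new Mat(240, 320, MatType.CV_8UC3, Scalar.Black);
    var triangle = new[] { new Point(160, 40), new Point(60, 200), new Point(260, 200) };
    using var pts = InputArray.Create(triangle);
    Cv2.FillConvexPoly(canvas, pts, Scalar.Yellow, LineTypes.AntiAlias);
}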
#if LANG_JP
/// <summary>
/// 枠だけの楕円,もしくは塗りつぶされた楕円を描画する
/// </summary>
/// <param name="img">楕円が描かれる画像.</param>
/// <param name="box">描画したい楕円を囲む矩形領域.</param>
/// <param name="color">楕円の色.</param>
/// <param name="thickness">楕円境界線の幅.[既定値は1]</param>
/// <param name="lineType">楕円境界線の種類.[既定値はLineType.Link8]</param>
#else
/// <summary>
/// Draws simple or thick elliptic arc or fills ellipse sector
/// </summary>
/// <param name="img">Image.</param>
/// <param name="box">The enclosing box of the ellipse drawn</param>
/// <param name="color">Ellipse color.</param>
/// <param name="thickness">Thickness of the ellipse boundary. [By default this is 1]</param>
/// <param name="lineType">Type of the ellipse boundary. [By default this is LineType.Link8]</param>
#endif
public static void Ellipse(InputOutputArray img, RotatedRect box, Scalar color,
    int thickness = 1, LineTypes lineType = LineTypes.Link8)
{
    if (img == null)
        throw new ArgumentNullException(nameof(img));
    img.ThrowIfDisposed();

    NativeMethods.imgproc_ellipse2(img.CvPtr, box, color, thickness, (int)lineType);

    img.Fix();
}
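// Usage sketch (illustrative, not part of the original source): draws a rotated ellipse defined by its
// enclosing RotatedRect. The method name is hypothetical.
private static void EllipseExample()
{
    using var canvas = new Mat(240, 320, MatType.CV_8UC3, Scalar.Black);
    var box = new RotatedRect(new Point2f(160, 120), new Size2f(180, 90), 30);
    Cv2.Ellipse(canvas, box, Scalar.Cyan, 2, LineTypes.AntiAlias);
}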
#if LANG_JP
/// <summary>
/// 円を描画する
/// </summary>
/// <param name="img">画像</param>
/// <param name="center">円の中心</param>
/// <param name="radius">円の半径</param>
/// <param name="color">円の色</param>
/// <param name="thickness">線の幅.負の値を指定した場合は塗りつぶされる.[既定値は1]</param>
/// <param name="lineType">線の種類.[既定値はLineType.Link8]</param>
/// <param name="shift">中心座標と半径の小数点以下の桁を表すビット数.[既定値は0]</param>
#else
/// <summary>
/// Draws a circle
/// </summary>
/// <param name="img">Image where the circle is drawn.</param>
/// <param name="center">Center of the circle.</param>
/// <param name="radius">Radius of the circle.</param>
/// <param name="color">Circle color.</param>
/// <param name="thickness">Thickness of the circle outline if positive, otherwise indicates that a filled circle has to be drawn. [By default this is 1]</param>
/// <param name="lineType">Type of the circle boundary. [By default this is LineType.Link8]</param>
/// <param name="shift">Number of fractional bits in the center coordinates and radius value. [By default this is 0]</param>
#endif
public static void Circle(InputOutputArray img, Point center, int radius, Scalar color,
    int thickness = 1, LineTypes lineType = LineTypes.Link8, int shift = 0)
{
    if (img == null)
        throw new ArgumentNullException(nameof(img));
    img.ThrowIfDisposed();

    NativeMethods.imgproc_circle(img.CvPtr, center, radius, color, thickness, (int)lineType, shift);

    img.Fix();
}
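// Usage sketch (illustrative, not part of the original source): draws a filled circle and an outlined
// one on a blank canvas. The method name is hypothetical.
private static void CircleExample()
{
    using var canvas = new Mat(240, 320, MatType.CV_8UC3, Scalar.Black);
    Cv2.Circle(canvas, new Point(100, 120), 40, Scalar.Red, -1, LineTypes.AntiAlias);  // negative thickness fills
    Cv2.Circle(canvas, new Point(220, 120), 40, Scalar.Blue, 3, LineTypes.AntiAlias);  // 3 px outline
}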
/// <summary>
/// Finds intrinsic and extrinsic parameters of a stereo camera.
/// </summary>
/// <param name="objectPoints">Vector of vectors of the calibration pattern points.</param>
/// <param name="imagePoints1">Vector of vectors of the projections of the calibration pattern points, observed by the first camera.</param>
/// <param name="imagePoints2">Vector of vectors of the projections of the calibration pattern points, observed by the second camera.</param>
/// <param name="cameraMatrix1">Input/output first camera matrix</param>
/// <param name="distCoeffs1">Input/output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements.
/// The output vector length depends on the flags.</param>
/// <param name="cameraMatrix2">Input/output second camera matrix. The parameter is similar to cameraMatrix1.</param>
/// <param name="distCoeffs2">Input/output lens distortion coefficients for the second camera. The parameter is similar to distCoeffs1.</param>
/// <param name="imageSize">Size of the image used only to initialize the intrinsic camera matrix.</param>
/// <param name="R">Output rotation matrix between the 1st and the 2nd camera coordinate systems.</param>
/// <param name="T">Output translation vector between the coordinate systems of the cameras.</param>
/// <param name="E">Output essential matrix.</param>
/// <param name="F">Output fundamental matrix.</param>
/// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlags values</param>
/// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
/// <returns>The final re-projection error.</returns>
public static double StereoCalibrate(IEnumerable<InputArray> objectPoints,
    IEnumerable<InputArray> imagePoints1, IEnumerable<InputArray> imagePoints2,
    InputOutputArray cameraMatrix1, InputOutputArray distCoeffs1,
    InputOutputArray cameraMatrix2, InputOutputArray distCoeffs2,
    Size imageSize, OutputArray R, OutputArray T, OutputArray E, OutputArray F,
    CalibrationFlags flags = CalibrationFlags.FixIntrinsic, TermCriteria? criteria = null)
{
    if (objectPoints == null)
        throw new ArgumentNullException(nameof(objectPoints));
    if (imagePoints1 == null)
        throw new ArgumentNullException(nameof(imagePoints1));
    if (imagePoints2 == null)
        throw new ArgumentNullException(nameof(imagePoints2));
    if (cameraMatrix1 == null)
        throw new ArgumentNullException(nameof(cameraMatrix1));
    if (distCoeffs1 == null)
        throw new ArgumentNullException(nameof(distCoeffs1));
    if (cameraMatrix2 == null)
        throw new ArgumentNullException(nameof(cameraMatrix2));
    if (distCoeffs2 == null)
        throw new ArgumentNullException(nameof(distCoeffs2));
    cameraMatrix1.ThrowIfDisposed();
    distCoeffs1.ThrowIfDisposed();
    cameraMatrix2.ThrowIfDisposed();
    distCoeffs2.ThrowIfDisposed();
    cameraMatrix1.ThrowIfNotReady();
    cameraMatrix2.ThrowIfNotReady();
    distCoeffs1.ThrowIfNotReady();
    distCoeffs2.ThrowIfNotReady();

    IntPtr[] opPtrs = EnumerableEx.SelectPtrs(objectPoints);
    IntPtr[] ip1Ptrs = EnumerableEx.SelectPtrs(imagePoints1);
    IntPtr[] ip2Ptrs = EnumerableEx.SelectPtrs(imagePoints2);
    TermCriteria criteria0 = criteria.GetValueOrDefault(
        new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 30, 1e-6));

    double result = NativeMethods.calib3d_stereoCalibrate_InputArray(
        opPtrs, opPtrs.Length, ip1Ptrs, ip1Ptrs.Length, ip2Ptrs, ip2Ptrs.Length,
        cameraMatrix1.CvPtr, distCoeffs1.CvPtr, cameraMatrix2.CvPtr, distCoeffs2.CvPtr,
        imageSize, ToPtr(R), ToPtr(T), ToPtr(E), ToPtr(F),
        (int)flags, criteria0);

    cameraMatrix1.Fix();
    distCoeffs1.Fix();
    cameraMatrix2.Fix();
    distCoeffs2.Fix();
    if (R != null)
        R.Fix();
    if (T != null)
        T.Fix();
    if (E != null)
        E.Fix();
    if (F != null)
        F.Fix();
    return result;
}
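// Usage sketch (illustrative, not part of the original source): estimates the relative pose between two
// cameras from chessboard corners that are assumed to have already been detected per view. All parameters
// of this hypothetical helper are assumptions: per-view point Mats and intrinsics from a prior single-camera
// calibration, so only R, T, E and F are estimated (CalibrationFlags.FixIntrinsic).
private static void StereoCalibrateExample(
    IReadOnlyList<Mat> objectPointsPerView,   // per view: N x 1 CV_32FC3 board points
    IReadOnlyList<Mat> imagePoints1PerView,   // per view: N x 1 CV_32FC2 corners seen by camera 1
    IReadOnlyList<Mat> imagePoints2PerView,   // per view: N x 1 CV_32FC2 corners seen by camera 2
    Mat cameraMatrix1, Mat distCoeffs1,       // pre-calibrated intrinsics of camera 1
    Mat cameraMatrix2, Mat distCoeffs2,       // pre-calibrated intrinsics of camera 2
    Size imageSize)
{
    using var r = new Mat();
    using var t = new Mat();
    using var e = new Mat();
    using var f = new Mat();

    double rms = Cv2.StereoCalibrate(
        objectPointsPerView.Select(m => (InputArray)m),
        imagePoints1PerView.Select(m => (InputArray)m),
        imagePoints2PerView.Select(m => (InputArray)m),
        cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2,
        imageSize, r, t, e, f,
        CalibrationFlags.FixIntrinsic,
        new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 100, 1e-5));
}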