예제 #1
1
        /// <summary>
        /// Finds an object center, size, and orientation.
        /// </summary>
        /// <param name="probImage">Back projection of the object histogram.</param>
        /// <param name="window">Initial search window.</param>
        /// <param name="criteria">Stop criteria for the underlying MeanShift() .</param>
        /// <returns>The rotated rectangle fitted to the tracked object by the native CamShift call.</returns>
        public static RotatedRect CamShift(
            InputArray probImage, ref Rect window, TermCriteria criteria)
        {
            if (probImage == null)
                throw new ArgumentNullException(nameof(probImage));
            probImage.ThrowIfDisposed();

            RotatedRect result = NativeMethods.video_CamShift(
                probImage.CvPtr, ref window, criteria);
            // Only the raw CvPtr is passed to native code; keep the managed wrapper
            // alive until the call returns so its finalizer cannot free the buffer mid-call
            // (consistent with the other wrappers in this file).
            GC.KeepAlive(probImage);
            return result;
        }
예제 #2
0
        /// <summary>
        /// Finds an object on a back projection image.
        /// </summary>
        /// <param name="probImage">Back projection of the object histogram.</param>
        /// <param name="window">Initial search window.</param>
        /// <param name="criteria">Stop criteria for the iterative search algorithm.</param>
        /// <returns>Number of iterations MeanShift took to converge.</returns>
        public static int MeanShift(
            InputArray probImage, ref Rect window, TermCriteria criteria)
        {
            if (probImage == null)
                throw new ArgumentNullException(nameof(probImage)); // nameof for refactor safety, matching CamShift above
            probImage.ThrowIfDisposed();

            int result = NativeMethods.video_meanShift(
                probImage.CvPtr, ref window, criteria);
            // Keep the managed wrapper alive until the native call completes;
            // only its raw pointer crosses the interop boundary.
            GC.KeepAlive(probImage);
            return result;
        }
예제 #3
0
        /// <summary>
        /// Computes sparse optical flow using the multi-scale (pyramidal) Lucas-Kanade algorithm.
        /// </summary>
        /// <param name="prevImg">First input image.</param>
        /// <param name="nextImg">Second input image (presumably the same size/type as prevImg — enforced natively).</param>
        /// <param name="prevPts">Points for which the flow is computed.</param>
        /// <param name="nextPts">Receives the computed new positions of the input features.</param>
        /// <param name="status">Receives, per point, whether the flow was found.</param>
        /// <param name="err">Receives the per-point error measure.</param>
        /// <param name="winSize">Search window size at each pyramid level; 21x21 when null.</param>
        /// <param name="maxLevel">0-based maximal pyramid level number.</param>
        /// <param name="criteria">Termination criteria of the iterative search; 30 iterations / eps 0.01 when null.</param>
        /// <param name="flags">Operation flags.</param>
        /// <param name="minEigThreshold">Minimal eigenvalue threshold used to filter out bad points.</param>
        public static void CalcOpticalFlowPyrLK(
            InputArray prevImg, InputArray nextImg,
            Point2f[] prevPts, ref Point2f[] nextPts,
            out byte[] status, out float[] err,
            Size? winSize = null,
            int maxLevel = 3,
            TermCriteria? criteria = null,
            OpticalFlowFlags flags = OpticalFlowFlags.None,
            double minEigThreshold = 1e-4)
        {
            if (prevImg == null)
                throw new ArgumentNullException(nameof(prevImg));
            if (nextImg == null)
                throw new ArgumentNullException(nameof(nextImg));
            if (prevPts == null)
                throw new ArgumentNullException(nameof(prevPts));
            if (nextPts == null)
                throw new ArgumentNullException(nameof(nextPts));
            prevImg.ThrowIfDisposed();
            nextImg.ThrowIfDisposed();

            // Substitute the documented defaults for omitted optional arguments.
            var winSize0 = winSize ?? new Size(21, 21);
            var criteria0 = criteria ?? TermCriteria.Both(30, 0.01);

            using (var nextPtsVec = new VectorOfPoint2f())
            using (var statusVec = new VectorOfByte())
            using (var errVec = new VectorOfFloat())
            {
                NativeMethods.video_calcOpticalFlowPyrLK_vector(
                    prevImg.CvPtr, nextImg.CvPtr, prevPts, prevPts.Length,
                    nextPtsVec.CvPtr, statusVec.CvPtr, errVec.CvPtr,
                    winSize0, maxLevel, criteria0, (int)flags, minEigThreshold);
                // Wrappers must outlive the native call; only raw pointers were passed.
                GC.KeepAlive(prevImg);
                GC.KeepAlive(nextImg);
                nextPts = nextPtsVec.ToArray();
                status = statusVec.ToArray();
                err = errVec.ToArray();
            }
        }
예제 #4
0
        /// <summary>
        /// Finds the geometric transform (warp) between two images in terms of the ECC criterion @cite EP08 .
        /// </summary>
        /// <param name="templateImage">single-channel template image; CV_8U or CV_32F array.</param>
        /// <param name="inputImage">single-channel input image which should be warped with the final warpMatrix in
        /// order to provide an image similar to templateImage, same type as templateImage.</param>
        /// <param name="warpMatrix">floating-point \f$2\times 3\f$ or \f$3\times 3\f$ mapping matrix (warp).</param>
        /// <param name="motionType">parameter, specifying the type of motion</param>
        /// <param name="criteria">parameter, specifying the termination criteria of the ECC algorithm;
        /// criteria.epsilon defines the threshold of the increment in the correlation coefficient between two
        /// iterations (a negative criteria.epsilon makes criteria.maxcount the only termination criterion).</param>
        /// <param name="inputMask">An optional mask to indicate valid values of inputImage.</param>
        /// <param name="gaussFiltSize">An optional value indicating size of gaussian blur filter; (DEFAULT: 5)</param>
        /// <returns>The final enhanced correlation coefficient reported by the native call.</returns>
        public static double FindTransformECC(
            InputArray templateImage,
            InputArray inputImage,
            InputOutputArray warpMatrix,
            MotionTypes motionType,
            TermCriteria criteria,
            InputArray? inputMask = null,
            int gaussFiltSize = 5)
        {
            if (templateImage == null)
                throw new ArgumentNullException(nameof(templateImage));
            if (inputImage == null)
                throw new ArgumentNullException(nameof(inputImage));
            if (warpMatrix == null)
                throw new ArgumentNullException(nameof(warpMatrix));
            templateImage.ThrowIfDisposed();
            inputImage.ThrowIfDisposed();
            warpMatrix.ThrowIfDisposed();
            inputMask?.ThrowIfDisposed();

            double ret;
            NativeMethods.HandleException(
                NativeMethods.video_findTransformECC1(
                    templateImage.CvPtr, inputImage.CvPtr, warpMatrix.CvPtr, (int)motionType,
                    criteria, inputMask?.CvPtr ?? IntPtr.Zero, gaussFiltSize,
                    out ret));

            // Only raw pointers crossed the interop boundary; keep the wrappers alive.
            GC.KeepAlive(templateImage);
            GC.KeepAlive(inputImage);
            GC.KeepAlive(warpMatrix);
            GC.KeepAlive(inputMask);
            return ret;
        }
예제 #5
0
 /// <summary>
 /// P/Invoke binding for the native stereoCalibrate wrapper operating on raw,
 /// jagged point buffers (each IntPtr array element is one view's point array;
 /// the paired size arrays carry the per-view lengths).
 /// Camera matrices and distortion coefficients are marshalled in/out in place.
 /// </summary>
 public static extern double calib3d_stereoCalibrate_array(
     IntPtr[] objectPoints, int opSize1, int[] opSizes2,
     IntPtr[] imagePoints1, int ip1Size1, int[] ip1Sizes2,
     IntPtr[] imagePoints2, int ip2Size1, int[] ip2Sizes2,
     [In, Out] double[,] cameraMatrix1,
     [In, Out] double[] distCoeffs1, int dc1Size,
     [In, Out] double[,] cameraMatrix2,
     [In, Out] double[] distCoeffs2, int dc2Size,
     Size imageSize,
     IntPtr R, IntPtr T,
     IntPtr E, IntPtr F,
     int flags, TermCriteria criteria);
예제 #6
0
        /// <summary>
        /// finds intrinsic and extrinsic camera parameters from several views of a known calibration pattern.
        /// </summary>
        /// <param name="objectPoints">In the new interface it is a vector of vectors of calibration pattern points in the calibration pattern coordinate space. 
        /// The outer vector contains as many elements as the number of the pattern views. If the same calibration pattern is shown in each view and 
        /// it is fully visible, all the vectors will be the same. Although, it is possible to use partially occluded patterns, or even different patterns 
        /// in different views. Then, the vectors will be different. The points are 3D, but since they are in a pattern coordinate system, then, 
        /// if the rig is planar, it may make sense to put the model to a XY coordinate plane so that Z-coordinate of each input object point is 0.
        /// In the old interface all the vectors of object points from different views are concatenated together.</param>
        /// <param name="imagePoints">In the new interface it is a vector of vectors of the projections of calibration pattern points. 
        /// imagePoints.Count() and objectPoints.Count() and imagePoints[i].Count() must be equal to objectPoints[i].Count() for each i.</param>
        /// <param name="imageSize">Size of the image used only to initialize the intrinsic camera matrix.</param>
        /// <param name="cameraMatrix">Output 3x3 floating-point camera matrix. 
        /// If CV_CALIB_USE_INTRINSIC_GUESS and/or CV_CALIB_FIX_ASPECT_RATIO are specified, some or all of fx, fy, cx, cy must be 
        /// initialized before calling the function.</param>
        /// <param name="distCoeffs">Output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements.</param>
        /// <param name="rvecs">Output vector of rotation vectors (see Rodrigues() ) estimated for each pattern view. That is, each k-th rotation vector 
        /// together with the corresponding k-th translation vector (see the next output parameter description) brings the calibration pattern 
        /// from the model coordinate space (in which object points are specified) to the world coordinate space, that is, a real position of the 
        /// calibration pattern in the k-th pattern view (k=0.. M -1)</param>
        /// <param name="tvecs">Output vector of translation vectors estimated for each pattern view.</param>
        /// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlag values</param>
        /// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
        /// <returns>The final re-projection error reported by the native calibration routine.</returns>
        public static double CalibrateCamera(
            IEnumerable<IEnumerable<Point3f>> objectPoints,
            IEnumerable<IEnumerable<Point2f>> imagePoints,
            Size imageSize,
            double[,] cameraMatrix,
            double[] distCoeffs,
            out Vec3d[] rvecs,
            out Vec3d[] tvecs,
            CalibrationFlags flags = CalibrationFlags.None,
            TermCriteria? criteria = null)
        {
            if (objectPoints == null)
                throw new ArgumentNullException(nameof(objectPoints));
            // BUG FIX: the second guard previously re-checked objectPoints,
            // leaving imagePoints unvalidated.
            if (imagePoints == null)
                throw new ArgumentNullException(nameof(imagePoints));
            if (cameraMatrix == null)
                throw new ArgumentNullException(nameof(cameraMatrix));
            if (distCoeffs == null)
                throw new ArgumentNullException(nameof(distCoeffs));

            TermCriteria criteria0 = criteria.GetValueOrDefault(
                new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 30, Double.Epsilon));

            using (var op = new ArrayAddress2<Point3f>(objectPoints))
            using (var ip = new ArrayAddress2<Point2f>(imagePoints))
            using (var rvecsVec = new VectorOfMat())
            using (var tvecsVec = new VectorOfMat())
            {
                double ret = NativeMethods.calib3d_calibrateCamera_vector(
                    op.Pointer, op.Dim1Length, op.Dim2Lengths,
                    ip.Pointer, ip.Dim1Length, ip.Dim2Lengths,
                    imageSize, cameraMatrix, distCoeffs, distCoeffs.Length,
                    rvecsVec.CvPtr, tvecsVec.CvPtr, (int)flags, criteria0);
                Mat[] rvecsM = rvecsVec.ToArray();
                Mat[] tvecsM = tvecsVec.ToArray();
                // Each native Mat holds one 3-element vector; collapse to Vec3d per view.
                rvecs = EnumerableEx.SelectToArray(rvecsM, m => m.Get<Vec3d>(0));
                tvecs = EnumerableEx.SelectToArray(tvecsM, m => m.Get<Vec3d>(0));
                return ret;
            }
        }
예제 #7
0
 /// <summary>
 /// P/Invoke binding: sets the termination criteria on a native LogisticRegression instance.
 /// </summary>
 public static extern void ml_LogisticRegression_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #8
0
 /// <summary>
 /// P/Invoke binding for the native calibrateCamera wrapper operating on raw,
 /// jagged point buffers; cameraMatrix and distCoeffs are marshalled in/out in place,
 /// rvecs/tvecs receive per-view results via native vector handles.
 /// </summary>
 public static extern double calib3d_calibrateCamera_vector(
     IntPtr[] objectPoints, int opSize1, int[] opSize2,
     IntPtr[] imagePoints, int ipSize1, int[] ipSize2,
     Size imageSize,
     [In, Out] double[,] cameraMatrix,
     [In, Out] double[] distCoeffs, int distCoeffsSize,
     IntPtr rvecs, IntPtr tvecs,
     int flags, TermCriteria criteria);
 /// <summary>
 /// P/Invoke binding: constructs a native BOWKMeansTrainer; the new object's
 /// handle is delivered through returnValue, errors via the ExceptionStatus result.
 /// </summary>
 public static extern ExceptionStatus features2d_BOWKMeansTrainer_new(
     int clusterCount, TermCriteria termcrit, int attempts, int flags, out IntPtr returnValue);
예제 #10
0
 /// <summary>
 /// P/Invoke binding for the native pyramidal Lucas-Kanade optical flow wrapper;
 /// nextPts/status/err are native vector handles filled by the call.
 /// </summary>
 public static extern void video_calcOpticalFlowPyrLK_vector(
     IntPtr prevImg, IntPtr nextImg,
     Point2f[] prevPts, int prevPtsSize,
     IntPtr nextPts, IntPtr status, IntPtr err,
     Size winSize, int maxLevel, TermCriteria criteria,
     int flags, double minEigThreshold);
예제 #11
0
 /// <summary>
 /// P/Invoke binding for the native pyramidal Lucas-Kanade optical flow wrapper;
 /// nextPts/status/err are native vector handles filled by the call.
 /// </summary>
 public static extern void video_calcOpticalFlowPyrLK_vector(
     IntPtr prevImg, IntPtr nextImg,
     Point2f[] prevPts, int prevPtsSize,
     IntPtr nextPts, IntPtr status, IntPtr err,
     Size winSize, int maxLevel, TermCriteria criteria,
     int flags, double minEigThreshold);
 /// <summary>
 /// P/Invoke binding: sets the termination criteria on a native ANN_MLP instance.
 /// </summary>
 public static extern void ml_ANN_MLP_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #13
0
        /// <summary>
        /// finds intrinsic and extrinsic camera parameters from several views of a known calibration pattern.
        /// </summary>
        /// <param name="objectPoints">In the new interface it is a vector of vectors of calibration pattern points in the calibration pattern coordinate space. 
        /// The outer vector contains as many elements as the number of the pattern views. If the same calibration pattern is shown in each view and 
        /// it is fully visible, all the vectors will be the same. Although, it is possible to use partially occluded patterns, or even different patterns 
        /// in different views. Then, the vectors will be different. The points are 3D, but since they are in a pattern coordinate system, then, 
        /// if the rig is planar, it may make sense to put the model to a XY coordinate plane so that Z-coordinate of each input object point is 0.
        /// In the old interface all the vectors of object points from different views are concatenated together.</param>
        /// <param name="imagePoints">In the new interface it is a vector of vectors of the projections of calibration pattern points. 
        /// imagePoints.Count() and objectPoints.Count() and imagePoints[i].Count() must be equal to objectPoints[i].Count() for each i.</param>
        /// <param name="imageSize">Size of the image used only to initialize the intrinsic camera matrix.</param>
        /// <param name="cameraMatrix">Output 3x3 floating-point camera matrix. 
        /// If CV_CALIB_USE_INTRINSIC_GUESS and/or CV_CALIB_FIX_ASPECT_RATIO are specified, some or all of fx, fy, cx, cy must be 
        /// initialized before calling the function.</param>
        /// <param name="distCoeffs">Output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements.</param>
        /// <param name="rvecs">Output vector of rotation vectors (see Rodrigues() ) estimated for each pattern view. That is, each k-th rotation vector 
        /// together with the corresponding k-th translation vector (see the next output parameter description) brings the calibration pattern 
        /// from the model coordinate space (in which object points are specified) to the world coordinate space, that is, a real position of the 
        /// calibration pattern in the k-th pattern view (k=0.. M -1)</param>
        /// <param name="tvecs">Output vector of translation vectors estimated for each pattern view.</param>
        /// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlag values</param>
        /// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
        /// <returns>The final re-projection error reported by the native calibration routine.</returns>
        public static double CalibrateCamera(
            IEnumerable<Mat> objectPoints,
            IEnumerable<Mat> imagePoints,
            Size imageSize,
            InputOutputArray cameraMatrix,
            InputOutputArray distCoeffs,
            out Mat[] rvecs,
            out Mat[] tvecs,
            CalibrationFlags flags = CalibrationFlags.None,
            TermCriteria? criteria = null)
        {
            if (objectPoints == null)
                throw new ArgumentNullException(nameof(objectPoints));
            // BUG FIX: the second guard previously re-checked objectPoints,
            // leaving imagePoints unvalidated.
            if (imagePoints == null)
                throw new ArgumentNullException(nameof(imagePoints));
            if (cameraMatrix == null)
                throw new ArgumentNullException(nameof(cameraMatrix));
            if (distCoeffs == null)
                throw new ArgumentNullException(nameof(distCoeffs));
            cameraMatrix.ThrowIfNotReady();
            distCoeffs.ThrowIfNotReady();

            TermCriteria criteria0 = criteria.GetValueOrDefault(
                new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 30, Double.Epsilon));

            IntPtr[] objectPointsPtrs = EnumerableEx.SelectPtrs(objectPoints);
            IntPtr[] imagePointsPtrs = EnumerableEx.SelectPtrs(imagePoints);

            double ret;
            using (var rvecsVec = new VectorOfMat())
            using (var tvecsVec = new VectorOfMat())
            {
                ret = NativeMethods.calib3d_calibrateCamera_InputArray(
                    objectPointsPtrs, objectPointsPtrs.Length,
                    // BUG FIX: the image-points count previously passed
                    // objectPointsPtrs.Length instead of imagePointsPtrs.Length.
                    imagePointsPtrs, imagePointsPtrs.Length,
                    imageSize, cameraMatrix.CvPtr, distCoeffs.CvPtr,
                    rvecsVec.CvPtr, tvecsVec.CvPtr, (int)flags, criteria0);
                rvecs = rvecsVec.ToArray();
                tvecs = tvecsVec.ToArray();
            }

            cameraMatrix.Fix();
            distCoeffs.Fix();
            return ret;
        }
 /// <summary>
 /// P/Invoke binding for the native pyrMeanShiftFiltering wrapper.
 /// </summary>
 public static extern void imgproc_pyrMeanShiftFiltering(IntPtr src, IntPtr dst,
                                                         double sp, double sr, int maxLevel, TermCriteria termcrit);
예제 #15
0
 /// <summary>
 /// Performs the initial step of meanshift segmentation of this image.
 /// The source matrix is 8-bit, 3-channel image.
 /// </summary>
 /// <param name="sp">The spatial window radius.</param>
 /// <param name="sr">The color window radius.</param>
 /// <param name="maxLevel">Maximum level of the pyramid for the segmentation.</param>
 /// <param name="termcrit">Termination criteria: when to stop meanshift iterations.</param>
 /// <returns>The destination image of the same format and the same size as the source.</returns>
 public Mat PyrMeanShiftFiltering(double sp, double sr, int maxLevel = 1, TermCriteria? termcrit = null)
 {
     // Allocate the result and delegate to the static Cv2 wrapper.
     var result = new Mat();
     Cv2.PyrMeanShiftFiltering(this, result, sp, sr, maxLevel, termcrit);
     return result;
 }
예제 #16
0
 /// <summary>
 /// adjusts the corner locations with sub-pixel accuracy to maximize the certain cornerness criteria
 /// </summary>
 /// <param name="inputCorners">Initial coordinates of the input corners and refined coordinates provided for output.</param>
 /// <param name="winSize">Half of the side length of the search window.</param>
 /// <param name="zeroZone">Half of the size of the dead region in the middle of the search zone 
 /// over which the summation in the formula below is not done. It is used sometimes to avoid possible singularities 
 /// of the autocorrelation matrix. The value of (-1,-1) indicates that there is no such a size.</param>
 /// <param name="criteria">Criteria for termination of the iterative process of corner refinement. 
 /// That is, the process of corner position refinement stops either after criteria.maxCount iterations 
 /// or when the corner position moves by less than criteria.epsilon on some iteration.</param>
 /// <returns>The refined corner coordinates.</returns>
 public Point2f[] CornerSubPix(IEnumerable<Point2f> inputCorners,
     Size winSize, Size zeroZone, TermCriteria criteria)
     // Thin instance-method wrapper over the static Cv2 implementation.
     => Cv2.CornerSubPix(this, inputCorners, winSize, zeroZone, criteria);
예제 #17
0
 /// <summary>
 /// P/Invoke binding for the native cornerSubPix wrapper; corners is a native
 /// point-vector handle refined in place.
 /// </summary>
 public static extern void imgproc_cornerSubPix(IntPtr image, IntPtr corners,
     Size winSize, Size zeroZone, TermCriteria criteria);
예제 #18
0
 /// <summary>
 /// P/Invoke binding for the native pyrMeanShiftFiltering wrapper.
 /// </summary>
 public static extern void imgproc_pyrMeanShiftFiltering(IntPtr src, IntPtr dst,
     double sp, double sr, int maxLevel, TermCriteria termcrit);
예제 #19
0
 /// <summary>
 /// P/Invoke binding for the native findTransformECC wrapper (variant without a
 /// gaussian filter size parameter); the ECC value is delivered via returnValue,
 /// errors via the ExceptionStatus result.
 /// </summary>
 public static extern ExceptionStatus video_findTransformECC2(
     IntPtr templateImage, IntPtr inputImage,
     IntPtr warpMatrix, int motionType, TermCriteria criteria,
     IntPtr inputMask, out double returnValue);
예제 #20
0
 /// <summary>
 /// P/Invoke binding for the native CamShift wrapper; window is updated in place.
 /// </summary>
 public static extern RotatedRect video_CamShift(
     IntPtr probImage, ref Rect window, TermCriteria criteria);
예제 #21
0
        /// <summary>
        /// finds intrinsic and extrinsic parameters of a stereo camera
        /// </summary>
        /// <param name="objectPoints">Vector of vectors of the calibration pattern points.</param>
        /// <param name="imagePoints1">Vector of vectors of the projections of the calibration pattern points, observed by the first camera.</param>
        /// <param name="imagePoints2">Vector of vectors of the projections of the calibration pattern points, observed by the second camera.</param>
        /// <param name="cameraMatrix1">Input/output first camera matrix</param>
        /// <param name="distCoeffs1">Input/output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements. 
        /// The output vector length depends on the flags.</param>
        /// <param name="cameraMatrix2"> Input/output second camera matrix. The parameter is similar to cameraMatrix1 .</param>
        /// <param name="distCoeffs2">Input/output lens distortion coefficients for the second camera. The parameter is similar to distCoeffs1 .</param>
        /// <param name="imageSize">Size of the image used only to initialize intrinsic camera matrix.</param>
        /// <param name="R">Output rotation matrix between the 1st and the 2nd camera coordinate systems.</param>
        /// <param name="T">Output translation vector between the coordinate systems of the cameras.</param>
        /// <param name="E">Output essential matrix.</param>
        /// <param name="F">Output fundamental matrix.</param>
        /// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
        /// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlag values</param>
        /// <returns>The final re-projection error reported by the native routine.</returns>
        public static double StereoCalibrate(IEnumerable<InputArray> objectPoints,
                                             IEnumerable<InputArray> imagePoints1,
                                             IEnumerable<InputArray> imagePoints2,
                                             InputOutputArray cameraMatrix1, InputOutputArray distCoeffs1,
                                             InputOutputArray cameraMatrix2, InputOutputArray distCoeffs2,
                                             Size imageSize, OutputArray R,
                                             OutputArray T, OutputArray E, OutputArray F,
                                             CalibrationFlags flags = CalibrationFlags.FixIntrinsic,
                                             TermCriteria? criteria = null)
        {
            // nameof guards for refactor safety, consistent with the newer blocks in this file.
            if (objectPoints == null)
                throw new ArgumentNullException(nameof(objectPoints));
            if (imagePoints1 == null)
                throw new ArgumentNullException(nameof(imagePoints1));
            if (imagePoints2 == null)
                throw new ArgumentNullException(nameof(imagePoints2));
            if (cameraMatrix1 == null)
                throw new ArgumentNullException(nameof(cameraMatrix1));
            if (distCoeffs1 == null)
                throw new ArgumentNullException(nameof(distCoeffs1));
            if (cameraMatrix2 == null)
                throw new ArgumentNullException(nameof(cameraMatrix2));
            if (distCoeffs2 == null)
                throw new ArgumentNullException(nameof(distCoeffs2));
            cameraMatrix1.ThrowIfDisposed();
            distCoeffs1.ThrowIfDisposed();
            cameraMatrix2.ThrowIfDisposed();
            distCoeffs2.ThrowIfDisposed();
            cameraMatrix1.ThrowIfNotReady();
            cameraMatrix2.ThrowIfNotReady();
            distCoeffs1.ThrowIfNotReady();
            distCoeffs2.ThrowIfNotReady();

            IntPtr[] opPtrs = EnumerableEx.SelectPtrs(objectPoints);
            IntPtr[] ip1Ptrs = EnumerableEx.SelectPtrs(imagePoints1);
            IntPtr[] ip2Ptrs = EnumerableEx.SelectPtrs(imagePoints2);

            TermCriteria criteria0 = criteria.GetValueOrDefault(
                new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 30, 1e-6));

            double result =
                NativeMethods.calib3d_stereoCalibrate_InputArray(
                    opPtrs, opPtrs.Length,
                    ip1Ptrs, ip1Ptrs.Length, ip2Ptrs, ip2Ptrs.Length,
                    cameraMatrix1.CvPtr, distCoeffs1.CvPtr,
                    cameraMatrix2.CvPtr, distCoeffs2.CvPtr,
                    imageSize, ToPtr(R), ToPtr(T), ToPtr(E), ToPtr(F),
                    (int)flags, criteria0);

            cameraMatrix1.Fix();
            distCoeffs1.Fix();
            cameraMatrix2.Fix();
            distCoeffs2.Fix();
            // The output matrices are optional (ToPtr tolerates null), so fix conditionally.
            R?.Fix();
            T?.Fix();
            E?.Fix();
            F?.Fix();

            return result;
        }
 /// <summary>
 /// P/Invoke binding: constructs a native BOWKMeansTrainer and returns its handle.
 /// </summary>
 public static extern IntPtr features2d_BOWKMeansTrainer_new(
     int clusterCount, TermCriteria termcrit, int attempts, int flags);
예제 #23
0
        /// <summary>
        /// finds intrinsic and extrinsic parameters of a stereo camera
        /// </summary>
        /// <param name="objectPoints">Vector of vectors of the calibration pattern points.</param>
        /// <param name="imagePoints1">Vector of vectors of the projections of the calibration pattern points, observed by the first camera.</param>
        /// <param name="imagePoints2">Vector of vectors of the projections of the calibration pattern points, observed by the second camera.</param>
        /// <param name="cameraMatrix1">Input/output first camera matrix</param>
        /// <param name="distCoeffs1">Input/output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements. 
        /// The output vector length depends on the flags.</param>
        /// <param name="cameraMatrix2"> Input/output second camera matrix. The parameter is similar to cameraMatrix1 .</param>
        /// <param name="distCoeffs2">Input/output lens distortion coefficients for the second camera. The parameter is similar to distCoeffs1 .</param>
        /// <param name="imageSize">Size of the image used only to initialize intrinsic camera matrix.</param>
        /// <param name="R">Output rotation matrix between the 1st and the 2nd camera coordinate systems.</param>
        /// <param name="T">Output translation vector between the coordinate systems of the cameras.</param>
        /// <param name="E">Output essential matrix.</param>
        /// <param name="F">Output fundamental matrix.</param>
        /// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
        /// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlag values</param>
        /// <returns>The final re-projection error reported by the native routine.</returns>
        public static double StereoCalibrate(IEnumerable<IEnumerable<Point3d>> objectPoints,
                                             IEnumerable<IEnumerable<Point2d>> imagePoints1,
                                             IEnumerable<IEnumerable<Point2d>> imagePoints2,
                                             double[,] cameraMatrix1, double[] distCoeffs1,
                                             double[,] cameraMatrix2, double[] distCoeffs2,
                                             Size imageSize, OutputArray R,
                                             OutputArray T, OutputArray E, OutputArray F,
                                             CalibrationFlags flags = CalibrationFlags.FixIntrinsic,
                                             TermCriteria? criteria = null)
        {
            // nameof guards for refactor safety, consistent with the newer blocks in this file.
            if (objectPoints == null)
                throw new ArgumentNullException(nameof(objectPoints));
            if (imagePoints1 == null)
                throw new ArgumentNullException(nameof(imagePoints1));
            if (imagePoints2 == null)
                throw new ArgumentNullException(nameof(imagePoints2));
            if (cameraMatrix1 == null)
                throw new ArgumentNullException(nameof(cameraMatrix1));
            if (distCoeffs1 == null)
                throw new ArgumentNullException(nameof(distCoeffs1));
            if (cameraMatrix2 == null)
                throw new ArgumentNullException(nameof(cameraMatrix2));
            if (distCoeffs2 == null)
                throw new ArgumentNullException(nameof(distCoeffs2));

            TermCriteria criteria0 = criteria.GetValueOrDefault(
                new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 30, 1e-6));

            // ArrayAddress2 pins the jagged managed arrays for the duration of the native call.
            using (var op = new ArrayAddress2<Point3d>(objectPoints))
            using (var ip1 = new ArrayAddress2<Point2d>(imagePoints1))
            using (var ip2 = new ArrayAddress2<Point2d>(imagePoints2))
            {
                return NativeMethods.calib3d_stereoCalibrate_array(
                        op.Pointer, op.Dim1Length, op.Dim2Lengths,
                        ip1.Pointer, ip1.Dim1Length, ip1.Dim2Lengths,
                        ip2.Pointer, ip2.Dim1Length, ip2.Dim2Lengths,
                        cameraMatrix1, distCoeffs1, distCoeffs1.Length,
                        cameraMatrix2, distCoeffs2, distCoeffs2.Length,
                        imageSize, ToPtr(R), ToPtr(T), ToPtr(E), ToPtr(F),
                        (int)flags, criteria0);
            }
        }
 /// <summary>
 /// P/Invoke binding: constructs a native BOWKMeansTrainer and returns its handle.
 /// </summary>
 public static extern IntPtr features2d_BOWKMeansTrainer_new(
     int clusterCount, TermCriteria termcrit, int attempts, int flags);
 /// <summary>
 /// P/Invoke binding: sets the termination criteria on a native ANN_MLP instance;
 /// errors are reported via the ExceptionStatus result.
 /// </summary>
 public static extern ExceptionStatus ml_ANN_MLP_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #26
0
 /// <summary>
 /// P/Invoke binding: sets the termination criteria on a native LogisticRegression
 /// instance; errors are reported via the ExceptionStatus result.
 /// </summary>
 public static extern ExceptionStatus ml_LogisticRegression_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #27
0
 /// <summary>
 /// Performs the initial step of meanshift segmentation of an image.
 /// </summary>
 /// <param name="src">The source 8-bit, 3-channel image.</param>
 /// <param name="dst">The destination image of the same format and the same size as the source.</param>
 /// <param name="sp">The spatial window radius.</param>
 /// <param name="sr">The color window radius.</param>
 /// <param name="maxLevel">Maximum level of the pyramid for the segmentation.</param>
 /// <param name="termcrit">Termination criteria: when to stop meanshift iterations.</param>
 public static void PyrMeanShiftFiltering(InputArray src, OutputArray dst,
     double sp, double sr, int maxLevel = 1, TermCriteria? termcrit = null)
 {
     if (src == null)
         throw new ArgumentNullException(nameof(src));
     if (dst == null)
         throw new ArgumentNullException(nameof(dst));
     src.ThrowIfDisposed();
     dst.ThrowIfNotReady();

     // Default criteria: at most 5 iterations or a window shift below 1.
     var criteria = termcrit ?? new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 5, 1);
     NativeMethods.imgproc_pyrMeanShiftFiltering(src.CvPtr, dst.CvPtr, sp, sr, maxLevel, criteria);
     GC.KeepAlive(src);
     dst.Fix();
 }
 /// <summary>
 /// P/Invoke binding: sets the termination criteria on a native RTrees instance.
 /// </summary>
 public static extern void ml_RTrees_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #29
0
        /// <summary>
        /// adjusts the corner locations with sub-pixel accuracy to maximize the certain cornerness criteria
        /// </summary>
        /// <param name="image">Input image.</param>
        /// <param name="inputCorners">Initial coordinates of the input corners and refined coordinates provided for output.</param>
        /// <param name="winSize">Half of the side length of the search window.</param>
        /// <param name="zeroZone">Half of the size of the dead region in the middle of the search zone 
        /// over which the summation in the formula below is not done. It is used sometimes to avoid possible singularities 
        /// of the autocorrelation matrix. The value of (-1,-1) indicates that there is no such a size.</param>
        /// <param name="criteria">Criteria for termination of the iterative process of corner refinement. 
        /// That is, the process of corner position refinement stops either after criteria.maxCount iterations 
        /// or when the corner position moves by less than criteria.epsilon on some iteration.</param>
        /// <returns>The refined corner coordinates.</returns>
        public static Point2f[] CornerSubPix(InputArray image, IEnumerable<Point2f> inputCorners,
            Size winSize, Size zeroZone, TermCriteria criteria)
        {
            if (image == null)
                throw new ArgumentNullException(nameof(image));
            if (inputCorners == null)
                throw new ArgumentNullException(nameof(inputCorners));
            image.ThrowIfDisposed();

            // Work on a defensive copy so the caller's array is never mutated in place.
            var corners = EnumerableEx.ToArray(inputCorners);
            var scratch = (Point2f[])corners.Clone();

            using (var cornersVec = new VectorOfPoint2f(scratch))
            {
                NativeMethods.imgproc_cornerSubPix(image.CvPtr, cornersVec.CvPtr, winSize, zeroZone, criteria);
                GC.KeepAlive(image);
                return cornersVec.ToArray();
            }
        }
예제 #30
0
 /// <summary>
 /// Native binding for cv::calibrateCamera (InputArray variant); takes arrays of native object/image
 /// point pointers with their lengths and returns the reprojection error as a double.
 /// NOTE(review): no [DllImport] attribute is visible in this excerpt — presumably stripped; confirm in the full source.
 /// </summary>
 public static extern double calib3d_calibrateCamera_InputArray(
     IntPtr[] objectPoints, int objectPointsSize,
     IntPtr[] imagePoints, int imagePointsSize,
     Size imageSize,
     IntPtr cameraMatrix,IntPtr distCoeffs,
     IntPtr rvecs, IntPtr tvecs,
     int flags, TermCriteria criteria);
예제 #31
0
 /// <summary>
 /// Native binding for cv::meanShift; the number of iterations performed is written to
 /// <paramref name="returnValue"/> and the search window is updated in place.
 /// </summary>
 public static extern ExceptionStatus video_meanShift(
     IntPtr probImage, ref Rect window, TermCriteria criteria, out int returnValue);
예제 #32
0
 /// <summary>
 /// Native binding for cv::stereoCalibrate (InputArray variant); takes arrays of native point
 /// pointers with lengths plus both camera matrices/distortion coefficients, and returns the
 /// reprojection error as a double. R/T/E/F are optional native output pointers.
 /// </summary>
 public static extern double calib3d_stereoCalibrate_InputArray(
     IntPtr[] objectPoints, int opSize,
     IntPtr[] imagePoints1, int ip1Size,
     IntPtr[] imagePoints2, int ip2Size,
     IntPtr cameraMatrix1,
     IntPtr distCoeffs1,
     IntPtr cameraMatrix2,
     IntPtr distCoeffs2,
     Size imageSize,
     IntPtr R, IntPtr T,
     IntPtr E, IntPtr F,
     int flags, TermCriteria criteria);
예제 #33
0
 /// <summary>
 /// Native binding for cv::calcOpticalFlowPyrLK (InputArray variant); flags is the OpticalFlowFlags
 /// value passed as int.
 /// </summary>
 public static extern ExceptionStatus video_calcOpticalFlowPyrLK_InputArray(
     IntPtr prevImg, IntPtr nextImg,
     IntPtr prevPts, IntPtr nextPts,
     IntPtr status, IntPtr err,
     Size winSize, int maxLevel, TermCriteria criteria,
     int flags, double minEigThreshold);

 /// <summary>
 /// Native binding: sets the training termination criteria on a cv::ml::LogisticRegression instance.
 /// </summary>
 public static extern void ml_LogisticRegression_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #35
0
 /// <summary>
 /// Native binding for cv::cuda::meanShiftProc; writes the filtered image and the point map to the
 /// two destination pointers, optionally on the given CUDA stream.
 /// </summary>
 public static extern void cuda_imgproc_meanShiftProc(IntPtr src, IntPtr dstr, IntPtr dstsp, int sp, int sr, TermCriteria criteria, IntPtr stream);
예제 #36
0
 /// <summary>
 /// Native binding for cv::kmeans; the compactness measure of the best attempt is written to
 /// <paramref name="returnValue"/>. flags is the KMeansFlags value passed as int.
 /// </summary>
 public static extern ExceptionStatus core_kmeans(
     IntPtr data, int k, IntPtr bestLabels,
     TermCriteria criteria, int attempts, int flags, IntPtr centers,
     out double returnValue);
예제 #37
0
 /// <summary>
 /// Native binding for cv::cuda::meanShiftSegmentation, optionally on the given CUDA stream.
 /// </summary>
 public static extern void cuda_imgproc_meanShiftSegmentation(IntPtr src, IntPtr dst, int sp, int sr, int minsize, TermCriteria criteria, IntPtr stream);
 /// <summary>
 /// Native binding for cv::cornerSubPix; refines the corner coordinates stored in the native vector in place.
 /// </summary>
 public static extern void imgproc_cornerSubPix(IntPtr image, IntPtr corners,
                                                Size winSize, Size zeroZone, TermCriteria criteria);
 /// <summary>
 /// Native binding: sets the training termination criteria on a cv::ml::ANN_MLP instance.
 /// </summary>
 public static extern void ml_ANN_MLP_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #40
0
 /// <summary>
 /// Native binding for cv::meanShift; returns the number of iterations performed and updates the
 /// search window in place.
 /// </summary>
 public static extern int video_meanShift(
     IntPtr probImage, ref Rect window, TermCriteria criteria);
예제 #41
0
 /// <summary>
 /// Native binding: reads the training termination criteria from a cv::ml::RTrees instance.
 /// </summary>
 public static extern ExceptionStatus ml_RTrees_getTermCriteria(IntPtr obj, out TermCriteria returnValue);
예제 #42
0
 /// <summary>
 /// Native binding: sets the training termination criteria on a cv::ml::SVM instance.
 /// </summary>
 public static extern void ml_SVM_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #43
0
 /// <summary>
 /// Native binding for cv::undistortPoints (overload with explicit iterative termination criteria);
 /// R and P are optional rectification/projection matrix pointers.
 /// </summary>
 public static extern ExceptionStatus calib3d_undistortPointsIter(
     IntPtr src, IntPtr dst,
     IntPtr cameraMatrix, IntPtr distCoeffs,
     IntPtr R, IntPtr P, TermCriteria criteria);
예제 #44
0
 /// <summary>
 /// Native binding: sets the training termination criteria on a cv::ml::RTrees instance.
 /// </summary>
 public static extern ExceptionStatus ml_RTrees_setTermCriteria(IntPtr obj, TermCriteria val);
예제 #45
0
        /// <summary>
        /// computes sparse optical flow using multi-scale Lucas-Kanade algorithm
        /// </summary>
        /// <param name="prevImg">First input image or pyramid.</param>
        /// <param name="nextImg">Second input image or pyramid of the same size and type as prevImg.</param>
        /// <param name="prevPts">Vector of 2D points for which the flow needs to be found.</param>
        /// <param name="nextPts">Output vector of 2D points containing the calculated new positions of the input features.</param>
        /// <param name="status">Output status vector; each element is set when the flow for the corresponding feature has been found.</param>
        /// <param name="err">Output vector of errors.</param>
        /// <param name="winSize">Size of the search window at each pyramid level. Defaults to 21x21 when null.</param>
        /// <param name="maxLevel">0-based maximal pyramid level number.</param>
        /// <param name="criteria">Termination criteria of the iterative search algorithm. Defaults to 30 iterations / epsilon 0.01 when null.</param>
        /// <param name="flags">Operation flags.</param>
        /// <param name="minEigThreshold">Threshold used to filter out features whose minimum eigenvalue is too small.</param>
        public static void CalcOpticalFlowPyrLK(
            InputArray prevImg, InputArray nextImg,
            InputArray prevPts, InputOutputArray nextPts,
            OutputArray status, OutputArray err,
            Size? winSize = null,
            int maxLevel = 3,
            TermCriteria? criteria = null,
            OpticalFlowFlags flags = OpticalFlowFlags.None,
            double minEigThreshold = 1e-4)
        {
            // nameof keeps the exception messages refactor-safe (consistent with the rest of the file).
            if (prevImg == null)
                throw new ArgumentNullException(nameof(prevImg));
            if (nextImg == null)
                throw new ArgumentNullException(nameof(nextImg));
            if (prevPts == null)
                throw new ArgumentNullException(nameof(prevPts));
            if (nextPts == null)
                throw new ArgumentNullException(nameof(nextPts));
            if (status == null)
                throw new ArgumentNullException(nameof(status));
            if (err == null)
                throw new ArgumentNullException(nameof(err));
            prevImg.ThrowIfDisposed();
            nextImg.ThrowIfDisposed();
            prevPts.ThrowIfDisposed();
            nextPts.ThrowIfNotReady();
            status.ThrowIfNotReady();
            err.ThrowIfNotReady();

            Size winSize0 = winSize.GetValueOrDefault(new Size(21, 21));
            TermCriteria criteria0 = criteria.GetValueOrDefault(
                TermCriteria.Both(30, 0.01));

            NativeMethods.video_calcOpticalFlowPyrLK_InputArray(
                prevImg.CvPtr, nextImg.CvPtr, prevPts.CvPtr, nextPts.CvPtr,
                status.CvPtr, err.CvPtr, winSize0, maxLevel,
                criteria0, (int)flags, minEigThreshold);

            // Keep the managed wrappers alive until the native call has finished using their pointers.
            GC.KeepAlive(prevImg);
            GC.KeepAlive(nextImg);
            GC.KeepAlive(prevPts);

            nextPts.Fix();
            status.Fix();
            err.Fix();
        }
예제 #46
0
 /// <summary>
 /// Native binding for cv::kmeans; returns the compactness measure of the best attempt as a double.
 /// </summary>
 public static extern double core_kmeans(IntPtr data, int k, IntPtr bestLabels,
                                         TermCriteria criteria, int attempts, int flags, IntPtr centers);
예제 #47
0
        /// <summary>
        /// computes sparse optical flow using multi-scale Lucas-Kanade algorithm
        /// </summary>
        /// <param name="prevImg">First input image or pyramid.</param>
        /// <param name="nextImg">Second input image or pyramid of the same size and type as prevImg.</param>
        /// <param name="prevPts">Array of 2D points for which the flow needs to be found.</param>
        /// <param name="nextPts">Receives the calculated new positions of the input features.</param>
        /// <param name="status">Receives the per-feature status flags (non-zero when the flow was found).</param>
        /// <param name="err">Receives the per-feature error values.</param>
        /// <param name="winSize">Size of the search window at each pyramid level. Defaults to 21x21 when null.</param>
        /// <param name="maxLevel">0-based maximal pyramid level number.</param>
        /// <param name="criteria">Termination criteria of the iterative search algorithm. Defaults to 30 iterations / epsilon 0.01 when null.</param>
        /// <param name="flags">Operation flags.</param>
        /// <param name="minEigThreshold">Threshold used to filter out features whose minimum eigenvalue is too small.</param>
        public static void CalcOpticalFlowPyrLK(
            InputArray prevImg, InputArray nextImg,
            Point2f[] prevPts, ref Point2f[] nextPts,
            out byte[] status, out float[] err,
            Size? winSize = null,
            int maxLevel = 3,
            TermCriteria? criteria = null,
            OpticalFlowFlags flags = OpticalFlowFlags.None,
            double minEigThreshold = 1e-4)
        {
            // nameof keeps the exception messages refactor-safe (consistent with the rest of the file).
            if (prevImg == null)
                throw new ArgumentNullException(nameof(prevImg));
            if (nextImg == null)
                throw new ArgumentNullException(nameof(nextImg));
            if (prevPts == null)
                throw new ArgumentNullException(nameof(prevPts));
            if (nextPts == null)
                throw new ArgumentNullException(nameof(nextPts));
            prevImg.ThrowIfDisposed();
            nextImg.ThrowIfDisposed();

            Size winSize0 = winSize.GetValueOrDefault(new Size(21, 21));
            TermCriteria criteria0 = criteria.GetValueOrDefault(
                TermCriteria.Both(30, 0.01));

            using (var nextPtsVec = new VectorOfPoint2f())
            using (var statusVec = new VectorOfByte())
            using (var errVec = new VectorOfFloat())
            {
                NativeMethods.video_calcOpticalFlowPyrLK_vector(
                    prevImg.CvPtr, nextImg.CvPtr, prevPts, prevPts.Length,
                    nextPtsVec.CvPtr, statusVec.CvPtr, errVec.CvPtr,
                    winSize0, maxLevel, criteria0, (int)flags, minEigThreshold);
                // Keep the managed wrappers alive until the native call has finished using their pointers.
                GC.KeepAlive(prevImg);
                GC.KeepAlive(nextImg);
                nextPts = nextPtsVec.ToArray();
                status = statusVec.ToArray();
                err = errVec.ToArray();
            }
        }
예제 #48
0
 /// <summary>
 /// Constructs a k-means based bag-of-words vocabulary trainer.
 /// </summary>
 /// <param name="clusterCount">Number of clusters (vocabulary size) for k-means.</param>
 /// <param name="termcrit">k-means termination criteria; when null, a default-constructed TermCriteria is used.</param>
 /// <param name="attempts">Number of times k-means is executed with different initial labellings.</param>
 /// <param name="flags">k-means center initialization flags.</param>
 public BOWKMeansTrainer(int clusterCount, TermCriteria? termcrit = null,
     int attempts = 3, KMeansFlags flags = KMeansFlags.PpCenters)
 {
     var termCritValue = termcrit.GetValueOrDefault(new TermCriteria());
     ptr = NativeMethods.features2d_BOWKMeansTrainer_new(clusterCount, termCritValue, attempts, (int)flags);
 }
예제 #49
0
 /// <summary>
 /// Native binding for cv::CamShift; returns the tracked object's rotated bounding box and updates
 /// the search window in place.
 /// </summary>
 public static extern RotatedRect video_CamShift(
     IntPtr probImage, ref Rect window, TermCriteria criteria);
예제 #50
0
 /// <summary>
 /// Native binding for cv::kmeans; returns the compactness measure of the best attempt as a double.
 /// </summary>
 public static extern double core_kmeans(IntPtr data, int k, IntPtr bestLabels,
     TermCriteria criteria, int attempts, int flags, IntPtr centers);
예제 #51
0
 /// <summary>
 /// Native binding for cv::meanShift; returns the number of iterations performed and updates the
 /// search window in place.
 /// </summary>
 public static extern int video_meanShift(
     IntPtr probImage, ref Rect window, TermCriteria criteria);
예제 #52
0
 /// <summary>
 /// clusters the input data using k-Means algorithm
 /// </summary>
 /// <param name="data">Floating-point matrix of input samples, one row per sample.</param>
 /// <param name="k">Number of clusters to split the set by.</param>
 /// <param name="bestLabels">Input/output integer array that stores the cluster index for every sample.</param>
 /// <param name="criteria">The algorithm termination criteria.</param>
 /// <param name="attempts">Number of times the algorithm is executed using different initial labellings.</param>
 /// <param name="flags">Flags that select the method of cluster center initialization.</param>
 /// <param name="centers">Optional output matrix of the cluster centers, one row per center.</param>
 /// <returns>The compactness measure of the best attempt.</returns>
 public static double Kmeans(InputArray data, int k, InputOutputArray bestLabels,
     TermCriteria criteria, int attempts, KMeansFlags flags, OutputArray centers = null)
 {
     // nameof keeps the exception messages refactor-safe (consistent with the rest of the file).
     if (data == null)
         throw new ArgumentNullException(nameof(data));
     if (bestLabels == null)
         throw new ArgumentNullException(nameof(bestLabels));
     data.ThrowIfDisposed();
     bestLabels.ThrowIfDisposed();

     double ret = NativeMethods.core_kmeans(data.CvPtr, k, bestLabels.CvPtr, criteria, attempts, (int)flags, ToPtr(centers));

     bestLabels.Fix();
     centers?.Fix();
     // Keep the managed wrappers alive until the native call has finished using their pointers.
     GC.KeepAlive(data);
     GC.KeepAlive(bestLabels);
     GC.KeepAlive(centers);
     return ret;
 }
예제 #53
0
 /// <summary>
 /// Native binding: reads the training termination criteria from a cv::ml::LogisticRegression instance.
 /// </summary>
 public static extern ExceptionStatus ml_LogisticRegression_getTermCriteria(IntPtr obj, out TermCriteria returnValue);