Example #1

        #region Init and Disposal

#if LANG_JP
        /// <summary>
        /// 初期化
        /// </summary>
        /// <param name="nClusters"></param>
        /// <param name="covMatType"></param>
        /// <param name="termCrit"></param>
#else
        /// <summary>
        /// Training constructor
        /// </summary>
        /// <param name="nClusters"></param>
        /// <param name="covMatType"></param>
        /// <param name="termCrit"></param>
#endif
        public EM(
            int nClusters = DEFAULT_NCLUSTERS, 
            EMCovMatType covMatType = EMCovMatType.Diagonal, 
            TermCriteria? termCrit = null)
        {
            var termCrit0 = termCrit.GetValueOrDefault(
                TermCriteria.Both(DEFAULT_MAX_ITERS, Double.Epsilon));
            ptr = NativeMethods.ml_EM_new(nClusters, (int)covMatType, termCrit0);
        }
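A minimal usage sketch for this constructor, using only the types and factories shown in this section (EMCovMatType.Diagonal and TermCriteria.Both):
// Hypothetical usage: a 5-component Gaussian mixture model that stops
// after 100 EM iterations or when the improvement falls below 1e-6.
var em = new EM(5, EMCovMatType.Diagonal, TermCriteria.Both(100, 1e-6));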
Example #2
        /// <summary>
        /// Finds an object center, size, and orientation.
        /// </summary>
        /// <param name="probImage">Back projection of the object histogram. </param>
        /// <param name="window">Initial search window.</param>
        /// <param name="criteria">Stop criteria for the underlying MeanShift() .</param>
        /// <returns></returns>
        public static RotatedRect CamShift(
            InputArray probImage, ref Rect window, TermCriteria criteria)
        {
            if (probImage == null)
            {
                throw new ArgumentNullException(nameof(probImage));
            }
            probImage.ThrowIfDisposed();

            CvRect      window0 = window;
            RotatedRect result  = NativeMethods.video_CamShift(
                probImage.CvPtr, ref window0, criteria);

            window = window0;
            return result;
        }
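A hedged sketch of driving this wrapper frame by frame, assuming it is exposed as Cv2.CamShift; GetBackProjection is a hypothetical helper standing in for a CalcBackProject pipeline:
// Hypothetical tracking step. 'frame' is the current video frame.
Mat backProj = GetBackProjection(frame);   // hypothetical helper
var window = new Rect(100, 100, 50, 50);   // initial search window
// Stop after 10 iterations or when the window moves less than 1 pixel.
RotatedRect box = Cv2.CamShift(backProj, ref window, TermCriteria.Both(10, 1.0));
// 'window' was updated in place and can seed the next frame's search.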
Example #3
        /// <summary>
        /// Finds an object on a back projection image.
        /// </summary>
        /// <param name="probImage">Back projection of the object histogram.</param>
        /// <param name="window">Initial search window.</param>
        /// <param name="criteria">Stop criteria for the iterative search algorithm.</param>
        /// <returns>Number of iterations MeanShift took to converge.</returns>
        public static int MeanShift(
            InputArray probImage, ref Rect window, TermCriteria criteria)
        {
            if (probImage == null)
            {
                throw new ArgumentNullException("probImage");
            }
            probImage.ThrowIfDisposed();

            CvRect window0 = window;
            int    result  = NativeMethods.video_meanShift(
                probImage.CvPtr, ref window0, criteria);

            window = window0;
            return result;
        }
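MeanShift is driven the same way; a hedged sketch, again assuming the Cv2.MeanShift wrapper and the same hypothetical back-projection helper:
Mat backProj = GetBackProjection(frame);   // hypothetical helper
var window = new Rect(100, 100, 50, 50);
int iterations = Cv2.MeanShift(backProj, ref window, TermCriteria.Both(10, 1.0));
// 'window' now holds the converged location; 'iterations' is how long it took.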
Example #4
        /// <summary>
        /// computes sparse optical flow using multi-scale Lucas-Kanade algorithm
        /// </summary>
        /// <param name="prevImg"></param>
        /// <param name="nextImg"></param>
        /// <param name="prevPts"></param>
        /// <param name="nextPts"></param>
        /// <param name="status"></param>
        /// <param name="err"></param>
        /// <param name="winSize"></param>
        /// <param name="maxLevel"></param>
        /// <param name="criteria"></param>
        /// <param name="flags"></param>
        /// <param name="minEigThreshold"></param>
        public static void CalcOpticalFlowPyrLK(
            InputArray prevImg, InputArray nextImg,
            Point2f[] prevPts, ref Point2f[] nextPts,
            out byte[] status, out float[] err,
            Size? winSize          = null,
            int maxLevel           = 3,
            TermCriteria? criteria = null,
            OpticalFlowFlags flags = OpticalFlowFlags.None,
            double minEigThreshold = 1e-4)
        {
            if (prevImg == null)
            {
                throw new ArgumentNullException("prevImg");
            }
            if (nextImg == null)
            {
                throw new ArgumentNullException("nextImg");
            }
            if (prevPts == null)
            {
                throw new ArgumentNullException("prevPts");
            }
            if (nextPts == null)
            {
                throw new ArgumentNullException("nextPts");
            }
            prevImg.ThrowIfDisposed();
            nextImg.ThrowIfDisposed();

            Size         winSize0  = winSize.GetValueOrDefault(new Size(21, 21));
            TermCriteria criteria0 = criteria.GetValueOrDefault(
                TermCriteria.Both(30, 0.01));

            using (var nextPtsVec = new VectorOfPoint2f())
            using (var statusVec = new VectorOfByte())
            using (var errVec = new VectorOfFloat())
            {
                NativeMethods.video_calcOpticalFlowPyrLK_vector(
                    prevImg.CvPtr, nextImg.CvPtr, prevPts, prevPts.Length,
                    nextPtsVec.CvPtr, statusVec.CvPtr, errVec.CvPtr,
                    winSize0, maxLevel, criteria0, (int)flags, minEigThreshold);
                nextPts = nextPtsVec.ToArray();
                status = statusVec.ToArray();
                err = errVec.ToArray();
            }
        }
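A hedged usage sketch for this array-based overload; prevGray/nextGray come from a hypothetical helper, and the Cv2.GoodFeaturesToTrack signature is assumed to match OpenCvSharp's:
Mat prevGray = GetGrayFrame(0), nextGray = GetGrayFrame(1);  // hypothetical helpers
Point2f[] prevPts = Cv2.GoodFeaturesToTrack(prevGray, 200, 0.01, 10, null, 3, false, 0.04);
Point2f[] nextPts = null;
byte[] status;
float[] err;
Cv2.CalcOpticalFlowPyrLK(prevGray, nextGray, prevPts, ref nextPts, out status, out err,
    winSize: new Size(21, 21), criteria: TermCriteria.Both(30, 0.01));
// Keep only the points whose flow was actually found (status[i] == 1).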
Example #5
        /// <summary>
        /// finds intrinsic and extrinsic parameters of a stereo camera
        /// </summary>
        /// <param name="objectPoints">Vector of vectors of the calibration pattern points.</param>
        /// <param name="imagePoints1">Vector of vectors of the projections of the calibration pattern points, observed by the first camera.</param>
        /// <param name="imagePoints2">Vector of vectors of the projections of the calibration pattern points, observed by the second camera.</param>
        /// <param name="cameraMatrix1">Input/output first camera matrix</param>
        /// <param name="distCoeffs1">Input/output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements. 
        /// The output vector length depends on the flags.</param>
        /// <param name="cameraMatrix2"> Input/output second camera matrix. The parameter is similar to cameraMatrix1 .</param>
        /// <param name="distCoeffs2">Input/output lens distortion coefficients for the second camera. The parameter is similar to distCoeffs1 .</param>
        /// <param name="imageSize">Size of the image used only to initialize intrinsic camera matrix.</param>
        /// <param name="R">Output rotation matrix between the 1st and the 2nd camera coordinate systems.</param>
        /// <param name="T">Output translation vector between the coordinate systems of the cameras.</param>
        /// <param name="E">Output essential matrix.</param>
        /// <param name="F">Output fundamental matrix.</param>
        /// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
        /// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlag values</param>
        /// <returns></returns>
        public static double StereoCalibrate(IEnumerable<IEnumerable<Point3d>> objectPoints,
                                             IEnumerable<IEnumerable<Point2d>> imagePoints1,
                                             IEnumerable<IEnumerable<Point2d>> imagePoints2,
                                             double[,] cameraMatrix1, double[] distCoeffs1,
                                             double[,] cameraMatrix2, double[] distCoeffs2,
                                             Size imageSize, OutputArray R,
                                             OutputArray T, OutputArray E, OutputArray F,
                                             TermCriteria? criteria = null,
                                             CalibrationFlag flags = CalibrationFlag.FixIntrinsic)
        {
            if (objectPoints == null)
                throw new ArgumentNullException("objectPoints");
            if (imagePoints1 == null)
                throw new ArgumentNullException("imagePoints1");
            if (imagePoints2 == null)
                throw new ArgumentNullException("imagePoints2");
            if (cameraMatrix1 == null)
                throw new ArgumentNullException("cameraMatrix1");
            if (distCoeffs1 == null)
                throw new ArgumentNullException("distCoeffs1");
            if (cameraMatrix2 == null)
                throw new ArgumentNullException("cameraMatrix2");
            if (distCoeffs2 == null)
                throw new ArgumentNullException("distCoeffs2");

            TermCriteria criteria0 = criteria.GetValueOrDefault(
                new TermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, 30, 1e-6));

            using (var op = new ArrayAddress2<Point3d>(objectPoints))
            using (var ip1 = new ArrayAddress2<Point2d>(imagePoints1))
            using (var ip2 = new ArrayAddress2<Point2d>(imagePoints2))
            {
                return NativeMethods.calib3d_stereoCalibrate_array(
                        op.Pointer, op.Dim1Length, op.Dim2Lengths,
                        ip1.Pointer, ip1.Dim1Length, ip1.Dim2Lengths,
                        ip2.Pointer, ip2.Dim1Length, ip2.Dim2Lengths,
                        cameraMatrix1, distCoeffs1, distCoeffs1.Length,
                        cameraMatrix2, distCoeffs2, distCoeffs2.Length,
                        imageSize, ToPtr(R), ToPtr(T), ToPtr(E), ToPtr(F),
                        criteria0, (int)flags);
            }
        }
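A condensed, hedged sketch of calling this array-based overload. The corner lists are hypothetical chessboard-detector output, the intrinsics are assumed pre-calibrated (required by FixIntrinsic), and Mat's implicit conversion to OutputArray is assumed:
// Hypothetical inputs: one chessboard view seen by both cameras.
var objectPoints = new List<List<Point3d>> { patternCorners3d };  // hypothetical
var imagePoints1 = new List<List<Point2d>> { cornersInCam1 };     // hypothetical
var imagePoints2 = new List<List<Point2d>> { cornersInCam2 };     // hypothetical
double[,] cameraMatrix1 = knownIntrinsics1; double[] distCoeffs1 = knownDist1;
double[,] cameraMatrix2 = knownIntrinsics2; double[] distCoeffs2 = knownDist2;
var R = new Mat(); var T = new Mat(); var E = new Mat(); var F = new Mat();
double rms = Cv2.StereoCalibrate(objectPoints, imagePoints1, imagePoints2,
    cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2,
    new Size(640, 480), R, T, E, F,
    TermCriteria.Both(60, 1e-6), CalibrationFlag.FixIntrinsic);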
Example #6
        /// <summary>
        /// finds intrinsic and extrinsic parameters of a stereo camera
        /// </summary>
        /// <param name="objectPoints">Vector of vectors of the calibration pattern points.</param>
        /// <param name="imagePoints1">Vector of vectors of the projections of the calibration pattern points, observed by the first camera.</param>
        /// <param name="imagePoints2">Vector of vectors of the projections of the calibration pattern points, observed by the second camera.</param>
        /// <param name="cameraMatrix1">Input/output first camera matrix</param>
        /// <param name="distCoeffs1">Input/output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements. 
        /// The output vector length depends on the flags.</param>
        /// <param name="cameraMatrix2"> Input/output second camera matrix. The parameter is similar to cameraMatrix1 .</param>
        /// <param name="distCoeffs2">Input/output lens distortion coefficients for the second camera. The parameter is similar to distCoeffs1 .</param>
        /// <param name="imageSize">Size of the image used only to initialize intrinsic camera matrix.</param>
        /// <param name="R">Output rotation matrix between the 1st and the 2nd camera coordinate systems.</param>
        /// <param name="T">Output translation vector between the coordinate systems of the cameras.</param>
        /// <param name="E">Output essential matrix.</param>
        /// <param name="F">Output fundamental matrix.</param>
        /// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
        /// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlag values</param>
        /// <returns></returns>
        public static double StereoCalibrate(IEnumerable<InputArray> objectPoints,
                                             IEnumerable<InputArray> imagePoints1,
                                             IEnumerable<InputArray> imagePoints2,
                                             InputOutputArray cameraMatrix1, InputOutputArray distCoeffs1,
                                             InputOutputArray cameraMatrix2, InputOutputArray distCoeffs2,
                                             Size imageSize, OutputArray R,
                                             OutputArray T, OutputArray E, OutputArray F,
                                             TermCriteria? criteria = null,
                                             CalibrationFlag flags = CalibrationFlag.FixIntrinsic)
        {
            if (objectPoints == null)
                throw new ArgumentNullException("objectPoints");
            if (imagePoints1 == null)
                throw new ArgumentNullException("imagePoints1");
            if (imagePoints2 == null)
                throw new ArgumentNullException("imagePoints2");
            if (cameraMatrix1 == null)
                throw new ArgumentNullException("cameraMatrix1");
            if (distCoeffs1 == null)
                throw new ArgumentNullException("distCoeffs1");
            if (cameraMatrix2 == null)
                throw new ArgumentNullException("cameraMatrix2");
            if (distCoeffs2 == null)
                throw new ArgumentNullException("distCoeffs2");
            cameraMatrix1.ThrowIfDisposed();
            distCoeffs1.ThrowIfDisposed();
            cameraMatrix2.ThrowIfDisposed();
            distCoeffs2.ThrowIfDisposed();
            cameraMatrix1.ThrowIfNotReady();
            cameraMatrix2.ThrowIfNotReady();
            distCoeffs1.ThrowIfNotReady();
            distCoeffs2.ThrowIfNotReady();

            IntPtr[] opPtrs = EnumerableEx.SelectPtrs(objectPoints);
            IntPtr[] ip1Ptrs = EnumerableEx.SelectPtrs(imagePoints1);
            IntPtr[] ip2Ptrs = EnumerableEx.SelectPtrs(imagePoints2);

            TermCriteria criteria0 = criteria.GetValueOrDefault(
                new TermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, 30, 1e-6));

            double result =
                NativeMethods.calib3d_stereoCalibrate_InputArray(
                    opPtrs, opPtrs.Length,
                    ip1Ptrs, ip1Ptrs.Length, ip2Ptrs, ip2Ptrs.Length,
                    cameraMatrix1.CvPtr, distCoeffs1.CvPtr,
                    cameraMatrix2.CvPtr, distCoeffs2.CvPtr,
                    imageSize, ToPtr(R), ToPtr(T), ToPtr(E), ToPtr(F),
                    criteria0, (int)flags
                    );

            cameraMatrix1.Fix();
            distCoeffs1.Fix();
            cameraMatrix2.Fix();
            distCoeffs2.Fix();
            if (R != null)
                R.Fix();
            if (T != null)
                T.Fix();
            if (E != null)
                E.Fix();
            if (F != null)
                F.Fix();

            return result;
        }
Example #7
        /// <summary>
        /// finds intrinsic and extrinsic camera parameters from several views of a known calibration pattern.
        /// </summary>
        /// <param name="objectPoints">In the new interface it is a vector of vectors of calibration pattern points in the calibration pattern coordinate space. 
        /// The outer vector contains as many elements as the number of the pattern views. If the same calibration pattern is shown in each view and 
        /// it is fully visible, all the vectors will be the same. Although, it is possible to use partially occluded patterns, or even different patterns 
        /// in different views. Then, the vectors will be different. The points are 3D, but since they are in a pattern coordinate system, then, 
        /// if the rig is planar, it may make sense to put the model to a XY coordinate plane so that Z-coordinate of each input object point is 0.
        /// In the old interface all the vectors of object points from different views are concatenated together.</param>
        /// <param name="imagePoints">In the new interface it is a vector of vectors of the projections of calibration pattern points. 
        /// imagePoints.Count() and objectPoints.Count() and imagePoints[i].Count() must be equal to objectPoints[i].Count() for each i.</param>
        /// <param name="imageSize">Size of the image used only to initialize the intrinsic camera matrix.</param>
        /// <param name="cameraMatrix">Output 3x3 floating-point camera matrix. 
        /// If CV_CALIB_USE_INTRINSIC_GUESS and/or CV_CALIB_FIX_ASPECT_RATIO are specified, some or all of fx, fy, cx, cy must be 
        /// initialized before calling the function.</param>
        /// <param name="distCoeffs">Output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements.</param>
        /// <param name="rvecs">Output vector of rotation vectors (see Rodrigues() ) estimated for each pattern view. That is, each k-th rotation vector 
        /// together with the corresponding k-th translation vector (see the next output parameter description) brings the calibration pattern 
        /// from the model coordinate space (in which object points are specified) to the world coordinate space, that is, a real position of the 
        /// calibration pattern in the k-th pattern view (k=0.. M -1)</param>
        /// <param name="tvecs">Output vector of translation vectors estimated for each pattern view.</param>
        /// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlag values</param>
        /// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
        /// <returns></returns>
        public static double CalibrateCamera(
            IEnumerable<IEnumerable<Point3d>> objectPoints,
            IEnumerable<IEnumerable<Point2d>> imagePoints,
            Size imageSize,
            double[,] cameraMatrix,
            double[] distCoeffs,
            out Vec3d[] rvecs,
            out Vec3d[] tvecs,
            CalibrationFlag flags = CalibrationFlag.Zero,
            TermCriteria? criteria = null)
        {
            if (objectPoints == null)
                throw new ArgumentNullException("objectPoints");
            if (imagePoints == null)
                throw new ArgumentNullException("imagePoints");
            if (cameraMatrix == null)
                throw new ArgumentNullException("cameraMatrix");
            if (distCoeffs == null)
                throw new ArgumentNullException("distCoeffs");

            TermCriteria criteria0 = criteria.GetValueOrDefault(
                new TermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, 30, Double.Epsilon));

            using (var op = new ArrayAddress2<Point3d>(objectPoints))
            using (var ip = new ArrayAddress2<Point2d>(imagePoints))
            using (var rvecsVec = new VectorOfMat())
            using (var tvecsVec = new VectorOfMat())
            {
                double ret = NativeMethods.calib3d_calibrateCamera_vector(
                    op.Pointer, op.Dim1Length, op.Dim2Lengths,
                    ip.Pointer, ip.Dim1Length, ip.Dim2Lengths,
                    imageSize, cameraMatrix, distCoeffs, distCoeffs.Length,
                    rvecsVec.CvPtr, tvecsVec.CvPtr, (int)flags, criteria0);
                Mat[] rvecsM = rvecsVec.ToArray();
                Mat[] tvecsM = tvecsVec.ToArray();
                rvecs = EnumerableEx.SelectToArray(rvecsM, m => m.Get<Vec3d>(0));
                tvecs = EnumerableEx.SelectToArray(tvecsM, m => m.Get<Vec3d>(0));
                return ret;
            }
        }
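A hedged sketch of this array-based overload; the corner data is hypothetical, and with the default CalibrationFlag.Zero the intrinsics are estimated from scratch:
var objectPoints = new List<List<Point3d>> { patternCorners3d };   // hypothetical
var imagePoints  = new List<List<Point2d>> { detectedCorners2d };  // hypothetical
var cameraMatrix = new double[3, 3];   // filled in by the call
var distCoeffs   = new double[5];
Vec3d[] rvecs, tvecs;
double rms = Cv2.CalibrateCamera(objectPoints, imagePoints, new Size(640, 480),
    cameraMatrix, distCoeffs, out rvecs, out tvecs);
// rvecs[i]/tvecs[i] give the pattern pose in view i; rms is the reprojection error.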
Example #8
        /// <summary>
        /// finds intrinsic and extrinsic camera parameters from several views of a known calibration pattern.
        /// </summary>
        /// <param name="objectPoints">In the new interface it is a vector of vectors of calibration pattern points in the calibration pattern coordinate space. 
        /// The outer vector contains as many elements as the number of the pattern views. If the same calibration pattern is shown in each view and 
        /// it is fully visible, all the vectors will be the same. Although, it is possible to use partially occluded patterns, or even different patterns 
        /// in different views. Then, the vectors will be different. The points are 3D, but since they are in a pattern coordinate system, then, 
        /// if the rig is planar, it may make sense to put the model to a XY coordinate plane so that Z-coordinate of each input object point is 0.
        /// In the old interface all the vectors of object points from different views are concatenated together.</param>
        /// <param name="imagePoints">In the new interface it is a vector of vectors of the projections of calibration pattern points. 
        /// imagePoints.Count() and objectPoints.Count() and imagePoints[i].Count() must be equal to objectPoints[i].Count() for each i.</param>
        /// <param name="imageSize">Size of the image used only to initialize the intrinsic camera matrix.</param>
        /// <param name="cameraMatrix">Output 3x3 floating-point camera matrix. 
        /// If CV_CALIB_USE_INTRINSIC_GUESS and/or CV_CALIB_FIX_ASPECT_RATIO are specified, some or all of fx, fy, cx, cy must be 
        /// initialized before calling the function.</param>
        /// <param name="distCoeffs">Output vector of distortion coefficients (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements.</param>
        /// <param name="rvecs">Output vector of rotation vectors (see Rodrigues() ) estimated for each pattern view. That is, each k-th rotation vector 
        /// together with the corresponding k-th translation vector (see the next output parameter description) brings the calibration pattern 
        /// from the model coordinate space (in which object points are specified) to the world coordinate space, that is, a real position of the 
        /// calibration pattern in the k-th pattern view (k=0.. M -1)</param>
        /// <param name="tvecs">Output vector of translation vectors estimated for each pattern view.</param>
        /// <param name="flags">Different flags that may be zero or a combination of the CalibrationFlag values</param>
        /// <param name="criteria">Termination criteria for the iterative optimization algorithm.</param>
        /// <returns></returns>
        public static double CalibrateCamera(
            IEnumerable<Mat> objectPoints,
            IEnumerable<Mat> imagePoints,
            Size imageSize,
            InputOutputArray cameraMatrix,
            InputOutputArray distCoeffs,
            out Mat[] rvecs, 
            out Mat[] tvecs,
            CalibrationFlag flags = CalibrationFlag.Zero, 
            TermCriteria? criteria = null)
        {
            if (objectPoints == null)
                throw new ArgumentNullException("objectPoints");
            if (imagePoints == null)
                throw new ArgumentNullException("imagePoints");
            if (cameraMatrix == null)
                throw new ArgumentNullException("cameraMatrix");
            if (distCoeffs == null)
                throw new ArgumentNullException("distCoeffs");
            cameraMatrix.ThrowIfNotReady();
            distCoeffs.ThrowIfNotReady();

            TermCriteria criteria0 = criteria.GetValueOrDefault(
                new TermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, 30, Double.Epsilon));

            IntPtr[] objectPointsPtrs = EnumerableEx.SelectPtrs(objectPoints);
            IntPtr[] imagePointsPtrs = EnumerableEx.SelectPtrs(imagePoints);

            double ret;
            using (var rvecsVec = new VectorOfMat())
            using (var tvecsVec = new VectorOfMat())
            {
                ret = NativeMethods.calib3d_calibrateCamera_InputArray(
                    objectPointsPtrs, objectPointsPtrs.Length,
                    imagePointsPtrs, imagePointsPtrs.Length,
                    imageSize, cameraMatrix.CvPtr, distCoeffs.CvPtr,
                    rvecsVec.CvPtr, tvecsVec.CvPtr, (int)flags, criteria0);
                rvecs = rvecsVec.ToArray();
                tvecs = tvecsVec.ToArray();
            }

            cameraMatrix.Fix();
            distCoeffs.Fix();
            return ret;
        }
Example #9
 /// <summary>
 /// Performs initial step of meanshift segmentation of an image.
 /// </summary>
 /// <param name="src">The source 8-bit, 3-channel image.</param>
 /// <param name="dst">The destination image of the same format and the same size as the source.</param>
 /// <param name="sp">The spatial window radius.</param>
 /// <param name="sr">The color window radius.</param>
 /// <param name="maxLevel">Maximum level of the pyramid for the segmentation.</param>
 /// <param name="termcrit">Termination criteria: when to stop meanshift iterations.</param>
 public static void PyrMeanShiftFiltering(InputArray src, OutputArray dst,
     double sp, double sr, int maxLevel = 1, TermCriteria? termcrit = null)
 {
     if (src == null)
         throw new ArgumentNullException("src");
     if (dst == null)
         throw new ArgumentNullException("dst");
     src.ThrowIfDisposed();
     dst.ThrowIfNotReady();
     TermCriteria termcrit0 = termcrit.GetValueOrDefault(
         new TermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, 5, 1));
     NativeMethods.imgproc_pyrMeanShiftFiltering(src.CvPtr, dst.CvPtr, sp, sr, maxLevel, termcrit0);
     dst.Fix();
 }
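A hedged usage sketch, assuming Cv2.ImRead/Cv2.ImWrite and Mat's implicit conversions to InputArray/OutputArray, as is conventional in OpenCvSharp:
using (var src = Cv2.ImRead("input.png"))   // file name is illustrative
using (var dst = new Mat())
{
    // Larger sp/sr flatten more texture; the default criteria above stop each
    // window after 5 iterations or once the shift drops below 1.
    Cv2.PyrMeanShiftFiltering(src, dst, sp: 20, sr: 40, maxLevel: 2);
    Cv2.ImWrite("posterized.png", dst);
}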
Example #11
        /// <summary>
        /// computes sparse optical flow using multi-scale Lucas-Kanade algorithm
        /// </summary>
        /// <param name="prevImg"></param>
        /// <param name="nextImg"></param>
        /// <param name="prevPts"></param>
        /// <param name="nextPts"></param>
        /// <param name="status"></param>
        /// <param name="err"></param>
        /// <param name="winSize"></param>
        /// <param name="maxLevel"></param>
        /// <param name="criteria"></param>
        /// <param name="flags"></param>
        /// <param name="minEigThreshold"></param>
        public static void CalcOpticalFlowPyrLK(
            InputArray prevImg, InputArray nextImg,
            InputArray prevPts, InputOutputArray nextPts,
            OutputArray status, OutputArray err,
            Size? winSize = null,
            int maxLevel = 3,
            TermCriteria? criteria = null,
            OpticalFlowFlags flags = OpticalFlowFlags.None,
            double minEigThreshold = 1e-4)
        {
            if (prevImg == null)
                throw new ArgumentNullException("prevImg");
            if (nextImg == null)
                throw new ArgumentNullException("nextImg");
            if (prevPts == null)
                throw new ArgumentNullException("prevPts");
            if (nextPts == null)
                throw new ArgumentNullException("nextPts");
            if (status == null)
                throw new ArgumentNullException("status");
            if (err == null)
                throw new ArgumentNullException("err");
            prevImg.ThrowIfDisposed();
            nextImg.ThrowIfDisposed();
            prevPts.ThrowIfDisposed();
            nextPts.ThrowIfNotReady();
            status.ThrowIfNotReady();
            err.ThrowIfNotReady();

            Size winSize0 = winSize.GetValueOrDefault(new Size(21, 21));
            TermCriteria criteria0 = criteria.GetValueOrDefault(
                TermCriteria.Both(30, 0.01));

            NativeMethods.video_calcOpticalFlowPyrLK_InputArray(
                prevImg.CvPtr, nextImg.CvPtr, prevPts.CvPtr, nextPts.CvPtr,
                status.CvPtr, err.CvPtr, winSize0, maxLevel,
                criteria0, (int)flags, minEigThreshold);

            nextPts.Fix();
            status.Fix();
            err.Fix();
        }
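The Mat-based overload is driven the same way; a hedged sketch, assuming prevGray/nextGray are grayscale frames and prevPtsMat is an Nx1 CV_32FC2 Mat of feature coordinates:
var nextPtsMat = new Mat();
var statusMat  = new Mat();
var errMat     = new Mat();
Cv2.CalcOpticalFlowPyrLK(prevGray, nextGray, prevPtsMat, nextPtsMat,
    statusMat, errMat, new Size(21, 21), 3, TermCriteria.Both(30, 0.01));
// nextPtsMat/statusMat/errMat are filled by the native call and fixed afterwards.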
Example #13
 public static extern IntPtr features2d_BOWKMeansTrainer_new(
     int clusterCount, TermCriteria termcrit, int attempts, int flags);
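This extern is the P/Invoke entry point behind the BOWKMeansTrainer wrapper. A hedged sketch of the managed side: the constructor argument order is assumed to mirror the native signature, and KMeansFlag.PpCenters is an assumed member name for k-means++ seeding:
// Build a 100-word visual vocabulary from local feature descriptors.
var trainer = new BOWKMeansTrainer(100, TermCriteria.Both(10, 0.001), 3, KMeansFlag.PpCenters);
trainer.Add(descriptors);            // 'descriptors': hypothetical CV_32F Mat
Mat vocabulary = trainer.Cluster();  // one row per visual word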
Example #14
 /// <summary>
 /// clusters the input data using k-Means algorithm
 /// </summary>
 /// <param name="data"></param>
 /// <param name="k"></param>
 /// <param name="bestLabels"></param>
 /// <param name="criteria"></param>
 /// <param name="attempts"></param>
 /// <param name="flags"></param>
 /// <param name="centers"></param>
 /// <returns></returns>
 public static double Kmeans(InputArray data, int k, InputOutputArray bestLabels,
     TermCriteria criteria, int attempts, KMeansFlag flags, OutputArray centers = null)
 {
     if (data == null)
         throw new ArgumentNullException("data");
     if (bestLabels == null)
         throw new ArgumentNullException("bestLabels");
     data.ThrowIfDisposed();
     bestLabels.ThrowIfDisposed();
     double ret = NativeMethods.core_kmeans(data.CvPtr, k, bestLabels.CvPtr, criteria, attempts, (int)flags, ToPtr(centers));
     bestLabels.Fix();
      if (centers != null)
         centers.Fix();
     return ret;
 }
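A hedged usage sketch, assuming Mat's implicit conversions to the *Array wrapper types, a Cv2.Randu(dst, low, high) helper, and the KMeansFlag.PpCenters member name:
// Cluster 100 two-dimensional float samples into 3 groups.
using (var samples = new Mat(100, 2, MatType.CV_32FC1))
using (var labels = new Mat())
using (var centers = new Mat())
{
    Cv2.Randu(samples, new Scalar(0), new Scalar(255));  // hypothetical test data
    double compactness = Cv2.Kmeans(samples, 3, labels,
        TermCriteria.Both(10, 1.0), 5, KMeansFlag.PpCenters, centers);
}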
Example #15
 /// <summary>
 /// Performs initial step of meanshift segmentation of an image.
 /// The source matrix is 8-bit, 3-channel image.
 /// </summary>
 /// <param name="sp">The spatial window radius.</param>
 /// <param name="sr">The color window radius.</param>
 /// <param name="maxLevel">Maximum level of the pyramid for the segmentation.</param>
 /// <param name="termcrit">Termination criteria: when to stop meanshift iterations.</param>
 /// <returns>The destination image of the same format and the same size as the source.</returns>
 public Mat PyrMeanShiftFiltering(double sp, double sr, int maxLevel = 1, TermCriteria? termcrit = null)
 {
     var dst = new Mat();
     Cv2.PyrMeanShiftFiltering(this, dst, sp, sr, maxLevel, termcrit);
     return dst;
 }
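The instance form reads naturally when working directly with a Mat; a short hedged sketch (Cv2.ImRead assumed, the file name is illustrative):
using (var img = Cv2.ImRead("input.png"))
using (Mat segmented = img.PyrMeanShiftFiltering(20, 40))
{
    // 'segmented' has the same size and format as 'img', with flattened color regions.
}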
Example #16
#if LANG_JP
        /// <summary>
        /// 学習データを与えて初期化
        /// </summary>
        /// <param name="termCrit">学習アルゴリズムの終了条件.アルゴリズムにより何度繰り返されるか (逐次型の誤差逆伝播アルゴリズムでは,この数は学習データセットのサイズと掛け合わされる)と,1ターンで重みをどの程度変更するかを指定する.</param>
        /// <param name="trainMethod">用いる学習アルゴリズム</param>
        /// <param name="param1"></param>
        /// <param name="param2"></param>
#else
        /// <summary>
        /// Training constructor
        /// </summary>
        /// <param name="termCrit">The termination criteria for the training algorithm. It identifies how many iterations is done by the algorithm (for sequential backpropagation algorithm the number is multiplied by the size of the training set) and how much the weights could change between the iterations to make the algorithm continue. </param>
        /// <param name="trainMethod">The training algorithm to use</param>
		/// <param name="param1"></param>
		/// <param name="param2"></param>
#endif
        public CvANN_MLP_TrainParams(TermCriteria termCrit, MLPTrainingMethod trainMethod, double param1, double param2 = 0)
		{
            NativeMethods.ml_ANN_MLP_TrainParams_new2(
                termCrit, (int)trainMethod, param1, param2, out data);
		}
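A hedged usage sketch; MLPTrainingMethod.BackProp is an assumed member name, and param1/param2 map to the learning-rate parameters of the chosen training method:
// Train for at most 1000 epochs, or until weight updates fall below 0.01.
var criteria = TermCriteria.Both(1000, 0.01);
var trainParams = new CvANN_MLP_TrainParams(criteria, MLPTrainingMethod.BackProp, 0.1, 0.1);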