Example #1
        //
        // C++:  int meanShift(Mat probImage, Rect& window, TermCriteria criteria)
        //

        //javadoc: meanShift(probImage, window, criteria)
        public static int meanShift(Mat probImage, Rect window, TermCriteria criteria)
        {
            if (probImage != null)
            {
                probImage.ThrowIfDisposed();
            }
#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            // the updated search window is marshaled back from native code as [x, y, width, height]
            double[] window_out = new double[4];
            int      retVal     = video_Video_meanShift_10(probImage.nativeObj, window.x, window.y, window.width, window.height, window_out, criteria.type, criteria.maxCount, criteria.epsilon);
            if (window != null)
            {
                window.x = (int)window_out[0]; window.y = (int)window_out[1]; window.width = (int)window_out[2]; window.height = (int)window_out[3];
            }
            return(retVal);
#else
            return(-1);
#endif
        }
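A minimal usage sketch for the wrapper above. It assumes the method is exposed on a class named Video (as the native prefix video_Video_ suggests), that the TermCriteria.EPS/COUNT constants mirror the OpenCV Java API, and that the appropriate using directives are in place; the back-projection Mat is a placeholder that would normally come from Imgproc.calcBackProject.

        // Usage sketch (assumptions: wrapper class named Video; placeholder back-projection image).
        Mat backProj = new Mat(240, 320, CvType.CV_8UC1, new Scalar(0));   // placeholder; use a real back-projection
        Rect trackWindow = new Rect(50, 50, 100, 100);                     // initial search window
        TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 10, 1.0);
        int iterations = Video.meanShift(backProj, trackWindow, criteria);
        // trackWindow has been updated in place; iterations is the number of mean-shift steps performed.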
Example #2
        //
        // C++:  RotatedRect CamShift(Mat probImage, Rect& window, TermCriteria criteria)
        //

        //javadoc: CamShift(probImage, window, criteria)
        public static RotatedRect CamShift(Mat probImage, Rect window, TermCriteria criteria)
        {
            if (probImage != null)
            {
                probImage.ThrowIfDisposed();
            }

#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR) || UNITY_5
            double[] window_out = new double[4];
            // receives the resulting rotated box from native code as [center.x, center.y, size.width, size.height, angle]
            double[] tmpArray   = new double[5];

            video_Video_CamShift_10(probImage.nativeObj, window.x, window.y, window.width, window.height, window_out, criteria.type, criteria.maxCount, criteria.epsilon, tmpArray);
            RotatedRect retVal = new RotatedRect(tmpArray);
            if (window != null)
            {
                window.x = (int)window_out[0]; window.y = (int)window_out[1]; window.width = (int)window_out[2]; window.height = (int)window_out[3];
            }
            return(retVal);
#else
            return(null);
#endif
        }
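The same pattern applies to CamShift. The sketch below is hypothetical in the same sense (class name Video, placeholder back-projection) and shows that the returned RotatedRect carries the oriented box while the window is updated in place.

        // Usage sketch (assumptions as above: class name Video; placeholder back-projection).
        Mat backProj = new Mat(240, 320, CvType.CV_8UC1, new Scalar(0));
        Rect trackWindow = new Rect(50, 50, 100, 100);
        TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 10, 1.0);
        RotatedRect box = Video.CamShift(backProj, trackWindow, criteria);
        // box is the oriented bounding box of the tracked region; trackWindow is updated for the next frame.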
Example #3
        //
        // C++:   BOWKMeansTrainer(int clusterCount, TermCriteria termcrit = TermCriteria(), int attempts = 3, int flags = KMEANS_PP_CENTERS)
        //

        //javadoc: BOWKMeansTrainer::BOWKMeansTrainer(clusterCount, termcrit, attempts, flags)
        public BOWKMeansTrainer(int clusterCount, TermCriteria termcrit, int attempts, int flags) :
#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            base(features2d_BOWKMeansTrainer_BOWKMeansTrainer_10(clusterCount, termcrit.type, termcrit.maxCount, termcrit.epsilon, attempts, flags))
#else
            base(IntPtr.Zero)
#endif
        {
        }
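A brief, hypothetical construction sketch for the trainer above. It assumes the wrapper mirrors OpenCV's BOWTrainer interface (add/cluster) and that Core.KMEANS_PP_CENTERS is exposed; neither of those names appears in the snippet itself.

        // Usage sketch (assumptions: BOWTrainer-style add()/cluster() methods; Core.KMEANS_PP_CENTERS flag).
        TermCriteria termcrit = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 100, 0.001);
        BOWKMeansTrainer trainer = new BOWKMeansTrainer(64, termcrit, 3, Core.KMEANS_PP_CENTERS);
        Mat descriptors = new Mat();         // placeholder; fill with CV_32F local feature descriptors
        trainer.add(descriptors);
        Mat vocabulary = trainer.cluster();  // 64 cluster centers, one row per visual word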
Example #4
        //
        // C++:   EM::EM(int nclusters = EM::DEFAULT_NCLUSTERS, int covMatType = EM::COV_MAT_DIAGONAL, TermCriteria termCrit = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, EM::DEFAULT_MAX_ITERS, FLT_EPSILON))
        //

        /**
         * <p>The constructor of the class</p>
         *
         * @param nclusters The number of mixture components in the Gaussian mixture
         * model. Default value of the parameter is <code>EM.DEFAULT_NCLUSTERS=5</code>.
         * Some EM implementations could determine the optimal number of mixtures
         * within a specified value range, but that is not the case in ML yet.
         * @param covMatType Constraint on covariance matrices which defines type of
         * matrices. Possible values are:
         * <ul>
         *   <li> EM.COV_MAT_SPHERICAL A scaled identity matrix <em>mu_k * I</em>.
         * There is only one parameter, <em>mu_k</em>, to be estimated for each matrix.
         * The option may be used in special cases, when the constraint is relevant, or
         * as a first step in the optimization (for example, when the data is
         * preprocessed with PCA). The results of such preliminary estimation may be
         * passed again to the optimization procedure, this time with <code>covMatType=EM.COV_MAT_DIAGONAL</code>.
         *   <li> EM.COV_MAT_DIAGONAL A diagonal matrix with positive diagonal
         * elements. The number of free parameters is <code>d</code> for each matrix.
         * This is the most commonly used option, yielding good estimation results.
         *   <li> EM.COV_MAT_GENERIC A symmetric positive-definite matrix. The number
         * of free parameters in each matrix is about <em>d^2/2</em>. This option is not
         * recommended unless there is a fairly accurate initial estimate of the
         * parameters and/or a huge number of training samples.
         * </ul>
         * @param termCrit The termination criteria of the EM algorithm. The EM
         * algorithm can be terminated by the number of iterations <code>termCrit.maxCount</code>
         * (number of M-steps) or when the relative change of the likelihood logarithm
         * is less than <code>termCrit.epsilon</code>. Default maximum number of iterations is
         * <code>EM.DEFAULT_MAX_ITERS=100</code>.
         *
         * @see <a href="http://docs.opencv.org/modules/ml/doc/expectation_maximization.html#em-em">org.opencv.ml.EM.EM</a>
         */
        public EM(int nclusters, int covMatType, TermCriteria termCrit)
            :
#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR) || UNITY_5
            base(ml_EM_EM_10(nclusters, covMatType, termCrit.type, termCrit.maxCount, termCrit.epsilon))
#else
            base(IntPtr.Zero)
#endif
        {
        }
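A short, hypothetical training sketch for the constructor documented above. It assumes a 2.4-style train(samples) overload exists on EM, which is not shown in the snippet; the sample matrix is a placeholder created only for illustration.

        // Usage sketch (assumption: a 2.4-style train(Mat samples) overload exists on EM).
        TermCriteria termCrit = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 100, 1e-6);
        EM em = new EM(5, EM.COV_MAT_DIAGONAL, termCrit);   // 5 mixture components, diagonal covariances
        Mat samples = new Mat(100, 2, CvType.CV_32FC1);     // placeholder; fill with real 2-D training samples
        bool trained = em.train(samples);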
Example #5
        //javadoc: calibrate(objectPoints, imagePoints, size, K, xi, D, rvecs, tvecs, flags, criteria)
        public static double calibrate(Mat objectPoints, Mat imagePoints, Size size, Mat K, Mat xi, Mat D, List <Mat> rvecs, List <Mat> tvecs, int flags, TermCriteria criteria)
        {
            if (objectPoints != null)
            {
                objectPoints.ThrowIfDisposed();
            }
            if (imagePoints != null)
            {
                imagePoints.ThrowIfDisposed();
            }
            if (K != null)
            {
                K.ThrowIfDisposed();
            }
            if (xi != null)
            {
                xi.ThrowIfDisposed();
            }
            if (D != null)
            {
                D.ThrowIfDisposed();
            }

#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR) || UNITY_5
            Mat    rvecs_mat = new Mat();
            Mat    tvecs_mat = new Mat();
            double retVal    = ccalib_Ccalib_calibrate_11(objectPoints.nativeObj, imagePoints.nativeObj, size.width, size.height, K.nativeObj, xi.nativeObj, D.nativeObj, rvecs_mat.nativeObj, tvecs_mat.nativeObj, flags, criteria.type, criteria.maxCount, criteria.epsilon);
            Converters.Mat_to_vector_Mat(rvecs_mat, rvecs);
            rvecs_mat.release();
            Converters.Mat_to_vector_Mat(tvecs_mat, tvecs);
            tvecs_mat.release();
            return(retVal);
#else
            return(-1);
#endif
        }
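A hypothetical calling sketch for the omnidirectional calibration wrapper above. The class name Ccalib follows the native prefix ccalib_Ccalib_, the using directive for List<Mat> is assumed to be in place, and every input is a placeholder that would have to be filled with real calibration-pattern detections.

        // Usage sketch (assumptions: wrapper class named Ccalib; placeholder calibration data).
        Mat objectPoints = new Mat();   // 3D pattern points per view
        Mat imagePoints  = new Mat();   // corresponding 2D detections
        Mat K = new Mat(), xi = new Mat(), D = new Mat();
        List<Mat> rvecs = new List<Mat>();
        List<Mat> tvecs = new List<Mat>();
        TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 200, 1e-7);
        double rms = Ccalib.calibrate(objectPoints, imagePoints, new Size(640, 480),
                                      K, xi, D, rvecs, tvecs, 0, criteria);
        // rms is the reprojection error; K, xi and D receive the estimated camera parameters.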
Example #6
        //javadoc: stereoCalibrate(objectPoints, imagePoints1, imagePoints2, imageSize1, imageSize2, K1, xi1, D1, K2, xi2, D2, rvec, tvec, rvecsL, tvecsL, flags, criteria)
        public static double stereoCalibrate(List <Mat> objectPoints, List <Mat> imagePoints1, List <Mat> imagePoints2, Size imageSize1, Size imageSize2, Mat K1, Mat xi1, Mat D1, Mat K2, Mat xi2, Mat D2, Mat rvec, Mat tvec, List <Mat> rvecsL, List <Mat> tvecsL, int flags, TermCriteria criteria)
        {
            if (K1 != null)
            {
                K1.ThrowIfDisposed();
            }
            if (xi1 != null)
            {
                xi1.ThrowIfDisposed();
            }
            if (D1 != null)
            {
                D1.ThrowIfDisposed();
            }
            if (K2 != null)
            {
                K2.ThrowIfDisposed();
            }
            if (xi2 != null)
            {
                xi2.ThrowIfDisposed();
            }
            if (D2 != null)
            {
                D2.ThrowIfDisposed();
            }
            if (rvec != null)
            {
                rvec.ThrowIfDisposed();
            }
            if (tvec != null)
            {
                tvec.ThrowIfDisposed();
            }

#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR) || UNITY_5
            Mat    objectPoints_mat = Converters.vector_Mat_to_Mat(objectPoints);
            Mat    imagePoints1_mat = Converters.vector_Mat_to_Mat(imagePoints1);
            Mat    imagePoints2_mat = Converters.vector_Mat_to_Mat(imagePoints2);
            Mat    rvecsL_mat       = new Mat();
            Mat    tvecsL_mat       = new Mat();
            double retVal           = ccalib_Ccalib_stereoCalibrate_11(objectPoints_mat.nativeObj, imagePoints1_mat.nativeObj, imagePoints2_mat.nativeObj, imageSize1.width, imageSize1.height, imageSize2.width, imageSize2.height, K1.nativeObj, xi1.nativeObj, D1.nativeObj, K2.nativeObj, xi2.nativeObj, D2.nativeObj, rvec.nativeObj, tvec.nativeObj, rvecsL_mat.nativeObj, tvecsL_mat.nativeObj, flags, criteria.type, criteria.maxCount, criteria.epsilon);
            Converters.Mat_to_vector_Mat(objectPoints_mat, objectPoints);
            objectPoints_mat.release();
            Converters.Mat_to_vector_Mat(imagePoints1_mat, imagePoints1);
            imagePoints1_mat.release();
            Converters.Mat_to_vector_Mat(imagePoints2_mat, imagePoints2);
            imagePoints2_mat.release();
            Converters.Mat_to_vector_Mat(rvecsL_mat, rvecsL);
            rvecsL_mat.release();
            Converters.Mat_to_vector_Mat(tvecsL_mat, tvecsL);
            tvecsL_mat.release();
            return(retVal);
#else
            return(-1);
#endif
        }
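A compact, hypothetical sketch of the stereo variant, under the same assumptions (class name Ccalib, placeholder inputs, using directives for List<Mat> in place).

        // Usage sketch (assumptions as above; every container below must be filled with real calibration data).
        List<Mat> objectPoints = new List<Mat>();
        List<Mat> imagePoints1 = new List<Mat>();
        List<Mat> imagePoints2 = new List<Mat>();
        Mat K1 = new Mat(), xi1 = new Mat(), D1 = new Mat();
        Mat K2 = new Mat(), xi2 = new Mat(), D2 = new Mat();
        Mat rvec = new Mat(), tvec = new Mat();
        List<Mat> rvecsL = new List<Mat>();
        List<Mat> tvecsL = new List<Mat>();
        TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 200, 1e-7);
        double rms = Ccalib.stereoCalibrate(objectPoints, imagePoints1, imagePoints2,
                                            new Size(640, 480), new Size(640, 480),
                                            K1, xi1, D1, K2, xi2, D2, rvec, tvec,
                                            rvecsL, tvecsL, 0, criteria);
        // rvec and tvec receive the rotation and translation from the first to the second camera.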
Example #7
        //
        // C++:  void calcOpticalFlowPyrLK(Mat prevImg, Mat nextImg, vector_Point2f prevPts, vector_Point2f& nextPts, vector_uchar& status, vector_float& err, Size winSize = Size(21,21), int maxLevel = 3, TermCriteria criteria = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 0.01), int flags = 0, double minEigThreshold = 1e-4)
        //

        //javadoc: calcOpticalFlowPyrLK(prevImg, nextImg, prevPts, nextPts, status, err, winSize, maxLevel, criteria, flags, minEigThreshold)
        public static void calcOpticalFlowPyrLK(Mat prevImg, Mat nextImg, MatOfPoint2f prevPts, MatOfPoint2f nextPts, MatOfByte status, MatOfFloat err, Size winSize, int maxLevel, TermCriteria criteria, int flags, double minEigThreshold)
        {
            if (prevImg != null)
            {
                prevImg.ThrowIfDisposed();
            }
            if (nextImg != null)
            {
                nextImg.ThrowIfDisposed();
            }
            if (prevPts != null)
            {
                prevPts.ThrowIfDisposed();
            }
            if (nextPts != null)
            {
                nextPts.ThrowIfDisposed();
            }
            if (status != null)
            {
                status.ThrowIfDisposed();
            }
            if (err != null)
            {
                err.ThrowIfDisposed();
            }
#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat prevPts_mat = prevPts;
            Mat nextPts_mat = nextPts;
            Mat status_mat  = status;
            Mat err_mat     = err;
            video_Video_calcOpticalFlowPyrLK_10(prevImg.nativeObj, nextImg.nativeObj, prevPts_mat.nativeObj, nextPts_mat.nativeObj, status_mat.nativeObj, err_mat.nativeObj, winSize.width, winSize.height, maxLevel, criteria.type, criteria.maxCount, criteria.epsilon, flags, minEigThreshold);

            return;
#else
            return;
#endif
        }
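A short, hypothetical tracking sketch. It assumes the wrapper class is named Video (as the native prefix suggests) and that the point set to track was found on the previous frame, for example with Imgproc.goodFeaturesToTrack; the Mats below are placeholders.

        // Usage sketch (assumptions: class name Video; prevGray/nextGray and prevPts hold real data in practice).
        Mat prevGray = new Mat(), nextGray = new Mat();   // consecutive grayscale frames (placeholders)
        MatOfPoint2f prevPts = new MatOfPoint2f();        // points detected on prevGray
        MatOfPoint2f nextPts = new MatOfPoint2f();
        MatOfByte status = new MatOfByte();
        MatOfFloat err = new MatOfFloat();
        TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 30, 0.01);
        Video.calcOpticalFlowPyrLK(prevGray, nextGray, prevPts, nextPts, status, err,
                                   new Size(21, 21), 3, criteria, 0, 1e-4);
        // status marks which points were tracked successfully; err holds the per-point tracking error.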
Example #8
        //
        // C++:  double findTransformECC(Mat templateImage, Mat inputImage, Mat& warpMatrix, int motionType = MOTION_AFFINE, TermCriteria criteria = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 50, 0.001), Mat inputMask = Mat())
        //

        //javadoc: findTransformECC(templateImage, inputImage, warpMatrix, motionType, criteria, inputMask)
        public static double findTransformECC(Mat templateImage, Mat inputImage, Mat warpMatrix, int motionType, TermCriteria criteria, Mat inputMask)
        {
            if (templateImage != null)
            {
                templateImage.ThrowIfDisposed();
            }
            if (inputImage != null)
            {
                inputImage.ThrowIfDisposed();
            }
            if (warpMatrix != null)
            {
                warpMatrix.ThrowIfDisposed();
            }
            if (inputMask != null)
            {
                inputMask.ThrowIfDisposed();
            }
#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            double retVal = video_Video_findTransformECC_10(templateImage.nativeObj, inputImage.nativeObj, warpMatrix.nativeObj, motionType, criteria.type, criteria.maxCount, criteria.epsilon, inputMask.nativeObj);

            return(retVal);
#else
            return(-1);
#endif
        }
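A hypothetical alignment sketch for the ECC wrapper above. It assumes the class name Video and that a MOTION_AFFINE constant is exposed alongside the method; neither appears in the snippet, and the images are placeholders.

        // Usage sketch (assumptions: class name Video; MOTION_AFFINE constant; placeholder images).
        Mat templateGray = new Mat(), inputGray = new Mat();   // grayscale images to align (placeholders)
        Mat warpMatrix = Mat.eye(2, 3, CvType.CV_32F);          // identity initialization for a 2x3 affine warp
        TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 50, 0.001);
        double cc = Video.findTransformECC(templateGray, inputGray, warpMatrix,
                                           Video.MOTION_AFFINE, criteria, new Mat());
        // cc is the final correlation coefficient; warpMatrix now holds the estimated affine warp.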