Code example #1
File: KNearest.cs, Project: ziyouhenzi/opencvsharp
        /// <summary>
        /// Finds the neighbors and predicts responses for input vectors.
        /// </summary>
        /// <param name="samples">Input samples stored by rows.
        /// It is a single-precision floating-point matrix of `[number_of_samples] * k` size.</param>
        /// <param name="k">Number of used nearest neighbors. Should be greater than 1.</param>
        /// <param name="results">Vector with results of prediction (regression or classification) for each
        /// input sample. It is a single-precision floating-point vector with `[number_of_samples]` elements.</param>
        /// <param name="neighborResponses">neighborResponses Optional output values for corresponding neighbors.
        /// It is a single-precision floating-point matrix of `[number_of_samples] * k` size.</param>
        /// <param name="dist">Optional output distances from the input vectors to the corresponding neighbors.
        /// It is a single-precision floating-point matrix of `[number_of_samples] * k` size.</param>
        /// <returns></returns>
        public float FindNearest(InputArray samples, int k, OutputArray results,
                                 OutputArray? neighborResponses = null, OutputArray? dist = null)
        {
            ThrowIfDisposed();
            if (samples == null)
            {
                throw new ArgumentNullException(nameof(samples));
            }
            if (results == null)
            {
                throw new ArgumentNullException(nameof(results));
            }
            samples.ThrowIfDisposed();
            results.ThrowIfNotReady();

            NativeMethods.HandleException(
                NativeMethods.ml_KNearest_findNearest(
                    ptr,
                    samples.CvPtr, k, results.CvPtr,
                    Cv2.ToPtr(neighborResponses), Cv2.ToPtr(dist), out var ret));

            GC.KeepAlive(this);
            GC.KeepAlive(samples);
            GC.KeepAlive(results);
            GC.KeepAlive(neighborResponses);
            GC.KeepAlive(dist);
            results.Fix();
            neighborResponses?.Fix();
            dist?.Fix();
            return ret;
        }
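A minimal usage sketch of FindNearest; the training points, labels, and the value of k below are made-up illustration data, not anything prescribed by the library:

using System;
using OpenCvSharp;
using OpenCvSharp.ML;

// Toy training set: two clusters of 2-D points with labels 0 and 1 (illustrative values).
using var trainData = new Mat(4, 2, MatType.CV_32FC1, new float[] { 0, 0, 1, 1, 10, 10, 11, 11 });
using var labels = new Mat(4, 1, MatType.CV_32SC1, new[] { 0, 0, 1, 1 });

using var knn = KNearest.Create();
knn.Train(trainData, SampleTypes.RowSample, labels);

// Query one sample; results receives the predicted response per input row.
using var sample = new Mat(1, 2, MatType.CV_32FC1, new float[] { 9.5f, 10.5f });
using var results = new Mat();
float response = knn.FindNearest(sample, k: 3, results);
Console.WriteLine(response); // expected to be near 1 for this toy data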
Code example #2
        /// <summary>
        /// Finds the template on the input image.
        /// </summary>
        /// <param name="image">Input image.</param>
        /// <param name="positions">Found object positions.</param>
        /// <param name="votes">Optional output vector of votes for the positions.</param>
        public virtual void Detect(
            InputArray image, OutputArray positions, OutputArray? votes = null)
        {
            if (image == null)
            {
                throw new ArgumentNullException(nameof(image));
            }
            if (positions == null)
            {
                throw new ArgumentNullException(nameof(positions));
            }
            image.ThrowIfDisposed();
            positions.ThrowIfNotReady();
            votes?.ThrowIfNotReady();

            NativeMethods.HandleException(
                NativeMethods.imgproc_GeneralizedHough_detect1(
                    ptr, image.CvPtr, positions.CvPtr, Cv2.ToPtr(votes)));

            GC.KeepAlive(this);
            GC.KeepAlive(image);
            GC.KeepAlive(positions);
            GC.KeepAlive(votes);
            positions.Fix();
            votes?.Fix();
        }
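A sketch of how Detect might be called. The image paths are placeholders, and the GeneralizedHoughBallard.Create/SetTemplate members are assumed from the wrapper's generalized Hough classes; adjust to the API actually available:

using OpenCvSharp;

// Placeholder paths; both images are read as single-channel for this variant.
using var templ = Cv2.ImRead("template.png", ImreadModes.Grayscale);
using var image = Cv2.ImRead("scene.png", ImreadModes.Grayscale);

using var gh = GeneralizedHoughBallard.Create();
gh.SetTemplate(templ);

// positions receives the found candidate positions; the votes output is omitted here.
using var positions = new Mat();
gh.Detect(image, positions);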
Code example #3
File: CvText.cs, Project: ziyouhenzi/opencvsharp
        /// <summary>
        /// Applies the Stroke Width Transform operator followed by filtering of connected components of similar Stroke Widths to
        /// return letter candidates. It also chains them by proximity and size, saving the result in chainBBs.
        /// </summary>
        /// <param name="input">input the input image with 3 channels.</param>
        /// <param name="darkOnLight">a boolean value signifying whether the text is darker or lighter than the background,
        /// it is observed to reverse the gradient obtained from Scharr operator, and significantly affect the result.</param>
        /// <param name="draw">an optional Mat of type CV_8UC3 which visualises the detected letters using bounding boxes.</param>
        /// <param name="chainBBs">an optional parameter which chains the letter candidates according to heuristics in the
        /// paper and returns all possible regions where text is likely to occur.</param>
        /// <returns>a vector of resulting bounding boxes where probability of finding text is high</returns>
        public static Rect[] DetectTextSWT(
            InputArray input, bool darkOnLight, OutputArray? draw = null, OutputArray? chainBBs = null)
        {
            if (input == null)
            {
                throw new ArgumentNullException(nameof(input));
            }
            input.ThrowIfDisposed();
            draw?.ThrowIfNotReady();
            chainBBs?.ThrowIfNotReady();

            using var result = new VectorOfRect();
            NativeMethods.HandleException(
                NativeMethods.text_detectTextSWT(
                    input.CvPtr,
                    result.CvPtr,
                    darkOnLight ? 1 : 0,
                    draw?.CvPtr ?? IntPtr.Zero,
                    chainBBs?.CvPtr ?? IntPtr.Zero));

            GC.KeepAlive(input);
            draw?.Fix();
            chainBBs?.Fix();

            return result.ToArray();
        }
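A usage sketch for DetectTextSWT; the input path is a placeholder, and the call assumes the static CvText class shown in the file header above:

using OpenCvSharp;
using OpenCvSharp.Text;

using var src = Cv2.ImRead("scene.png", ImreadModes.Color); // must have 3 channels
using var visualization = new Mat();                        // optional CV_8UC3 overlay

Rect[] letterBoxes = CvText.DetectTextSWT(src, darkOnLight: true, visualization);
foreach (var box in letterBoxes)
    Cv2.Rectangle(src, box, Scalar.Red, thickness: 2);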
Code example #4
        /// <summary>
        /// Decodes QR code in image once it's found by the detect() method.
        /// Returns UTF8-encoded output string or empty string if the code cannot be decoded.
        /// </summary>
        /// <param name="img">grayscale or color (BGR) image containing QR code.</param>
        /// <param name="points">Quadrangle vertices found by detect() method (or some other algorithm).</param>
        /// <param name="straightQrCode">The optional output image containing rectified and binarized QR code</param>
        /// <returns>UTF8-encoded string decoded from the QR code, or an empty string if the code cannot be decoded.</returns>
        public string Decode(InputArray img, IEnumerable<Point2f> points, OutputArray? straightQrCode = null)
        {
            if (img == null)
            {
                throw new ArgumentNullException(nameof(img));
            }
            if (points == null)
            {
                throw new ArgumentNullException(nameof(points));
            }
            img.ThrowIfDisposed();
            straightQrCode?.ThrowIfNotReady();

            using var pointsVec    = new VectorOfPoint2f(points);
            using var resultString = new StdString();
            NativeMethods.HandleException(
                NativeMethods.objdetect_QRCodeDetector_decode(
                    ptr, img.CvPtr, pointsVec.CvPtr, Cv2.ToPtr(straightQrCode), resultString.CvPtr));

            GC.KeepAlive(img);
            GC.KeepAlive(points);
            GC.KeepAlive(straightQrCode);
            GC.KeepAlive(this);

            return resultString.ToString();
        }
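A two-step detect-then-decode sketch; the image path is a placeholder, and it assumes the companion Detect overload that returns the quadrangle as a Point2f[]:

using System;
using OpenCvSharp;

using var img = Cv2.ImRead("qr.png");
using var detector = new QRCodeDetector();

if (detector.Detect(img, out Point2f[] corners))
{
    string text = detector.Decode(img, corners);
    Console.WriteLine(text.Length == 0 ? "QR code found but not decodable" : text);
}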
Code example #5
        /// <summary>
        /// Estimates Gaussian mixture parameters from the sample set
        /// </summary>
        /// <param name="samples"></param>
        /// <param name="logLikelihoods"></param>
        /// <param name="labels"></param>
        /// <param name="probs"></param>
        /// <returns></returns>
        // ReSharper disable once InconsistentNaming
        public virtual bool TrainEM(
            InputArray samples,
            OutputArray? logLikelihoods = null,
            OutputArray? labels = null,
            OutputArray? probs = null)
        {
            ThrowIfDisposed();
            if (samples == null)
            {
                throw new ArgumentNullException(nameof(samples));
            }
            samples.ThrowIfDisposed();

            logLikelihoods?.ThrowIfNotReady();
            labels?.ThrowIfNotReady();
            probs?.ThrowIfNotReady();

            var ret = NativeMethods.ml_EM_trainEM(
                ptr,
                samples.CvPtr,
                Cv2.ToPtr(logLikelihoods),
                Cv2.ToPtr(labels),
                Cv2.ToPtr(probs));

            logLikelihoods?.Fix();
            labels?.Fix();
            probs?.Fix();
            GC.KeepAlive(this);
            GC.KeepAlive(samples);
            GC.KeepAlive(logLikelihoods);
            GC.KeepAlive(labels);
            GC.KeepAlive(probs);
            return ret != 0;
        }
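A minimal sketch of fitting a two-component mixture with TrainEM; the sample values are invented for illustration:

using System;
using OpenCvSharp;
using OpenCvSharp.ML;

// Six 1-D samples drawn (by hand) from two well-separated groups.
using var samples = new Mat(6, 1, MatType.CV_64FC1,
    new double[] { 0.1, 0.2, 0.3, 9.8, 10.1, 10.3 });

using var em = EM.Create();
em.ClustersNumber = 2;

using var labels = new Mat();
bool trained = em.TrainEM(samples, labels: labels);
Console.WriteLine(trained ? $"labels: {labels.Rows} rows" : "training failed");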
Code example #6
        /// <summary>
        /// Both detects and decodes QR code
        /// </summary>
        /// <param name="img">grayscale or color (BGR) image containing QR code.</param>
        /// <param name="points">opiotnal output array of vertices of the found QR code quadrangle. Will be empty if not found.</param>
        /// <param name="straightQrcode">The optional output image containing rectified and binarized QR code</param>
        /// <returns>UTF8-encoded string decoded from the QR code, or an empty string if the code cannot be detected or decoded.</returns>
        public string DetectAndDecode(InputArray img, out Point2f[] points, OutputArray? straightQrcode = null)
        {
            if (img == null)
            {
                throw new ArgumentNullException(nameof(img));
            }
            img.ThrowIfDisposed();
            straightQrcode?.ThrowIfNotReady();

            string result;

            using (var pointsVec = new VectorOfPoint2f())
            using (var resultString = new StdString())
            {
                NativeMethods.objdetect_QRCodeDetector_detectAndDecode(
                    ptr, img.CvPtr, pointsVec.CvPtr, Cv2.ToPtr(straightQrcode), resultString.CvPtr);
                points = pointsVec.ToArray();
                result = resultString.ToString();
            }

            GC.KeepAlive(img);
            GC.KeepAlive(straightQrcode);
            GC.KeepAlive(this);

            return result;
        }
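The combined call in use (placeholder image path):

using System;
using OpenCvSharp;

using var img = Cv2.ImRead("qr.png");
using var detector = new QRCodeDetector();

string decoded = detector.DetectAndDecode(img, out Point2f[] corners);
Console.WriteLine(decoded.Length > 0
    ? $"decoded \"{decoded}\" from a quad with {corners.Length} corners"
    : "no QR code decoded");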
Code example #7
File: StatModel.cs, Project: yushuiqiang/opencvsharp
        /// <summary>
        /// Predicts response(s) for the provided sample(s)
        /// </summary>
        /// <param name="samples">The input samples, floating-point matrix</param>
        /// <param name="results">The optional output matrix of results.</param>
        /// <param name="flags">The optional flags, model-dependent.</param>
        /// <returns></returns>
        public virtual float Predict(InputArray samples, OutputArray? results = null, Flags flags = 0)
        {
            if (ptr == IntPtr.Zero)
            {
                throw new ObjectDisposedException(GetType().Name);
            }
            if (samples == null)
            {
                throw new ArgumentNullException(nameof(samples));
            }
            samples.ThrowIfDisposed();
            if (results != null)
            {
                results.ThrowIfNotReady();
            }

            float ret = NativeMethods.ml_StatModel_predict(
                ptr, samples.CvPtr, Cv2.ToPtr(results), (int)flags);

            GC.KeepAlive(this);
            GC.KeepAlive(samples);
            GC.KeepAlive(results);
            if (results != null)
            {
                results.Fix();
            }
            return ret;
        }
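A sketch of Predict on a trained model; here an SVM stands in for any StatModel-derived learner, and the training data are toy values:

using System;
using OpenCvSharp;
using OpenCvSharp.ML;

using var trainData = new Mat(4, 2, MatType.CV_32FC1, new float[] { 0, 0, 1, 1, 10, 10, 11, 11 });
using var responses = new Mat(4, 1, MatType.CV_32SC1, new[] { 0, 0, 1, 1 });

using var svm = SVM.Create();
svm.Train(trainData, SampleTypes.RowSample, responses);

// Predict a single row; omitting the results matrix returns the response directly.
using var query = new Mat(1, 2, MatType.CV_32FC1, new float[] { 0.5f, 0.5f });
float predicted = svm.Predict(query);
Console.WriteLine(predicted); // expected to be 0 for this toy data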
Code example #8
File: EM.cs, Project: zhaohh1985/opencvsharp
        /// <summary>
        /// Estimate the Gaussian mixture parameters from a samples set.
        /// </summary>
        /// <param name="samples">Samples from which the Gaussian mixture model will be estimated. It should be a
        /// one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
        /// it will be converted to the inner matrix of such type for the further computing.</param>
        /// <param name="means0">Initial means \f$a_k\f$ of mixture components. It is a one-channel matrix of
        /// \f$nclusters \times dims\f$ size. If the matrix does not have CV_64F type it will be
        /// converted to the inner matrix of such type for the further computing.</param>
        /// <param name="covs0">The vector of initial covariance matrices \f$S_k\f$ of mixture components. Each of
        /// covariance matrices is a one-channel matrix of \f$dims \times dims\f$ size. If the matrices
        /// do not have CV_64F type they will be converted to the inner matrices of such type for the further computing.</param>
        /// <param name="weights0">Initial weights \f$\pi_k\f$ of mixture components. It should be a one-channel
        /// floating-point matrix with \f$1 \times nclusters\f$ or \f$nclusters \times 1\f$ size.</param>
        /// <param name="logLikelihoods">The optional output matrix that contains a likelihood logarithm value for
        /// each sample. It has \f$nsamples \times 1\f$ size and CV_64FC1 type.</param>
        /// <param name="labels">The optional output "class label" for each sample:
        /// \f$\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\f$ (indices of the most probable
        /// mixture component for each sample). It has \f$nsamples \times 1\f$ size and CV_32SC1 type.</param>
        /// <param name="probs">The optional output matrix that contains posterior probabilities of each Gaussian
        /// mixture component given the each sample. It has \f$nsamples \times nclusters\f$ size and CV_64FC1 type.</param>
        public virtual bool TrainE(
            InputArray samples,
            InputArray means0,
            InputArray? covs0 = null,
            InputArray? weights0 = null,
            OutputArray? logLikelihoods = null,
            OutputArray? labels = null,
            OutputArray? probs = null)
        {
            ThrowIfDisposed();
            if (samples == null)
            {
                throw new ArgumentNullException(nameof(samples));
            }
            if (means0 == null)
            {
                throw new ArgumentNullException(nameof(means0));
            }
            samples.ThrowIfDisposed();
            means0.ThrowIfDisposed();

            logLikelihoods?.ThrowIfNotReady();
            covs0?.ThrowIfDisposed();
            weights0?.ThrowIfDisposed();
            labels?.ThrowIfNotReady();
            probs?.ThrowIfNotReady();

            NativeMethods.HandleException(
                NativeMethods.ml_EM_trainE(
                    ptr,
                    samples.CvPtr,
                    means0.CvPtr,
                    Cv2.ToPtr(covs0),
                    Cv2.ToPtr(weights0),
                    Cv2.ToPtr(logLikelihoods),
                    Cv2.ToPtr(labels),
                    Cv2.ToPtr(probs),
                    out var ret));

            logLikelihoods?.Fix();
            labels?.Fix();
            probs?.Fix();
            GC.KeepAlive(this);
            GC.KeepAlive(samples);
            GC.KeepAlive(means0);
            GC.KeepAlive(covs0);
            GC.KeepAlive(weights0);
            GC.KeepAlive(logLikelihoods);
            GC.KeepAlive(labels);
            GC.KeepAlive(probs);
            return ret != 0;
        }
Code example #9
        /// <summary>
        /// Pose estimation for single markers
        /// </summary>
        /// <param name="corners">corners vector of already detected markers corners.
        /// For each marker, its four corners are provided, (e.g std::vector&lt;std::vector&lt;cv::Point2f&gt;&gt; ).
        /// For N detected markers, the dimensions of this array should be Nx4. The order of the corners should be clockwise.</param>
        /// <param name="markerLength">the length of the markers' side. The returning translation vectors will
        /// be in the same unit.Normally, unit is meters.</param>
        /// <param name="cameraMatrix">input 3x3 floating-point camera matrix
        /// \f$A = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$</param>
        /// <param name="distortionCoefficients">vector of distortion coefficients
        /// \f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6],[s_1, s_2, s_3, s_4]])\f$ of 4, 5, 8 or 12 elements</param>
        /// <param name="rvec">array of output rotation vectors (@sa Rodrigues) (e.g. std::vector&lt;cv::Vec3d&gt;).
        /// Each element in rvecs corresponds to the specific marker in imgPoints.</param>
        /// <param name="tvec">array of output translation vectors (e.g. std::vector&lt;cv::Vec3d&gt;).
        /// Each element in tvecs corresponds to the specific marker in imgPoints.</param>
        /// <param name="objPoints">array of object points of all the marker corners</param>
        public static void EstimatePoseSingleMarkers(
            Point2f[][] corners,
            float markerLength,
            InputArray cameraMatrix,
            InputArray distortionCoefficients,
            OutputArray rvec,
            OutputArray tvec,
            OutputArray? objPoints = null)
        {
            if (corners == null)
            {
                throw new ArgumentNullException(nameof(corners));
            }
            if (cameraMatrix == null)
            {
                throw new ArgumentNullException(nameof(cameraMatrix));
            }
            if (distortionCoefficients == null)
            {
                throw new ArgumentNullException(nameof(distortionCoefficients));
            }
            if (rvec == null)
            {
                throw new ArgumentNullException(nameof(rvec));
            }
            if (tvec == null)
            {
                throw new ArgumentNullException(nameof(tvec));
            }

            cameraMatrix.ThrowIfDisposed();
            distortionCoefficients.ThrowIfDisposed();
            rvec.ThrowIfNotReady();
            tvec.ThrowIfNotReady();
            objPoints?.ThrowIfNotReady();

            using var cornersAddress = new ArrayAddress2<Point2f>(corners);

            NativeMethods.HandleException(
                NativeMethods.aruco_estimatePoseSingleMarkers(
                    cornersAddress.GetPointer(), cornersAddress.GetDim1Length(), cornersAddress.GetDim2Lengths(),
                    markerLength, cameraMatrix.CvPtr, distortionCoefficients.CvPtr, rvec.CvPtr, tvec.CvPtr,
                    objPoints?.CvPtr ?? IntPtr.Zero));

            GC.KeepAlive(cameraMatrix);
            GC.KeepAlive(distortionCoefficients);
            rvec.Fix();
            tvec.Fix();
            objPoints?.Fix();
        }
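A pose-estimation sketch. The corner coordinates and calibration values below are placeholders: corners would normally come from the wrapper's marker detection (e.g. CvAruco.DetectMarkers), and cameraMatrix/distCoeffs from a real calibration:

using OpenCvSharp;
using OpenCvSharp.Aruco;

// One fake marker whose corners are listed clockwise, as the method expects.
var corners = new[]
{
    new[] { new Point2f(100, 100), new Point2f(200, 100), new Point2f(200, 200), new Point2f(100, 200) }
};

using var cameraMatrix = Mat.Eye(3, 3, MatType.CV_64FC1).ToMat();
using var distCoeffs = Mat.Zeros(1, 5, MatType.CV_64FC1).ToMat();

using var rvecs = new Mat();
using var tvecs = new Mat();
CvAruco.EstimatePoseSingleMarkers(corners, 0.05f, cameraMatrix, distCoeffs, rvecs, tvecs);
// rvecs/tvecs now hold one rotation and one translation vector per marker.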
Code example #10
        /// <summary>
        /// static method for computing quality
        /// </summary>
        /// <param name="ref"></param>
        /// <param name="cmp"></param>
        /// <param name="qualityMap">output quality map, or null</param>
        /// <returns>cv::Scalar with per-channel quality values. For GMSD, 0 indicates no distortion (identical images) and larger values indicate more distortion.</returns>
        public static Scalar Compute(InputArray @ref, InputArray cmp, OutputArray? qualityMap)
        {
            if (@ref == null)
                throw new ArgumentNullException(nameof(@ref));
            if (cmp == null)
                throw new ArgumentNullException(nameof(cmp));
            @ref.ThrowIfDisposed();
            cmp.ThrowIfDisposed();
            qualityMap?.ThrowIfNotReady();

            var ret = NativeMethods.quality_QualityGMSD_staticCompute(@ref.CvPtr, cmp.CvPtr, qualityMap?.CvPtr ?? IntPtr.Zero);

            GC.KeepAlive(@ref);
            GC.KeepAlive(cmp);
            qualityMap?.Fix();
            return ret;
        }
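Comparing two images with the static Compute; the paths are placeholders, and passing null skips the quality map:

using System;
using OpenCvSharp;
using OpenCvSharp.Quality;

using var reference = Cv2.ImRead("reference.png");
using var distorted = Cv2.ImRead("distorted.png");

Scalar gmsd = QualityGMSD.Compute(reference, distorted, null);
Console.WriteLine($"per-channel GMSD: {gmsd.Val0}, {gmsd.Val1}, {gmsd.Val2}");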
Code example #11
File: EM.cs, Project: zhaohh1985/opencvsharp
        /// <summary>
        /// Predicts the response for sample
        /// </summary>
        /// <param name="sample">A sample for classification. It should be a one-channel matrix of
        /// \f$1 \times dims\f$ or \f$dims \times 1\f$ size.</param>
        /// <param name="probs">Optional output matrix that contains posterior probabilities of each component
        /// given the sample. It has \f$1 \times nclusters\f$ size and CV_64FC1 type.</param>
        public virtual Vec2d Predict2(InputArray sample, OutputArray? probs = null)
        {
            ThrowIfDisposed();
            if (sample == null)
            {
                throw new ArgumentNullException(nameof(sample));
            }
            sample.ThrowIfDisposed();
            probs?.ThrowIfNotReady();

            NativeMethods.HandleException(
                NativeMethods.ml_EM_predict2(ptr, sample.CvPtr, Cv2.ToPtr(probs), out var ret));
            probs?.Fix();
            GC.KeepAlive(this);
            GC.KeepAlive(sample);
            GC.KeepAlive(probs);
            return ret;
        }
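A self-contained sketch of Predict2; the sample values are toy data, and the interpretation of the returned Vec2d (likelihood logarithm, then component index) follows the upstream EM documentation:

using System;
using OpenCvSharp;
using OpenCvSharp.ML;

// Train a tiny two-component mixture, then classify one new sample (toy values).
using var samples = new Mat(6, 1, MatType.CV_64FC1,
    new double[] { 0.1, 0.2, 0.3, 9.8, 10.1, 10.3 });
using var em = EM.Create();
em.ClustersNumber = 2;
em.TrainEM(samples);

using var sample = new Mat(1, 1, MatType.CV_64FC1, new double[] { 9.9 });
using var probs = new Mat();
Vec2d result = em.Predict2(sample, probs);
double logLikelihood = result.Item0;   // element 0: likelihood logarithm for the sample
int component = (int)result.Item1;     // element 1: index of the most probable component
Console.WriteLine($"component {component}, log-likelihood {logLikelihood}");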
Code example #12
        /// <summary>
        /// Predicts responses for input samples and returns a float type.
        /// </summary>
        /// <param name="samples">The input data for the prediction algorithm. Matrix [m x n],
        /// where each row contains variables (features) of one object being classified.
        /// Should have data type CV_32F.</param>
        /// <param name="results">Predicted labels as a column matrix of type CV_32S.</param>
        /// <param name="flags">Not used.</param>
        /// <returns></returns>
        public float Predict(InputArray samples, OutputArray? results = null, int flags = 0)
        {
            ThrowIfDisposed();
            if (samples == null)
            {
                throw new ArgumentNullException(nameof(samples));
            }
            samples.ThrowIfDisposed();
            results?.ThrowIfNotReady();

            NativeMethods.HandleException(
                NativeMethods.ml_LogisticRegression_predict(ptr, samples.CvPtr, Cv2.ToPtr(results), flags, out var ret));
            GC.KeepAlive(this);
            GC.KeepAlive(samples);
            GC.KeepAlive(results);
            results?.Fix();

            return ret;
        }
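A sketch of LogisticRegression prediction on toy data; the samples, labels, and solver settings are illustrative, and responses are given as CV_32F as the trainer expects:

using System;
using OpenCvSharp;
using OpenCvSharp.ML;

using var samples = new Mat(4, 1, MatType.CV_32FC1, new float[] { 0f, 1f, 9f, 10f });
using var responses = new Mat(4, 1, MatType.CV_32FC1, new float[] { 0f, 0f, 1f, 1f });

using var lr = LogisticRegression.Create();
lr.LearningRate = 0.01;
lr.Iterations = 100;
lr.Train(samples, SampleTypes.RowSample, responses);

using var query = new Mat(1, 1, MatType.CV_32FC1, new float[] { 8.5f });
using var labels = new Mat();                 // receives the CV_32S predicted labels
float predicted = lr.Predict(query, labels);
Console.WriteLine(predicted);                 // expected to be 1 for this toy data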
Code example #13
        /// <summary>
        /// Finds the template on the input image, using precomputed edges and derivatives.
        /// </summary>
        /// <param name="edges">Edge map of the input image.</param>
        /// <param name="dx">X-derivative of the input image.</param>
        /// <param name="dy">Y-derivative of the input image.</param>
        /// <param name="positions">Found object positions.</param>
        /// <param name="votes">Optional output vector of votes for the positions.</param>
        public virtual void Detect(
            InputArray edges, InputArray dx, InputArray dy, OutputArray positions, OutputArray? votes = null)
        {
            if (edges == null)
            {
                throw new ArgumentNullException(nameof(edges));
            }
            if (dx == null)
            {
                throw new ArgumentNullException(nameof(dx));
            }
            if (dy == null)
            {
                throw new ArgumentNullException(nameof(dy));
            }
            if (positions == null)
            {
                throw new ArgumentNullException(nameof(positions));
            }
            edges.ThrowIfDisposed();
            dx.ThrowIfDisposed();
            dy.ThrowIfDisposed();
            positions.ThrowIfNotReady();
            if (votes != null)
            {
                votes.ThrowIfNotReady();
            }

            NativeMethods.imgproc_GeneralizedHough_detect2(
                ptr, edges.CvPtr, dx.CvPtr, dy.CvPtr, positions.CvPtr, Cv2.ToPtr(votes));
            GC.KeepAlive(this);
            GC.KeepAlive(edges);
            GC.KeepAlive(dx);
            GC.KeepAlive(dy);
            GC.KeepAlive(positions);
            GC.KeepAlive(votes);
            positions.Fix();
            if (votes != null)
            {
                votes.Fix();
            }
        }
Code example #14
File: QualityPSNR.cs, Project: Noemata/OCVSharpTest
        /// <summary>
        /// static method for computing quality
        /// </summary>
        /// <param name="ref"></param>
        /// <param name="cmp"></param>
        /// <param name="qualityMap">output quality map, or null</param>
        /// <param name="maxPixelValue">maximum per-channel value for any individual pixel; eg 255 for uint8 image</param>
        /// <returns>PSNR value, or double.PositiveInfinity if the MSE between the two images == 0</returns>
        public static Scalar Compute(InputArray @ref, InputArray cmp, OutputArray? qualityMap, double maxPixelValue = MaxPixelValueDefault)
        {
            if (@ref == null)
            {
                throw new ArgumentNullException(nameof(@ref));
            }
            if (cmp == null)
            {
                throw new ArgumentNullException(nameof(cmp));
            }
            @ref.ThrowIfDisposed();
            cmp.ThrowIfDisposed();
            qualityMap?.ThrowIfNotReady();

            var ret = NativeMethods.quality_QualityPSNR_staticCompute(@ref.CvPtr, cmp.CvPtr, qualityMap?.CvPtr ?? IntPtr.Zero, maxPixelValue);

            GC.KeepAlive(@ref);
            GC.KeepAlive(cmp);
            qualityMap?.Fix();
            return ret;
        }
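PSNR follows the same pattern; the paths are placeholders and maxPixelValue is left at its default, which suits 8-bit images per the doc comment above:

using System;
using OpenCvSharp;
using OpenCvSharp.Quality;

using var reference = Cv2.ImRead("reference.png");
using var distorted = Cv2.ImRead("distorted.png");

Scalar psnr = QualityPSNR.Compute(reference, distorted, null);
Console.WriteLine($"PSNR: {psnr.Val0} dB");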
Code example #15
File: QualitySSIM.cs, Project: ziyouhenzi/opencvsharp
        /// <summary>
        /// static method for computing quality
        /// </summary>
        /// <param name="ref"></param>
        /// <param name="cmp"></param>
        /// <param name="qualityMap">output quality map, or null</param>
        /// <returns>cv::Scalar with per-channel quality values.  Values range from 0 (worst) to 1 (best)</returns>
        public static Scalar Compute(InputArray @ref, InputArray cmp, OutputArray? qualityMap)
        {
            if (@ref == null)
            {
                throw new ArgumentNullException(nameof(@ref));
            }
            if (cmp == null)
            {
                throw new ArgumentNullException(nameof(cmp));
            }
            @ref.ThrowIfDisposed();
            cmp.ThrowIfDisposed();
            qualityMap?.ThrowIfNotReady();

            NativeMethods.HandleException(
                NativeMethods.quality_QualitySSIM_staticCompute(
                    @ref.CvPtr, cmp.CvPtr, qualityMap?.CvPtr ?? IntPtr.Zero, out var ret));

            GC.KeepAlive(@ref);
            GC.KeepAlive(cmp);
            qualityMap?.Fix();
            return ret;
        }
Code example #16
        /// <summary>
        /// Computes the dense optical flow between two frames.
        /// </summary>
        /// <param name="frame0">First input frame.</param>
        /// <param name="frame1">Second input frame.</param>
        /// <param name="flow1">Computed flow (output).</param>
        /// <param name="flow2">Optional second flow output.</param>
        public virtual void Calc(InputArray frame0, InputArray frame1, OutputArray flow1, OutputArray? flow2 = null)
        {
            if (frame0 == null)
            {
                throw new ArgumentNullException(nameof(frame0));
            }
            if (frame1 == null)
            {
                throw new ArgumentNullException(nameof(frame1));
            }
            if (flow1 == null)
            {
                throw new ArgumentNullException(nameof(flow1));
            }
            frame0.ThrowIfDisposed();
            frame1.ThrowIfDisposed();
            flow1.ThrowIfNotReady();
            flow2?.ThrowIfNotReady();

            NativeMethods.HandleException(
                NativeMethods.superres_DenseOpticalFlowExt_calc(
                    ptr, frame0.CvPtr, frame1.CvPtr, flow1.CvPtr, Cv2.ToPtr(flow2)));

            GC.KeepAlive(this);
            GC.KeepAlive(frame0);
            GC.KeepAlive(frame1);
            GC.KeepAlive(flow1);
            GC.KeepAlive(flow2);
            flow1.Fix();
            flow2?.Fix();
        }