/// <summary>
/// Detect keypoints in the OclImage
/// </summary>
/// <param name="img">The image from which the keypoints will be detected</param>
/// <param name="mask">The optional mask, can be null if not needed</param>
/// <returns>
/// The keypoints OclMat that will have 1 row.
/// keypoints.at&lt;float[6]&gt;(0, i) contains the i'th keypoint
/// format: (x, y, size, response, angle, octave)
/// </returns>
public OclMat<float> DetectKeyPointsRaw(OclImage<Gray, Byte> img, OclImage<Gray, Byte> mask)
{
    OclMat<float> result = new OclMat<float>();
    OclInvoke.oclSURFDetectorDetectKeyPoints(_ptr, img, mask, result);
    return result;
}
/// <summary>
/// Compute the descriptors given the image and the keypoint locations
/// </summary>
/// <param name="image">The image from which the descriptors will be computed</param>
/// <param name="mask">The optional mask, can be null if not needed</param>
/// <param name="keyPoints">The keypoints at which the descriptors will be computed. The order of the keypoints might be changed unless the GPU_SURF detector is UP-RIGHT.</param>
/// <returns>The image features found at the keypoint locations</returns>
public OclMat<float> ComputeDescriptorsRaw(OclImage<Gray, Byte> image, OclImage<Gray, Byte> mask, OclMat<float> keyPoints)
{
    OclMat<float> descriptors = new OclMat<float>(keyPoints.Size.Height, DescriptorSize, 1);
    OclInvoke.oclSURFDetectorCompute(_ptr, image, mask, keyPoints, descriptors, true);
    return descriptors;
}
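
A minimal usage sketch for the two methods above (not part of the source; the detector instance surf and the input image are assumed names):

// Illustrative only: "surf" is assumed to be an instance of the SURF detector
// class these methods belong to; "image" is a pre-loaded OclImage<Gray, Byte>.
// A null mask means the whole image is searched.
using (OclMat<float> keyPoints = surf.DetectKeyPointsRaw(image, null))
using (OclMat<float> descriptors = surf.ComputeDescriptorsRaw(image, null, keyPoints))
{
    // keyPoints is a single-row OclMat of packed keypoint records;
    // descriptors holds one DescriptorSize-long row per keypoint.
}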
/// <summary>
/// Calculate an optical flow for a sparse feature set.
/// </summary>
/// <param name="frame0">First 8-bit input image (supports both grayscale and color images).</param>
/// <param name="frame1">Second input image of the same size and the same type as <paramref name="frame0"/></param>
/// <param name="points0">
/// Vector of 2D points for which the flow needs to be found. It must be a one-row
/// matrix with 2 channels.
/// </param>
/// <param name="points1">
/// Output vector of 2D points (with single-precision, two-channel floating-point coordinates)
/// containing the calculated new positions of the input features in the second image.</param>
/// <param name="status">
/// Output status vector (CV_8UC1 type). Each element of the vector is set to 1 if the
/// flow for the corresponding feature has been found. Otherwise, it is set to 0.
/// </param>
/// <param name="err">
/// Output vector (CV_32FC1 type) that contains the difference between the patches around
/// the original and moved points, or the minimum eigenvalue if getMinEigenVals is set. It can be
/// null if not needed.
/// </param>
public void Sparse(OclImage<Gray, Byte> frame0, OclImage<Gray, Byte> frame1, OclMat<float> points0, out OclMat<float> points1, out OclMat<Byte> status, out OclMat<float> err)
{
    points1 = new OclMat<float>();
    status = new OclMat<Byte>();
    err = new OclMat<float>();
    OclInvoke.oclPyrLKOpticalFlowSparse(_ptr, frame0, frame1, points0, points1, status, err);
}
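
A hedged usage sketch for Sparse (prevFrame, currFrame, and prevPts are assumed names; prevPts must be a one-row, two-channel OclMat&lt;float&gt; as described above):

// Illustrative only: "flow" stands for the optical-flow instance this method belongs to.
OclMat<float> nextPts;
OclMat<Byte> trackStatus;
OclMat<float> trackError;
flow.Sparse(prevFrame, currFrame, prevPts, out nextPts, out trackStatus, out trackError);
// trackStatus is 1 where the flow for the corresponding point was found;
// trackError holds the patch difference (or minimum eigenvalue) per point.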
/*
/// <summary>
/// Add the model descriptors
/// </summary>
/// <param name="modelDescriptors">The model descriptors</param>
public void Add(Matrix<Byte> modelDescriptors)
{
    if (_distanceType != DistanceType.HammingDist)
        throw new ArgumentException("Matrix<Byte> model descriptors require the Hamming distance type");
    gpuBruteForceMatcherAdd(_ptr, modelDescriptors);
}

/// <summary>
/// Add the model descriptors
/// </summary>
/// <param name="modelDescriptors">The model descriptors</param>
public void Add(Matrix<float> modelDescriptors)
{
    if (!(_distanceType == DistanceType.L2 || _distanceType == DistanceType.L1))
        throw new ArgumentException("Matrix<float> model descriptors require the L1 or L2 distance type");
    gpuBruteForceMatcherAdd(_ptr, modelDescriptors);
}
*/

/// <summary>
/// Find the k nearest neighbours using the brute force matcher.
/// </summary>
/// <param name="queryDescriptors">The query descriptors</param>
/// <param name="modelDescriptors">The model descriptors</param>
/// <param name="modelIdx">The model index. An n x <paramref name="k"/> matrix where n = <paramref name="queryDescriptors"/>.Size.Height</param>
/// <param name="distance">The matrix where the distance values are stored. An n x <paramref name="k"/> matrix where n = <paramref name="queryDescriptors"/>.Size.Height</param>
/// <param name="k">The number of nearest neighbours to be searched</param>
/// <param name="mask">The mask</param>
public void KnnMatchSingle(OclMat<T> queryDescriptors, OclMat<T> modelDescriptors, OclMat<int> modelIdx, OclMat<float> distance, int k, OclMat<Byte> mask)
{
    /*
    if (k == 2 && !(modelIdx.IsContinuous && distance.IsContinuous))
        throw new ArgumentException("For k == 2, the allocated index matrix and distance matrix must be continuous");
    */
    OclInvoke.oclBruteForceMatcherKnnMatchSingle(_ptr, queryDescriptors, modelDescriptors, modelIdx, distance, k, mask);
}
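
A sketch of a typical k = 2 query used for the ratio test (the matcher instance and both descriptor OclMats are assumed names; a null mask matches everything):

// Illustrative only: "matcher" is assumed to be an instance of this brute force matcher class.
using (OclMat<int> modelIdx = new OclMat<int>())
using (OclMat<float> distance = new OclMat<float>())
{
    matcher.KnnMatchSingle(queryDescriptors, modelDescriptors, modelIdx, distance, 2, null);
    // Row i now holds the indices and distances of the two closest model
    // descriptors for query descriptor i; a common filter keeps a match only
    // when the best distance is well below the second best (e.g. ratio < 0.8).
}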
/// <summary>
/// Detect keypoints in the OclImage
/// </summary>
/// <param name="img">The image from which the keypoints will be detected</param>
/// <param name="mask">The optional mask, can be null if not needed</param>
/// <returns>An array of keypoints</returns>
public MKeyPoint[] DetectKeyPoints(OclImage<Gray, Byte> img, OclImage<Gray, Byte> mask)
{
    using (OclMat<float> tmp = DetectKeyPointsRaw(img, mask))
    using (VectorOfKeyPoint kpts = new VectorOfKeyPoint())
    {
        DownloadKeypoints(tmp, kpts);
        return kpts.ToArray();
    }
}
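
The convenience wrapper above can be used directly when the packed OclMat format is not needed (as before, surf and image are assumed names):

// Illustrative only.
MKeyPoint[] keyPoints = surf.DetectKeyPoints(image, null);
foreach (MKeyPoint kp in keyPoints)
    Console.WriteLine("({0:F1}, {1:F1}) size {2:F1}", kp.Point.X, kp.Point.Y, kp.Size);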
/// <summary>
/// Obtain the keypoints from an OclMat as a vector of keypoints
/// </summary>
/// <param name="src">The keypoints obtained from DetectKeyPointsRaw</param>
/// <param name="dst">The vector of keypoints</param>
public void DownloadKeypoints(OclMat<float> src, VectorOfKeyPoint dst)
{
    OclInvoke.oclSURFDownloadKeypoints(_ptr, src, dst);
}
/// <summary>
/// Obtain an OclMat from a vector of keypoints
/// </summary>
/// <param name="src">The vector of keypoints</param>
/// <param name="dst">An OclMat that represents the keypoints</param>
public void UploadKeypoints(VectorOfKeyPoint src, OclMat<float> dst)
{
    OclInvoke.oclSURFUploadKeypoints(_ptr, src, dst);
}
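
A round-trip sketch tying the two helpers together (assumed names as in the earlier sketches): keypoints detected on the device are downloaded for CPU-side inspection, then re-uploaded:

// Illustrative only: detect on the device, download to a managed vector, re-upload.
using (OclMat<float> raw = surf.DetectKeyPointsRaw(image, null))
using (VectorOfKeyPoint vec = new VectorOfKeyPoint())
using (OclMat<float> uploaded = new OclMat<float>())
{
    surf.DownloadKeypoints(raw, vec);    // device -> managed vector
    surf.UploadKeypoints(vec, uploaded); // managed vector -> device
}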