/// <summary>
        /// Finds the pattern.
        /// </summary>
        /// <returns><c>true</c>, if pattern was found, <c>false</c> otherwise.</returns>
        /// <param name="image">Image.</param>
        /// <param name="info">Info.</param>
        public bool findPattern(Mat image, PatternTrackingInfo info)
        {
            // Convert input image to gray
            getGray(image, ref m_grayImg);
            //Cv2.CvtColor(image, m_grayImg, ColorConversionCodes.BGR2GRAY);
            // Extract feature points from input gray image
            extractFeatures(m_grayImg, ref m_queryKeypoints, ref m_queryDescriptors);

            // Get matches with current pattern
            getMatches(m_queryDescriptors, ref m_matches);

            m_roughHomography = new Mat();
            // Find homography transformation and detect good matches
            bool homographyFound = refineMatchesWithHomography(
                m_queryKeypoints,
                m_pattern.keyPoints,
                homographyReprojectionThreshold,
                ref m_matches,
                ref m_roughHomography,
                HomographyMethods.Ransac);

            if (homographyFound)
            {
                // If refinement is enabled, improve the found transformation
                // (this port gates the refinement pass on the ratio-test flag)
                if (enableRatioTest)
                {
                    // Warp image using found homography
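                    // (WarpInverseMap applies the homography as an image-to-pattern mapping,
                    //  rectifying the detected region into pattern space for a second matching pass)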
                    Cv2.WarpPerspective(m_grayImg, m_warpedImg, m_roughHomography, m_pattern.size, InterpolationFlags.WarpInverseMap | InterpolationFlags.Cubic);
                    Cv2.ImShow("second img", m_warpedImg);
                    KeyPoint[] warpedqueryKeypoints = null;
                    extractFeatures(m_warpedImg, ref warpedqueryKeypoints, ref m_queryDescriptors);
                    DMatch[] refinedMatches = null;
                    // Match with pattern
                    getMatches(m_queryDescriptors, ref refinedMatches);
                    m_refinedHomography = new Mat();
                    // Estimate new refinement homography
                    homographyFound = refineMatchesWithHomography(
                        warpedqueryKeypoints,
                        m_pattern.keyPoints,
                        homographyReprojectionThreshold,
                        ref refinedMatches,
                        ref m_refinedHomography,
                        HomographyMethods.LMedS);
                    if (!homographyFound)
                    {
                        return false;
                    }
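                    // Resulting homography is the matrix product of the rough and refined homographies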
                    info.homography = m_roughHomography * m_refinedHomography;
                    Cv2.WarpPerspective(m_grayImg, m_warpedImg, info.homography, m_pattern.size, InterpolationFlags.WarpInverseMap | InterpolationFlags.Cubic);
                    Cv2.ImShow("third img", m_warpedImg);
                    Console.WriteLine(m_pattern.points2d.Length);
                    using (Mat src = new Mat(m_pattern.points2d.Length, 1, MatType.CV_32FC2, m_pattern.points2d))
                        using (Mat dst = new Mat())
                        {
                            Cv2.PerspectiveTransform(src, dst, info.homography);
                            Point2f[] dstArray = new Point2f[dst.Rows * dst.Cols];
                            dst.GetArray(out dstArray);
                            for (int j = 0; j < dstArray.Length; j++)
                            {
                                Console.WriteLine(dstArray[j]);
                            }
                            Point2d[] result = Array.ConvertAll(dstArray, new Converter <Point2f, Point2d>(Point2fToPoint2d));
                            for (int j = 0; j < result.Length; j++)
                            {
                                Console.WriteLine(result[j]);
                            }
                            info.points2d = new Mat(result.Length, 1, MatType.CV_32FC2, result);
                            //for (int j = 0; j < info.points2d.Rows; j++)
                            //    Console.WriteLine(info.points2d.Row(j));
                            var s = "break point";
                            //return result;
                        }
                }
                else
                {
                    info.homography = m_roughHomography;

                    using (Mat src = new Mat(m_pattern.points2d.Length, 1, MatType.CV_32FC2, m_pattern.points2d))
                    using (Mat dst = new Mat())
                    {
                        // Project the pattern corners with the rough homography only
                        Cv2.PerspectiveTransform(src, dst, info.homography);
                        dst.GetArray(out Point2f[] dstArray);
                        Point2d[] result = Array.ConvertAll(dstArray, Point2fToPoint2d);
                        // Point2d stores doubles, so the Mat type must be CV_64FC2
                        info.points2d = new Mat(result.Length, 1, MatType.CV_64FC2, result);
                    }
                }
            }

            return homographyFound;
        }
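The helpers this example calls (getGray, extractFeatures, getMatches, refineMatchesWithHomography, Point2fToPoint2d) are not shown in the listing. A minimal sketch of the last two, assuming OpenCvSharp 4.x, might look as follows; names, thresholds, and the inlier filtering are illustrative, not the project's actual code:

        // Hypothetical sketch of two helpers used above (assumes OpenCvSharp 4.x).
        private static Point2d Point2fToPoint2d(Point2f p)
        {
            return new Point2d(p.X, p.Y);
        }

        // Estimates a homography from the matched keypoints and keeps only inlier matches.
        private static bool refineMatchesWithHomography(
            KeyPoint[] queryKeypoints,
            KeyPoint[] trainKeypoints,
            double reprojectionThreshold,
            ref DMatch[] matches,
            ref Mat homography,
            HomographyMethods method)
        {
            const int minMatchCount = 8;
            if (matches.Length < minMatchCount)
                return false;

            // Collect matched point pairs (pattern -> camera image)
            var srcPoints = new Point2d[matches.Length];
            var dstPoints = new Point2d[matches.Length];
            for (int i = 0; i < matches.Length; i++)
            {
                srcPoints[i] = Point2fToPoint2d(trainKeypoints[matches[i].TrainIdx].Pt);
                dstPoints[i] = Point2fToPoint2d(queryKeypoints[matches[i].QueryIdx].Pt);
            }

            using (var mask = new Mat())
            {
                homography = Cv2.FindHomography(srcPoints, dstPoints, method, reprojectionThreshold, mask);
                if (homography.Empty())
                    return false;

                // Keep only the matches flagged as inliers by RANSAC/LMedS
                var inliers = new System.Collections.Generic.List<DMatch>();
                for (int i = 0; i < matches.Length; i++)
                {
                    if (mask.At<byte>(i) != 0)
                        inliers.Add(matches[i]);
                }
                matches = inliers.ToArray();
                return matches.Length >= minMatchCount;
            }
        }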
Example #2
        /// <summary>
        /// Finds the pattern.
        /// </summary>
        /// <returns><c>true</c>, if pattern was found, <c>false</c> otherwise.</returns>
        /// <param name="image">Image.</param>
        /// <param name="info">Info.</param>
        public bool findPattern(Mat image, PatternTrackingInfo info)
        {
            // Convert input image to gray
            getGray(image, m_grayImg);

            // Extract feature points from input gray image
            extractFeatures(m_grayImg, m_queryKeypoints, m_queryDescriptors);

            // Get matches with current pattern
            getMatches(m_queryDescriptors, m_matches);

            //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));


            // Find homography transformation and detect good matches
            bool homographyFound = refineMatchesWithHomography(
                m_queryKeypoints,
                m_pattern.keypoints,
                homographyReprojectionThreshold,
                m_matches,
                m_roughHomography);

            if (homographyFound)
            {
                //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));

                // If homography refinement enabled improve found transformation
                if (enableHomographyRefinement)
                {
                    // Warp image using found homography
                    Imgproc.warpPerspective(m_grayImg, m_warpedImg, m_roughHomography, m_pattern.size, Imgproc.WARP_INVERSE_MAP | Imgproc.INTER_CUBIC);


                    //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat(m_warpedImg);

                    // Get refined matches:
                    using (MatOfKeyPoint warpedKeypoints = new MatOfKeyPoint())
                        using (MatOfDMatch refinedMatches = new MatOfDMatch())
                        {
                            // Detect features on warped image
                            extractFeatures(m_warpedImg, warpedKeypoints, m_queryDescriptors);

                            // Match with pattern
                            getMatches(m_queryDescriptors, refinedMatches);

                            // Estimate new refinement homography
                            homographyFound = refineMatchesWithHomography(
                                warpedKeypoints,
                                m_pattern.keypoints,
                                homographyReprojectionThreshold,
                                refinedMatches,
                                m_refinedHomography);
                        }

                    //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat(DebugHelpers.getMatchesImage(m_warpedImg, m_pattern.grayImg, warpedKeypoints, m_pattern.keypoints, refinedMatches, 100));

                    // Get a result homography as result of matrix product of refined and rough homographies:
                    // info.homography = m_roughHomography * m_refinedHomography;
                    Core.gemm(m_roughHomography, m_refinedHomography, 1, new Mat(), 0, info.homography);

                    //Debug.Log ("info.homography " + info.homography.ToString ());

                    // Transform contour with rough homography

                    //Core.perspectiveTransform (m_pattern.points2d, info.points2d, m_roughHomography);
                    //info.draw2dContour (image, new Scalar (200, 0, 0, 255));


                    // Transform contour with precise homography

                    Core.perspectiveTransform(m_pattern.points2d, info.points2d, info.homography);

                    //info.draw2dContour (image, new Scalar (0, 200, 0, 255));
                }
                else
                {
                    info.homography = m_roughHomography;

                    //Debug.Log ("m_roughHomography " + m_roughHomography.ToString ());
                    //Debug.Log ("info.homography " + info.homography.ToString ());

                    // Transform contour with rough homography
                    Core.perspectiveTransform(m_pattern.points2d, info.points2d, m_roughHomography);

                    //info.draw2dContour (image, new Scalar (0, 200, 0, 255));
                }
            }

            //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));
            //Debug.Log ("Features:" + m_queryKeypoints.ToString () + " Matches: " + m_matches.ToString ());

            return homographyFound;
        }
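Like the first example, this Unity variant relies on helpers that are not shown (getGray, extractFeatures, getMatches). Below is a minimal sketch under the assumption that the detector uses ORB features and a brute-force Hamming matcher held in fields; all names here are illustrative, not the project's actual members:

        // Hypothetical helper sketch for the OpenCV for Unity variant above.
        ORB m_detector = ORB.create();
        DescriptorMatcher m_matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);

        void getGray(Mat image, Mat gray)
        {
            // Unity camera textures are typically RGBA; handle 3- and 1-channel input too
            if (image.channels() == 4)
                Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGBA2GRAY);
            else if (image.channels() == 3)
                Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);
            else
                image.copyTo(gray);
        }

        bool extractFeatures(Mat image, MatOfKeyPoint keypoints, Mat descriptors)
        {
            // Detect keypoints and compute their descriptors on the gray image
            m_detector.detect(image, keypoints);
            m_detector.compute(image, keypoints, descriptors);
            return keypoints.total() > 0;
        }

        void getMatches(Mat queryDescriptors, MatOfDMatch matches)
        {
            // The matcher is assumed to have been trained with the pattern descriptors beforehand
            m_matcher.match(queryDescriptors, matches);
        }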