Example #1
        public bool FindPattern(Mat image)
        {
            VectorOfKeyPoint keypoints;
            Mat descriptors;

            // Convert the query image to grayscale for feature extraction
            var gray = GetGray(image);

            FeaturesUtils.ExtractFeatures(gray, out keypoints, out descriptors);

            // Visualize the extracted keypoints on the input image
            Features2DToolbox.DrawKeypoints(gray, keypoints, image, new Bgr(Color.Red), Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            VectorOfVectorOfDMatch matches;
            Mat homography;

            // Match the query descriptors against the stored pattern and estimate the homography
            FeaturesUtils.GetMatches(keypoints, descriptors, _pattern.Keypoints, _pattern.Descriptors, out matches, out homography);

            _patternInfo.Homography = homography;

            // Project the pattern's corner points into the query image using the estimated homography
            var pts = Array.ConvertAll<Point, PointF>(_pattern.Points2d.ToArray(), a => a);

            pts = CvInvoke.PerspectiveTransform(pts, homography);
            var points = Array.ConvertAll(pts, Point.Round);

            _patternInfo.Points2d = new VectorOfPoint(points);

            // Draw the detected pattern outline in green
            _patternInfo.Draw2dContour(image, new MCvScalar(0, 200, 0));

            return true;
        }
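FindPattern draws its results directly onto the image it is given, so a caller typically runs it once per frame. A minimal sketch of such a loop, assuming the PatternDetector class from Example #3 exposes FindPattern as shown here and that Emgu CV's Capture, Imshow and WaitKey handle the I/O (the window name and Esc handling are illustrative only):

        // Sketch only: feed each captured frame to FindPattern and display the annotated result.
        private static void DetectLoop(PatternDetector detector, Capture capture)
        {
            Mat frame;
            while ((frame = capture.QueryFrame()) != null)
            {
                // FindPattern overlays keypoints and the detected contour on the frame
                detector.FindPattern(frame);

                CvInvoke.Imshow("Pattern detection", frame);
                if (CvInvoke.WaitKey(10) == 27) // Esc stops the loop
                {
                    break;
                }
            }
        }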
Example #2
        protected Pattern BuildPatternFromImage(Mat image)
        {
            _pattern = new Pattern
            {
                Size     = new Size(image.Cols, image.Rows),
                Frame    = image.Clone(),
                GrayImg  = GetGray(image),
                Points2d = new VectorOfPoint(4),
                Points3d = new VectorOfPoint3D32F(4)
            };

            // Build the 2D and 3D contours (the 3D contour lies in the XY plane since the pattern is planar)

            // Image dimensions
            int w = image.Cols;
            int h = image.Rows;

            // Normalized dimensions: scale by the longer side so the 3D contour fits in a unit square
            float maxSize = Math.Max(w, h);
            float unitW   = w / maxSize;
            float unitH   = h / maxSize;

            _pattern.Points2d.Clear();
            _pattern.Points2d.Push(new[] {
                new Point(0, 0),
                new Point(w, 0),
                new Point(w, h),
                new Point(0, h)
            });

            _pattern.Points3d.Clear();
            _pattern.Points3d.Push(new[] {
                new MCvPoint3D32f(-unitW, -unitH, 0),
                new MCvPoint3D32f(unitW, -unitH, 0),
                new MCvPoint3D32f(unitW, unitH, 0),
                new MCvPoint3D32f(-unitW, unitH, 0)
            });

            VectorOfKeyPoint keypoints;
            Mat descriptors;

            // Extract the pattern's features once so they can be reused for every query frame
            FeaturesUtils.ExtractFeatures(_pattern.GrayImg, out keypoints, out descriptors);

            _pattern.Keypoints   = keypoints;
            _pattern.Descriptors = descriptors;

            return _pattern;
        }
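The 3D corners above are expressed in units of the longer image side, so the planar pattern always spans at most [-1, 1] on each axis regardless of the image resolution. The same normalization in isolation, as a standalone sketch (for a hypothetical 640x480 pattern, maxSize = 640, unitW = 1.0 and unitH = 0.75):

        // Sketch: the corner normalization from BuildPatternFromImage, factored out for clarity.
        private static MCvPoint3D32f[] NormalizedCorners(int w, int h)
        {
            float maxSize = Math.Max(w, h);
            float unitW   = w / maxSize;
            float unitH   = h / maxSize;

            // Same corner order as Points2d above, all in the Z = 0 plane
            return new[]
            {
                new MCvPoint3D32f(-unitW, -unitH, 0),
                new MCvPoint3D32f(unitW, -unitH, 0),
                new MCvPoint3D32f(unitW, unitH, 0),
                new MCvPoint3D32f(-unitW, unitH, 0)
            };
        }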
Example #3
        public static void Run(string path, string patternPath, SourceType type)
        {
            FeaturesUtils.Init();

            // Hard-coded camera intrinsics for the capture device
            var calibration = new CameraCalibrationInfo(560.764656335266f, 562.763179958161f, 295.849138757436f, 255.022208986073f);

            var patternImage    = CvInvoke.Imread(patternPath, Emgu.CV.CvEnum.LoadImageType.Unchanged);
            var patternDetector = new PatternDetector(patternImage);

            if (type == SourceType.Image)
            {
                var image = CvInvoke.Imread(path, Emgu.CV.CvEnum.LoadImageType.Unchanged);
                ShowWindow(image, patternImage, patternDetector, calibration);
            }
            else if (type == SourceType.Video)
            {
                var capture = new Capture(path);
                var image   = capture.QueryFrame();
                ShowWindow(image, patternImage, patternDetector, calibration, capture);
            }
        }
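A usage sketch for Run; the file paths below are placeholders and SourceType.Image / SourceType.Video are the enum members the method already switches on:

        // Sketch only: paths are placeholders for a real scene and pattern image/video.
        public static void Main(string[] args)
        {
            // Detect the pattern in a single still image...
            Run("scene.jpg", "pattern.jpg", SourceType.Image);

            // ...or from a video file
            Run("scene.mp4", "pattern.jpg", SourceType.Video);
        }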