Example #1
        public static void GetMatches(VectorOfKeyPoint imageKeypoints, IInputArray imageDescriptors, VectorOfKeyPoint patternKeypoints, IInputArray patternDescriptors, out VectorOfVectorOfDMatch matches, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            homography = null;

            matches = new VectorOfVectorOfDMatch();

            var matcher = new BFMatcher(DistanceType.L2);

            matcher.Add(patternDescriptors);
            matcher.KnnMatch(imageDescriptors, matches, k, null);

            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(patternKeypoints, imageKeypoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(patternKeypoints, imageKeypoints, matches, mask, 2);
                }
            }
        }
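For context, a minimal calling sketch for GetMatches (not part of the original source): it assumes SIFT produces the keypoints and descriptors, though any extractor whose descriptors suit L2 matching would do.

        // Sketch only: `scene` and `pattern` are assumed to be pre-loaded Mats.
        var sift = new SIFT();
        var sceneKp     = new VectorOfKeyPoint();
        var patternKp   = new VectorOfKeyPoint();
        var sceneDesc   = new Mat();
        var patternDesc = new Mat();
        sift.DetectAndCompute(scene, null, sceneKp, sceneDesc, false);
        sift.DetectAndCompute(pattern, null, patternKp, patternDesc, false);

        GetMatches(sceneKp, sceneDesc, patternKp, patternDesc,
                   out VectorOfVectorOfDMatch matches, out Mat homography);

        //a non-null homography means at least 4 matches survived both voting steps
        bool patternFound = homography != null;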
Example #2
        void Application_Idle(object sender, EventArgs e)
        {
            if (vc != null && !pause)
            {
                SIFT detector = new SIFT();

                Emgu.CV.Util.VectorOfKeyPoint keypoints = new Emgu.CV.Util.VectorOfKeyPoint();

                vc.Read(frame);
                System.Threading.Thread.Sleep((int)(1000.0 / rate - 5));
                //imageBox1.Image = frame;

                frLbl.Text = rate.ToString();
                cfLbl.Text = currentFrame.ToString();
                fcLbl.Text = frameCount.ToString();

                vc.Read(frame); //note: this is the second Read this tick, so every other frame is skipped
                imageBox1.Image = frame;
                //detector.Detect(frame);
                detector.DetectRaw(frame, keypoints);
                numOfKeyPoints = keypoints.Size;
                kpLbl.Text     = numOfKeyPoints.ToString();
                Features2DToolbox.DrawKeypoints(frame, keypoints, siftFrame, new Bgr(Color.Blue));
                imageBox2.Image = siftFrame;
                GC.Collect();

                currentFrame++;

                if (currentFrame >= frameCount)
                {
                    pause           = true;
                    button4.Enabled = false;
                }
            }
        }
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="recPoints">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long recPoints)
        {
            HomographyMatrix homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;

            FindMatch(modelImage, observedImage, out recPoints, out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);

            //Draw the matched keypoints

            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(0, 255, 0), new Bgr(0, 0, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);


            #region draw the projected region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);
                double mozan = mask.Sum;
                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Blue), 5);
                recPoints = Convert.ToInt64(mozan);
            }
            #endregion

            return(result);
        }
Example #4
        public bool FindPattern(Mat image)
        {
            VectorOfKeyPoint keypoints;
            Mat descriptors;

            var gray = GetGray(image);

            FeaturesUtils.ExtractFeatures(gray, out keypoints, out descriptors);

            Features2DToolbox.DrawKeypoints(gray, keypoints, image, new Bgr(Color.Red), Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            VectorOfVectorOfDMatch matches;
            Mat homography;

            FeaturesUtils.GetMatches(keypoints, descriptors, _pattern.Keypoints, _pattern.Descriptors, out matches, out homography);

            _patternInfo.Homography = homography;

            if (homography == null)
            {
                return false; //no reliable homography was found, so the pattern is not in this frame
            }

            var pts = Array.ConvertAll <Point, PointF>(_pattern.Points2d.ToArray(), a => a);

            pts = CvInvoke.PerspectiveTransform(pts, homography);
            var points = Array.ConvertAll(pts, Point.Round);

            _patternInfo.Points2d = new VectorOfPoint(points);

            _patternInfo.Draw2dContour(image, new MCvScalar(0, 200, 0));

            return(true);
        }
        private Mat find_ball()
        {
            MCvScalar orangeMin = new MCvScalar(0, 0, 212);     //10 120 100
            MCvScalar orangeMax = new MCvScalar(131, 255, 255); //70 255 255

            Mat arr = new Mat();

            Mat img    = _frame;
            Mat hsvImg = new Mat();

            CvInvoke.CvtColor(img, hsvImg, ColorConversion.Bgr2Hsv);
            CvInvoke.InRange(hsvImg, new ScalarArray(orangeMin), new ScalarArray(orangeMax),
                             hsvImg);
            //CvInvoke.MorphologyEx(hsvImg, hsvImg, MorphOp.Close, new Mat(), new System.Drawing.Point(-1, -1), 5, BorderType.Default, new MCvScalar());
            SimpleBlobDetectorParams param = new SimpleBlobDetectorParams();

            param.FilterByCircularity = false;
            param.FilterByConvexity   = false;
            param.FilterByInertia     = false;
            param.FilterByColor       = false;
            param.MinArea             = 800;
            param.MaxArea             = 5000;
            SimpleBlobDetector detector = new SimpleBlobDetector(param);

            MKeyPoint[] keypoints = detector.Detect(hsvImg);
            Features2DToolbox.DrawKeypoints(img, new VectorOfKeyPoint(keypoints), img,
                                            new Bgr(255, 0, 0), Features2DToolbox.KeypointDrawType.DrawRichKeypoints);

            foreach (var item in keypoints)
            {
                if ((int)item.Point.X > x_min && (int)item.Point.X < x_max && (int)item.Point.Y > y_min && (int)item.Point.Y < y_max)
                {
                    centerX = (int)item.Point.X;
                    centerY = (int)item.Point.Y;
                }
                else
                {
                    centerX = dX;
                    centerY = dY;

                    total_error_x = 0;

                    total_error_y = 0;
                }
            }
            if (keypoints.Length == 0)
            {
                centerX = dX;
                centerY = dY;

                total_error_x = 0;

                total_error_y = 0;
            }

            lbl_x.Content = "Center X: " + centerX;
            lbl_y.Content = "Center Y: " + centerY;

            return(img);
        }
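For reference, a self-contained sketch of the same InRange-plus-SimpleBlobDetector idea with the UI fields stripped out; the file path and HSV bounds are placeholders to adapt.

        //standalone sketch: threshold an image in HSV space and report blob centers
        Mat img  = CvInvoke.Imread("ball.jpg", ImreadModes.Color); //placeholder path
        Mat mask = new Mat();

        CvInvoke.CvtColor(img, mask, ColorConversion.Bgr2Hsv);
        CvInvoke.InRange(mask, new ScalarArray(new MCvScalar(0, 0, 212)),
                         new ScalarArray(new MCvScalar(131, 255, 255)), mask);

        var param = new SimpleBlobDetectorParams
        {
            FilterByColor = false, //the InRange output is white-on-black, so skip the colour filter
            FilterByArea  = true,  //keep only blobs between MinArea and MaxArea
            MinArea       = 800,
            MaxArea       = 5000
        };
        using (var detector = new SimpleBlobDetector(param))
        {
            foreach (MKeyPoint kp in detector.Detect(mask))
            {
                Console.WriteLine($"blob at ({kp.Point.X:F0}, {kp.Point.Y:F0}), size {kp.Size:F0}");
            }
        }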
Example #6
        public Image <Bgr, Byte> Draw(Image <Bgr, Byte> modelImage, Image <Bgr, Byte> observedImage, List <KeyFrame> keyframes = null)
        {
            //FindMatch(modelImage, observedImage,keyframes);

            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            #region draw the projected region on the image
            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                PointF[]  pts  = new PointF[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                //pts = CvInvoke.PerspectiveTransform(pts, homography);

                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                }
            }
            #endregion
            //modelImage.Dispose();
            return(result.ToImage <Bgr, byte>());
        }
Example #7
        public Image <Bgr, byte> PointComp(Image <Bgr, byte> image, Image <Bgr, byte> image2)
        {
            Image <Gray, byte> baseImgGray    = image.Convert <Gray, byte>();
            Image <Gray, byte> twistedImgGray = image2.Convert <Gray, byte>();
            Brisk            descriptor       = new Brisk();
            GFTTDetector     detector         = new GFTTDetector(40, 0.01, 5, 3, true);
            VectorOfKeyPoint GFP1             = new VectorOfKeyPoint();
            UMat             baseDesc         = new UMat();
            UMat             bimg             = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2             = new VectorOfKeyPoint();
            UMat             twistedDesc      = new UMat();
            UMat             timg             = baseImgGray.Mat.GetUMat(AccessType.Read);

            detector.DetectRaw(bimg, GFP1);
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            matcher.Add(baseDesc);
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            int nonZeroCount      = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP2, matches, mask, 1.5, 20); //was (GFP1, GFP1); the model and observed keypoints must differ
            Image <Bgr, byte> res = image.CopyBlank();

            Features2DToolbox.DrawMatches(image2, GFP1, image, GFP2, matches, res, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);
            return(res);
        }
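A short usage sketch for PointComp, assuming the two images exist on disk (placeholder paths):

        var baseImage    = new Image <Bgr, byte>("base.jpg");
        var twistedImage = new Image <Bgr, byte>("twisted.jpg");
        Image <Bgr, byte> vis = PointComp(baseImage, twistedImage);
        CvInvoke.Imshow("matches", vis);
        CvInvoke.WaitKey(0);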
Example #8
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public Bitmap GetImageWithDrawnMatches(Bitmap modelImage, Bitmap observedImage, MatchingTechnique matchingTechnique)
        {
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (Image <Bgr, byte> modelImg = new Image <Bgr, byte>(modelImage))
                using (Image <Bgr, byte> observedImg = new Image <Bgr, byte>(observedImage))
                    using (Emgu.CV.Mat modelMat = modelImg.Mat)
                        using (Emgu.CV.Mat observedMat = observedImg.Mat)
                            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                            {
                                ImageFeatureDetector.FindMatches(modelMat, observedMat, out modelKeyPoints, out observedKeyPoints, matches, out Mat mask, out Mat homography, matchingTechnique);

                                try
                                {
                                    using (Mat result = new Mat())
                                    {
                                        Features2DToolbox.DrawMatches(modelMat, modelKeyPoints, observedMat, observedKeyPoints, matches, result, new MCvScalar(255, 0, 0), new MCvScalar(0, 0, 255), mask);

                                        return(result.ToBitmap());
                                    }
                                }
                                finally
                                {
                                    mask?.Dispose();
                                    homography?.Dispose();
                                }
                            }
        }
Example #9
        /// <summary>
        /// Draws data objects onto the image and presents it
        /// (for the data objects selected in the data grid).
        /// </summary>
        /// <param name="objects">The data objects.</param>
        public void DrawObjects(IList objects)
        {
            // clone last image from engine vm
            var image = EngineVm.Image.Clone();

            // extract an MKeyPoint[] from
            // objects of type KeyPoint
            var keypoints = objects
                            .OfType <KeyPoint>()
                            .Select(o => o.GetKeyPoint())
                            .ToArray();

            // draw selected keypoints
            Features2DToolbox
            .DrawKeypoints(
                image,
                new VectorOfKeyPoint(keypoints),
                image,
                new Bgr(Color.Red),
                Features2DToolbox.KeypointDrawType.DrawRichKeypoints);

            // draw selected boxes
            foreach (var obj in objects.OfType <Box>())
            {
                image.Draw(obj.GetBox(), new Bgr(Color.Red));
            }

            // draw selected circles
            foreach (var obj in objects.OfType <Circle>())
            {
                image.Draw(obj.GetCircle(), new Bgr(Color.Red));
            }

            // draw selected contours
            foreach (var obj in objects.OfType <Contour>())
            {
                image.Draw(obj.GetContour(), new Bgr(Color.Red));
            }

            // draw selected rotated boxes
            foreach (var obj in objects.OfType <RotatedBox>())
            {
                image.Draw(obj.GetBox(), new Bgr(Color.Red), 1);
            }

            // draw selected segments
            foreach (var obj in objects.OfType <Segment>())
            {
                image.Draw(obj.GetSegment(), new Bgr(Color.Red), 1);
            }

            // draw selected ellipses
            foreach (var obj in objects.OfType <RotBoxEllipse>())
            {
                image.Draw(obj.GetEllipse(), new Bgr(Color.Red));
            }

            // present the annotated image
            SetImage(image);
        }
Example #10
        public static void FindMatch(string modelFileName, string observedFileName, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();
            {
                using (UMat uModelImage = CvInvoke.Imread(modelFileName, ImreadModes.Color).GetUMat(AccessType.Read))
                    using (UMat uObservedImage = CvInvoke.Imread(observedFileName, ImreadModes.Color).GetUMat(AccessType.Read))
                    {
                        SIFT sift             = new SIFT();
                        UMat modelDescriptors = new UMat();
                        sift.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        UMat observedDescriptors = new UMat();
                        sift.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                        BFMatcher matcher = new BFMatcher(DistanceType.L2);
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    }
            }
        }
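A hypothetical caller for this overload (the file names are placeholders); the returned mask marks the rows that survived the uniqueness vote.

        using (var matches = new VectorOfVectorOfDMatch())
        {
            FindMatch("model.png", "scene.png",
                      out VectorOfKeyPoint modelKp, out VectorOfKeyPoint observedKp,
                      matches, out Mat mask);
            Console.WriteLine($"{CvInvoke.CountNonZero(mask)} of {matches.Size} matches are unique");
        }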
        //Pass in the image to compare, along with the match time,
        //to compute the comparison result
        public Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime)
        {
            //homography matrix
            HomographyMatrix homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;

            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);//also outputs the search time

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                //indicates a match was found
                //System.Windows.Forms.MessageBox.Show("Match! ");
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);
                Console.WriteLine("width : " + rect.Right + "height : " + rect.Bottom + "\n" + pts.Length + "Up : " + pts[0].X + "," + pts[0].Y + "\n Down : " + "Up : " + pts[1].X + "," + pts[1].Y + "\n Left : " + "Up : " + pts[2].X + "," + pts[2].Y + "\n right : " + "Up : " + pts[3].X + "," + pts[3].Y);
                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            return(result);
        }
        //Draws the places where the two images match;
        //this does not imply the images contain the same object
        public Image <Bgr, Byte> DrawTwoImageMatchPoint(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints, Matrix <byte> mask, Matrix <int> indices)
        {
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            return(result);
        }
Example #13
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;
            double hessianThresh       = 100;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh);
                    SIFT siftCPU = new SIFT();


                    //extract features from the object image
                    UMat modelDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    siftCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    siftCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));

                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    //Features2DToolbox.VoteForUniqueness(matches, 1, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #14
        private void FindKeypoint(int ID, InputFileModel inputFile, IFeatureDetector detector, bool AddToList = true)
        {
            WindowsFormHelper.AddLogToConsole($"Start finding key points for: {inputFile.fileInfo.Name.ToString()}\n");

            var detectedKeyPoints = detector.DetectKeyPoints(new Mat(inputFile.fileInfo.FullName));

            if (AddToList)
            {
                DetectedKeyPoints.Add(ID, new KeyPointModel()
                {
                    DetectedKeyPoints = new VectorOfKeyPoint(detectedKeyPoints),
                    InputFile         = inputFile,
                    ID = ID
                }
                                      );
            }

            WindowsFormHelper.AddLogToConsole($"FINISH finding key points for: {inputFile.fileInfo.Name.ToString()}\n");


            // Save drawing image
            Mat output = new Mat();

            Directory.CreateDirectory($@"{tempDirectory}\DrawKeypoint");
            Features2DToolbox.DrawKeypoints(new Mat(inputFile.fileInfo.FullName), new VectorOfKeyPoint(detectedKeyPoints), output, new Bgr(0, 0, 255), KeypointDrawType.DrawRichKeypoints);
            output.Save(Path.Combine($@"{tempDirectory}\DrawKeypoint", $"{Path.GetFileNameWithoutExtension(inputFile.fileInfo.Name)}.JPG"));
            fileManager.listViewerModel._lastDrawnKeypoint = new Image <Bgr, byte>(output.Bitmap);

            var file       = new InputFileModel(Path.Combine($@"{tempDirectory}\DrawKeypoint", $"{Path.GetFileNameWithoutExtension(inputFile.fileInfo.Name)}.JPG"));
            var imageList  = _winForm.ImageList[(int)EListViewGroup.DrawnKeyPoint];
            var listViewer = _winForm.ListViews[(int)EListViewGroup.DrawnKeyPoint];

            fileManager.AddInputFileToList(file, fileManager.listViewerModel.ListOfListInputFolder[(int)EListViewGroup.DrawnKeyPoint], imageList, listViewer);
        }
        /// <summary>
        /// Draws the matches and homography
        /// </summary>
        /// <param name="knownSign"> known sign </param>
        /// <param name="candidate"> candidate </param>
        /// <param name="signKp"> sign keypoints </param>
        /// <param name="candKp"> candidate keypints </param>
        /// <param name="match"> matches </param>
        /// <returns> resulting image </returns>
        public static Image <Bgr, byte> Draw(Image <Bgr, byte> knownSign, Image <Bgr, byte> candidate, VectorOfKeyPoint signKp, VectorOfKeyPoint candKp, VectorOfVectorOfDMatch match)
        {
            Mat homography;

            //Draw the matched keypoints
            Mat result = new Mat();

            Features2DToolbox.DrawMatches(knownSign, signKp, candidate, candKp,
                                          match, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), null);
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(signKp, candKp, match, null, 2);
            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = new Rectangle(Point.Empty, knownSign.Size);
                PointF[]  pts  = new PointF[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                pts = CvInvoke.PerspectiveTransform(pts, homography);


                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                }
            }
            return(result.ToImage <Bgr, byte>());
        }
Example #16
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        private static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime, out double matchPercentage)
        {
            matchPercentage = 0.0;
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography);

                var difference = Math.Abs(modelKeyPoints.Size - observedKeyPoints.Size);
                if (modelKeyPoints.Size > 0)
                {
                    matchPercentage = 100.0 * ((double)difference / (double)modelKeyPoints.Size);
                    //MessageBox.Show(String.Format("The images are {0}% different", matchPercentage));
                }
                else
                {
                    MessageBox.Show("No keypoints found in the model image; it appears to be blank.");
                }

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

#if NETFX_CORE
                    Point[] points = Extensions.ConvertAll <PointF, Point>(pts, Point.Round);
#else
                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
#endif
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }
                #endregion

                return(result);
            }
        }
Example #17
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out bool goodMatch)
        {
            goodMatch = false;

            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    //extract features from the object image
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // Bruteforce, slower but more accurate
                    // You can use KDTree for faster matching with slight loss in accuracy
                    using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                        using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                               matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                              observedKeyPoints, matches, mask, 2);
                                        goodMatch = nonZeroCount >= 7 && nonZeroCount >= observedKeyPoints.Size / 7;
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
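A sketch of how this KAZE/FLANN variant might be driven (the paths are placeholders); goodMatch reflects the stricter rule above, at least 7 inliers covering at least 1/7 of the observed keypoints.

        using (Mat model = CvInvoke.Imread("model.png", ImreadModes.Color))
        using (Mat scene = CvInvoke.Imread("scene.png", ImreadModes.Color))
        using (var matches = new VectorOfVectorOfDMatch())
        {
            FindMatch(model, scene, out long matchTime,
                      out VectorOfKeyPoint modelKp, out VectorOfKeyPoint sceneKp,
                      matches, out Mat mask, out Mat homography, out bool goodMatch);
            Console.WriteLine($"KAZE matching took {matchTime} ms, good match: {goodMatch}");
        }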
Example #18
        private void button3_Click(object sender, EventArgs e)
        {
            GFTTDetector detector = new GFTTDetector(40, 0.01, 5, 3, true);

            var baseImgGray    = baseImg.Convert <Gray, byte>();
            var twistedImgGray = twistedImg.Convert <Gray, byte>();

            //descriptor extractor for the key points
            Brisk descriptor = new Brisk();

            //since the inverse transformation is what we need here,
            //the twisted image serves as the base
            VectorOfKeyPoint GFP1     = new VectorOfKeyPoint();
            UMat             baseDesc = new UMat();
            UMat             bimg     = twistedImgGray.Mat.GetUMat(AccessType.Read);

            VectorOfKeyPoint GFP2        = new VectorOfKeyPoint();
            UMat             twistedDesc = new UMat();
            UMat             timg        = baseImgGray.Mat.GetUMat(AccessType.Read);

            //detect the raw keypoint data in both images
            detector.DetectRaw(bimg, GFP1);

            //compute descriptors for the detected keypoints
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);

            //matcher for comparing sets of keypoint descriptors
            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            //container for the keypoint matches
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            //add the base descriptors
            matcher.Add(baseDesc);
            //match against the descriptors of the twisted image
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            //3rd parameter: the number of nearest neighbours searched for matches
            //4th parameter: the mask, not needed here

            //mask marking the values to discard (outliers and non-unique matches)
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            //keep only the unique matches
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

            Mat homography;

            //compute the homography matrix
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(GFP1, GFP2, matches, mask, 2);

            if (homography != null) //null when too few unique matches remain
            {
                var destImage = new Image <Bgr, byte>(baseImg.Size);

                CvInvoke.WarpPerspective(twistedImg, destImage, homography, destImage.Size);
                twistedImg      = destImage;
                imageBox2.Image = destImage.Resize(640, 480, Inter.Linear);
            }
        }
Example #19
        /// <summary>
        /// Draws the feature points onto the image
        /// </summary>
        /// <param name="surf">The SURF feature data</param>
        /// <returns>The image with the feature points drawn</returns>
        public static Image <Bgr, byte> DrawSURFFeature(SURFFeatureData surf)
        {
            VectorOfKeyPoint keyPoints = surf.GetKeyPoints();
            //draw the features
            Image <Bgr, byte> result = Features2DToolbox.DrawKeypoints(surf.GetImg(), surf.GetKeyPoints(), new Bgr(255, 255, 255), Features2DToolbox.KeypointDrawType.DEFAULT);

            return(result);
        }
Example #20
        public void SURFDraw(Mat image, Mat testImage)
        {
            VectorOfKeyPoint keyPoint = new VectorOfKeyPoint();
            SURF             surfCPU  = new SURF(500, 4, 2, true, false);

            surfCPU.DetectRaw(image, keyPoint);
            Features2DToolbox.DrawKeypoints(image, keyPoint, testImage, new Bgr(Color.Red), Features2DToolbox.KeypointDrawType.Default);
        }
Example #21
        public bool Match(IImageLocalFeatures feature1, IImageLocalFeatures feature2)
        {
            bool                      match = false;
            int                       k     = 2;
            double                    uniquenessThreshold = 0.8;
            Matrix <byte>             mask;
            Matrix <int>              indices;
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(_distanceType);

            matcher.Add(feature1.Descriptors);

            indices = new Matrix <int>(feature2.Descriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(feature2.Descriptors.Rows, k))
            {
                matcher.KnnMatch(feature2.Descriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 25)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(feature1.KeyPoints, feature2.KeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 6)
                {
                    _homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(feature1.KeyPoints, feature2.KeyPoints, indices, mask, 2);
                    match       = true;
                }
            }

            /*
             * _result = Features2DToolbox.DrawMatches(feature1.Image, feature1.KeyPoints, feature2.Image, feature2.KeyPoints,
             *                                      indices,
             *                                      new Bgr(255, 0, 0),
             *                                      new Bgr(255, 255, 255),
             *                                      mask,
             *                                      Features2DToolbox.KeypointDrawType.DEFAULT);
             *
             * if (_homography != null)
             * {
             *  Rectangle rect = feature2.Image.ROI;
             *  PointF[] pts = new PointF[]
             *  {
             *      new PointF(rect.Left, rect.Bottom),
             *      new PointF(rect.Right, rect.Bottom),
             *      new PointF(rect.Right, rect.Top),
             *      new PointF(rect.Left, rect.Top)
             *  };
             *
             *  _homography.ProjectPoints(pts);
             *  _result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
             * }
             */

            return(match);
        }
Example #22
        private static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, SurfSettings surfSettings, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            #region SURF Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;

            if (surfSettings != null)
            {
                hessianThresh       = surfSettings.HessianThresh.Value;
                uniquenessThreshold = surfSettings.UniquenessThreshold.Value;
            }

            SURFDetector surfCPU = new SURFDetector(hessianThresh, false);
            #endregion



            int       k = 2;
            Stopwatch watch;
            homography = null;


            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            matchTime = watch.ElapsedMilliseconds;
        }
Example #23
        public static Image <Bgr, Byte> DrawFeatures(Image <Gray, Byte> modelImage)
        {
            SURFDetector      surfCPU          = new SURFDetector(500, false);
            VectorOfKeyPoint  modelKeyPoints   = new VectorOfKeyPoint();
            Matrix <float>    modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);
            Image <Bgr, Byte> result           = Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, new Bgr(0, 0, 255), Features2DToolbox.KeypointDrawType.DEFAULT);

            return(result);
        }
        public void SIFTDraw(Mat image, Mat testImage)
        {
            SIFT             siftCPU  = new SIFT();
            VectorOfKeyPoint keyPoint = new VectorOfKeyPoint();

            siftCPU.DetectRaw(image, keyPoint);

            Features2DToolbox.DrawKeypoints(image, keyPoint, testImage, new Bgr(Color.GreenYellow), Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);
        }
        public List <int> DetectBanknotesResults(ref Mat imageEval)
        {
            var           imageBackup       = imageEval.Clone();
            List <Result> detectorResultOut = DetectBanknotesTrain(imageEval);
            List <int>    results           = new List <int>();
            var           total             = 0.0;

            for (int i = 0; i < detectorResultOut.Count; ++i)
            {
                Result detectorResult = detectorResultOut[i];
                results.Add(detectorResult.GetTrainValue());

                Features2DToolbox.DrawKeypoints(imageEval, detectorResult.GetInliersKeypoints(), imageEval, new Bgr(0, 255, 0), Features2DToolbox.KeypointDrawType.Default);

                var valorTexto = detectorResult.GetTrainValue().ToString();

                //Message in Portuguese
                _synthesizer.SpeakAsync(valorTexto + "reais");

                total += Double.Parse(valorTexto);

                Mat imageMatchesSingle = imageBackup;

                Mat matchesInliers = detectorResult.getInliersMatches(ref imageMatchesSingle);


                Rectangle boundingBox = CvInvoke.BoundingRectangle(detectorResult.GetTrainContour());
                _util.CorrectBoundingBox(ref boundingBox, imageEval.Cols, imageEval.Rows);
                InterfaceUtil.DrawLabelInCenterOfROI(valorTexto, ref imageEval, ref boundingBox);
                InterfaceUtil.DrawLabelInCenterOfROI(valorTexto, ref matchesInliers, ref boundingBox);
                _util.DrawContour(ref imageEval, detectorResult.GetTrainContour(), detectorResult.GetTrainContourColor(), 2);
                _util.DrawContour(ref matchesInliers, detectorResult.GetTrainContour(), detectorResult.GetTrainContourColor(), 2);
            }

            if (total == 0)
            {
                //Message in Portuguese
                _synthesizer.SpeakAsync("Nenhuma cédula foi identificada");
            }
            else
            {
                ImageViewer iv = new ImageViewer();

                iv = new ImageViewer(imageEval, "Result ");
                iv.Show();

                //Message in Portuguese
                _synthesizer.SpeakAsync("Valor total é " + total + " reais");
            }

            results.Sort();

            return(results);
        }
Example #26
        public Mat Draw(Mat modelImage, Mat observedImage, out long matchTime, double surfHessianTresh, out Mat homography)
        {
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography, surfHessianTresh);

                string temp = Helper.GetFileName("Trazenje deskriptora"); //Croatian: "descriptor search"
                modelImage.Save(temp);


                //Draw the matched keypoints
                Mat result = new Mat();

                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);


                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }
                #endregion

                //when this is commented out, the algorithm works!
                //Panorama attempt - 15.1.2017

                //MakeMosaic(homography, mmodelImage, observedImage);

                modelKeyPoints.Dispose();
                observedKeyPoints.Dispose();

                return(result);
            }
        }
Example #27
        private void CaptureOnImageGrabbed(object sender, EventArgs eventArgs)
        {
            var capture = (Capture)sender;

            //Show time stamp
            double timeIndex = capture.GetCaptureProperty(CapProp.PosMsec);

            ProgressTime = TimeSpan.FromMilliseconds(timeIndex).ToString("g");

            //show frame number
            double frameNumber = capture.GetCaptureProperty(CapProp.PosFrames);
            double totalFrames = capture.GetCaptureProperty(CapProp.FrameCount);

            _progress = frameNumber / totalFrames;
            RaisePropertyChanged("Progress");

            // Show image with keyPoints
            var frame = new Mat();

            _capture.Retrieve(frame);
            var keyFeatures = _projectFile.Model.GetKeyFeatures((int)frameNumber - 1);

            var imageFrame = new Mat();

            Features2DToolbox.DrawKeypoints(frame, keyFeatures, imageFrame, new Bgr(Color.DarkBlue),
                                            Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            if (frameNumber > 1)
            {
                var matches = _projectFile.Model.GetMatches((int)frameNumber - 1);
                foreach (var match in matches)
                {
                    CvInvoke.Line(imageFrame,
                                  Point.Round(match.Item1.Point),
                                  Point.Round(match.Item2.Point),
                                  new Bgr(Color.Red).MCvScalar,
                                  2);
                }
            }

            OriginImage = VideoImageSource = imageFrame;

            //Wait to display correct framerate
            var frameRate = capture.GetCaptureProperty(CapProp.Fps);
            var rightElapsedMilliseconds = 1000.0 / frameRate;
            var realElapsedMilliseconds  = _stopwatch.ElapsedMilliseconds;
            var waitingMilliseconds      = Math.Max(0, rightElapsedMilliseconds - realElapsedMilliseconds);

            Thread.Sleep((int)waitingMilliseconds);
            _stopwatch.Restart();

            if (frameNumber == totalFrames)
            {
                Stop();
            }
        }
Example #28
        public static int SiftComparison(string img1, string img2)
        {
            var sift = new Emgu.CV.XFeatures2D.SIFT();

            var modelKeyPoints   = new VectorOfKeyPoint();
            Mat modelDescriptors = new Mat();

            var observedKeyPoints   = new VectorOfKeyPoint();
            Mat observedDescriptors = new Mat();
            Mat mask = new Mat();

            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
            int    k = 2;
            double uniquenessThreshold = 0.80;

            using (Mat modelImage = CvInvoke.Imread(img1, ImreadModes.Grayscale))
                using (Mat observedImage = CvInvoke.Imread(img2, ImreadModes.Grayscale))
                {
                    sift.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
                    sift.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);
                    BFMatcher matcher = new BFMatcher(DistanceType.L1);

                    matcher.Add(modelDescriptors);
                    //matcher.Add(observedDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    try
                    {
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    }
                    catch (Exception ex)
                    {
                        Log(ex.Message);
                        Log("Error with SIFT algorithm, unable to compare images..");
                        return(0);
                    }
                }

            int score = 0;

            for (int i = 0; i < matches.Size; i++)
            {
                if (mask.GetData(i)[0] == 0)
                {
                    continue;
                }
                score += matches[i].Size; //count every match in this row
            }

            return(score);
        }
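A usage sketch (paths and threshold are placeholders); the score is the raw count of k-NN matches in rows that pass the uniqueness vote.

        int score = SiftComparison("a.jpg", "b.jpg");
        bool probablySimilar = score > 50; //the threshold is application-specific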
Example #29
        public static Mat Draw(Mat modelImage, Mat observedImage)
        {
            var sift = new SIFT();

            var modelKeyPoints    = new VectorOfKeyPoint();
            var observedKeyPoints = new VectorOfKeyPoint();

            UMat modelDescriptors    = new UMat();
            UMat observedDescriptors = new UMat();

            sift.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
            sift.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            matcher.Add(modelDescriptors);

            var matches = new VectorOfVectorOfDMatch();

            matcher.KnnMatch(observedDescriptors, matches, 2, null);

            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);

            var homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 10);

            var result = new Mat();

            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, matches, result,
                                          new MCvScalar(255, 255, 255),
                                          new MCvScalar(0, 0, 0),
                                          mask,
                                          Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            if (homography != null) //GetHomographyMatrixFromMatchedFeatures returns null when too few matches survive
            {
                Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);

                PointF[] pts =
                {
                    new PointF(rect.Left,  rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left,  rect.Top)
                };
                pts = CvInvoke.PerspectiveTransform(pts, homography);

                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(0, 255, 0, 55), 2);
                }
            }

            return(result);
        }
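A minimal driver for this Draw (placeholder paths):

        using (Mat model = CvInvoke.Imread("model.png", ImreadModes.Grayscale))
        using (Mat scene = CvInvoke.Imread("scene.png", ImreadModes.Grayscale))
        using (Mat vis = Draw(model, scene))
        {
            CvInvoke.Imshow("SIFT matches", vis);
            CvInvoke.WaitKey(0);
        }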
Example #30
        private void extractFeatureButton_Click(object sender, EventArgs e)
        {
            trainingExtractSurfData = SURFMatch.CalSURFFeature(extractFeatureImage, new MCvSURFParams(500, false));
            //draw the features
            Image <Bgr, byte> result = Features2DToolbox.DrawKeypoints(trainingExtractSurfData.GetImg(), trainingExtractSurfData.GetKeyPoints(), new Bgr(255, 255, 255), Features2DToolbox.KeypointDrawType.DEFAULT);
            //display the result
            ImageViewer viewer = new ImageViewer(result, "Extracted Feature");

            viewer.Show();
        }