//Draw the locations where the two images look alike
        //(this does not mean the images contain the same object)
        public Image <Bgr, Byte> DrawTwoImageMatchPoint(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints, Matrix <byte> mask, Matrix <int> indices)
        {
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            return(result);
        }
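        //A minimal usage sketch (an assumption, not part of the original example):
        //pair DrawTwoImageMatchPoint with the FindMatch helper that the Draw
        //method below relies on.
        public Image <Bgr, Byte> MatchAndDraw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage)
        {
            long             matchTime;
            HomographyMatrix homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;

            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);
            return(DrawTwoImageMatchPoint(modelImage, observedImage, modelKeyPoints, observedKeyPoints, mask, indices));
        }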
Example #2
        public Image <Bgr, byte> pointComp(Image <Bgr, byte> baseImg, Image <Bgr, byte> twistedImg)
        {
            Image <Gray, byte> baseImgGray    = baseImg.Convert <Gray, byte>();
            Image <Gray, byte> twistedImgGray = twistedImg.Convert <Gray, byte>();
            Brisk            descriptor       = new Brisk();
            GFTTDetector     detector         = new GFTTDetector(40, 0.01, 5, 3, true);
            VectorOfKeyPoint GFP1             = new VectorOfKeyPoint();
            UMat             baseDesc         = new UMat();
            //note: bimg holds the twisted image and timg the base image; the same
            //swap appears in Example #12 below, where an inverse transform is computed
            UMat             bimg             = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2             = new VectorOfKeyPoint();
            UMat             twistedDesc      = new UMat();
            UMat             timg             = baseImgGray.Mat.GetUMat(AccessType.Read);

            detector.DetectRaw(bimg, GFP1);
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            matcher.Add(baseDesc);
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            //int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP1, matches, mask, 1.5, 20);
            Image <Bgr, byte> res = baseImg.CopyBlank();

            Features2DToolbox.DrawMatches(twistedImg, GFP1, baseImg, GFP2, matches, res, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);
            return(res);
        }
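        //A small follow-up sketch (an assumption, not in the original): after
        //VoteForUniqueness, CvInvoke.CountNonZero on the mask gives the number
        //of matches that survived the 0.8 ratio test used in pointComp above.
        public int CountUniqueMatches(Mat mask)
        {
            return(CvInvoke.CountNonZero(mask));
        }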
        //Takes the images to compare, reports the comparison time,
        //and computes the result
        public Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime)
        {
            //homography matrix
            HomographyMatrix homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;

            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);//also outputs the search time

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                //indicates that a match result was found
                //System.Windows.Forms.MessageBox.Show("Match! ");
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);
                Console.WriteLine("width : " + rect.Right + "height : " + rect.Bottom + "\n" + pts.Length + "Up : " + pts[0].X + "," + pts[0].Y + "\n Down : " + "Up : " + pts[1].X + "," + pts[1].Y + "\n Left : " + "Up : " + pts[2].X + "," + pts[2].Y + "\n right : " + "Up : " + pts[3].X + "," + pts[3].Y);
                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            return(result);
        }
Example #4
        public Image <Bgr, Byte> Draw(Image <Bgr, Byte> modelImage, Image <Bgr, Byte> observedImage, List <KeyFrame> keyframes = null)
        {
            //FindMatch(modelImage, observedImage, keyframes);

            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                          matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            #region draw the projected region on the image

            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                PointF[]  pts  = new PointF[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                //pts = CvInvoke.PerspectiveTransform(pts, homography);

                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                }
            }

            #endregion
            //modelImage.Dispose();
            return(result.ToImage <Bgr, Byte>());
        }
Example #5
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime)
        {
            HomographyMatrix homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;

            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            return(result);
        }
        /// <summary>
        /// Draws the matches and homography
        /// </summary>
        /// <param name="knownSign"> known sign </param>
        /// <param name="candidate"> candidate </param>
        /// <param name="signKp"> sign keypoints </param>
        /// <param name="candKp"> candidate keypints </param>
        /// <param name="match"> matches </param>
        /// <returns> resulting image </returns>
        public static Image <Bgr, byte> Draw(Image <Bgr, byte> knownSign, Image <Bgr, byte> candidate, VectorOfKeyPoint signKp, VectorOfKeyPoint candKp, VectorOfVectorOfDMatch match)
        {
            Mat homography;

            //Draw the matched keypoints
            Mat result = new Mat();

            Features2DToolbox.DrawMatches(knownSign, signKp, candidate, candKp,
                                          match, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), null);
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(signKp, candKp, match, null, 2);
            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = new Rectangle(Point.Empty, knownSign.Size);
                PointF[]  pts  = new PointF[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                pts = CvInvoke.PerspectiveTransform(pts, homography);


                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                }
            }
            return(result.ToImage <Bgr, byte>());
        }
Example #7
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public Bitmap GetImageWithDrawnMatches(Bitmap modelImage, Bitmap observedImage, MatchingTechnique matchingTechnique)
        {
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (Image <Bgr, byte> modelImg = new Image <Bgr, byte>(modelImage))
                using (Image <Bgr, byte> observedImg = new Image <Bgr, byte>(observedImage))
                    using (Emgu.CV.Mat modelMat = modelImg.Mat)
                        using (Emgu.CV.Mat observedMat = observedImg.Mat)
                            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                            {
                                ImageFeatureDetector.FindMatches(modelMat, observedMat, out modelKeyPoints, out observedKeyPoints, matches, out Mat mask, out Mat homography, matchingTechnique);

                                try
                                {
                                    using (Mat result = new Mat())
                                    {
                                        Features2DToolbox.DrawMatches(modelMat, modelKeyPoints, observedMat, observedKeyPoints, matches, result, new MCvScalar(255, 0, 0), new MCvScalar(0, 0, 255), mask);

                                        return(result.ToBitmap());
                                    }
                                }
                                finally
                                {
                                    mask?.Dispose();
                                    homography?.Dispose();
                                }
                            }
        }
Example #8
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        private static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime, out double matchPercentage)
        {
            matchPercentage = 0.0;
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography);

                //"match percentage" here is really the relative difference in keypoint counts
                var difference = Math.Abs(modelKeyPoints.Size - observedKeyPoints.Size);
                if (modelKeyPoints.Size > 0)
                {
                    matchPercentage = 100.0 * ((double)difference / (double)modelKeyPoints.Size);
                    //MessageBox.Show(String.Format("The images are {0}% different", matchPercentage));
                }
                else
                {
                    MessageBox.Show("No keypoints found in the model image; it must be a blank image.");
                }

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

#if NETFX_CORE
                    Point[] points = Extensions.ConvertAll <PointF, Point>(pts, Point.Round);
#else
                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
#endif
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }
                #endregion

                return(result);
            }
        }
        public static Mat Draw(Mat modelImage, Mat observedImage)
        {
            var sift = new SIFT();

            var modelKeyPoints    = new VectorOfKeyPoint();
            var observedKeyPoints = new VectorOfKeyPoint();

            UMat modelDescriptors    = new UMat();
            UMat observedDescriptors = new UMat();

            sift.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
            sift.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            matcher.Add(modelDescriptors);

            var matches = new VectorOfVectorOfDMatch();

            matcher.KnnMatch(observedDescriptors, matches, 2, null);

            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);

            var homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 10);

            var result = new Mat();

            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, matches, result,
                                          new MCvScalar(255, 255, 255),
                                          new MCvScalar(0, 0, 0),
                                          mask,
                                          Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);

            PointF[] pts =
            {
                new PointF(rect.Left,  rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left,  rect.Top)
            };
            pts = CvInvoke.PerspectiveTransform(pts, homography);

            Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
            using (VectorOfPoint vp = new VectorOfPoint(points))
            {
                CvInvoke.Polylines(result, vp, true, new MCvScalar(0, 255, 0, 55), 2);
            }

            return(result);
        }
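        //A hypothetical driver for the SIFT-based Draw above; the file names
        //are placeholders, not part of the original example.
        public static void RunSiftDemo()
        {
            using (Mat model = CvInvoke.Imread("model.png", ImreadModes.Grayscale))
                using (Mat scene = CvInvoke.Imread("scene.png", ImreadModes.Grayscale))
                    using (Mat drawn = Draw(model, scene))
                    {
                        CvInvoke.Imshow("SIFT matches", drawn);
                        CvInvoke.WaitKey(0);
                    }
        }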
        public Mat Draw(Mat modelImage, Mat observedImage, out long matchTime, double surfHessianTresh, out Mat homography)
        {
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography, surfHessianTresh);

                string temp = Helper.GetFileName("Trazenje deskriptora"); //Croatian: "descriptor search"
                modelImage.Save(temp);


                //Draw the matched keypoints
                Mat result = new Mat();

                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);


                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }
                #endregion

                //when this is commented out, the algorithm works!
                //Panorama attempt - 15.1.2017

                //MakeMosaic(homography, mmodelImage, observedImage);

                modelKeyPoints.Dispose();
                observedKeyPoints.Dispose();

                return(result);
            }
        }
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime, out long score, out int iwidth, out int iheight, out Point p)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            iwidth  = 0;
            iheight = 0;
            p       = new Point(0, 0);
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography, out score);

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)   //if the template was found in the image, draw it
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

//#if NETFX_CORE
//               Point[] points = Extensions.ConvertAll<PointF, Point>(pts, Point.Round);
//#else
                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
//#endif
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }

                    iwidth  = Convert.ToInt32(GetDistance(points[0], points[1]));
                    iheight = Convert.ToInt32(GetDistance(points[0], points[3]));
                    p       = points[0];
                }
                #endregion

                //comparison result
                return(result);
            }
        }
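        //GetDistance is called above but not defined in this snippet; a plausible
        //Euclidean-distance implementation (an assumption, not the author's code):
        private static double GetDistance(Point a, Point b)
        {
            double dx = a.X - b.X;
            double dy = a.Y - b.Y;

            return(Math.Sqrt(dx * dx + dy * dy));
        }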
Example #12
        private void button3_Click(object sender, EventArgs e)
        {
            GFTTDetector detector = new GFTTDetector(40, 0.01, 5, 3, true);

            var twistedImgGray = twistedImg.Convert <Gray, byte>();
            var baseImgGray    = sourceImage.Convert <Gray, byte>();


            //keypoint descriptor generator
            Brisk descriptor = new Brisk();
            //since in this case we need to compute the inverse transform,
            //the twisted image will serve as the base
            VectorOfKeyPoint GFP1        = new VectorOfKeyPoint();
            UMat             baseDesc    = new UMat();
            UMat             bimg        = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2        = new VectorOfKeyPoint();
            UMat             twistedDesc = new UMat();
            UMat             timg        = baseImgGray.Mat.GetUMat(AccessType.Read);

            //get the raw keypoint information for both images
            detector.DetectRaw(bimg, GFP1);
            //generate descriptors for the detected keypoints
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);

            //class that compares descriptor sets of keypoints
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            //array for storing the keypoint matches
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            //add the descriptors of the base points
            matcher.Add(baseDesc);
            //match them against the descriptors of the twisted image
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            //3rd parameter: the number of nearest neighbors to search among
            //4th parameter: a mask, not needed in this case

            //mask marking the values to discard (outliers and non-unique matches)
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            //keep only the unique matches
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

            Mat homography;

            //obtain the homography matrix
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(GFP1, GFP2, matches, mask, 2);
            destImage  = new Image <Bgr, byte>(sourceImage.Size);
            CvInvoke.WarpPerspective(twistedImg, destImage, homography, destImage.Size);

            Features2DToolbox.DrawMatches(twistedImg, GFP1, sourceImage, GFP2, matches, destImage, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);

            imageBox2.Image = destImage.Resize(640, 480, Inter.Linear);
        }
Example #13
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography);

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 0, 0), new MCvScalar(0, 0, 255), mask);

                for (var i = 0; i < observedKeyPoints.Size; i++)
                {
                    CvInvoke.Circle(observedImage, new Point((int)observedKeyPoints[i].Point.X, (int)observedKeyPoints[i].Point.Y), 2, new MCvScalar(0, 0, 0), 2);
                }
                CvInvoke.Imshow("Observed", observedImage);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

#if NETFX_CORE
                    Point[] points = Extensions.ConvertAll <PointF, Point>(pts, Point.Round);
#else
                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
#endif
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 2);
                    }
                }
                #endregion

                return(result);
            }
        }
Example #14
        public Image <Bgr, byte> ReturnCompared(out Image <Bgr, byte> def, out Image <Bgr, byte> twistdef)
        {
            var image      = sourceImage.Copy();
            var twistedImg = additionalImage.Copy();
            //keypoint descriptor generator
            Brisk        descriptor = new Brisk();
            GFTTDetector detector   = new GFTTDetector(40, 0.01, 5, 3, true);
            //since in this case we need to compute the inverse transform,
            //the twisted image will serve as the base
            VectorOfKeyPoint GFP1           = new VectorOfKeyPoint();
            UMat             baseDesc       = new UMat();
            var              twistedImgGray = twistedImg.Convert <Gray, byte>();
            var              baseImgGray    = image.Convert <Gray, byte>();
            UMat             bimg           = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2           = new VectorOfKeyPoint();
            UMat             twistedDesc    = new UMat();
            UMat             timg           = baseImgGray.Mat.GetUMat(AccessType.Read);

            //get the raw keypoint information for both images
            detector.DetectRaw(bimg, GFP1);
            //generate descriptors for the detected keypoints
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);


            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            //array for storing the keypoint matches
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            //add the descriptors of the base points
            matcher.Add(baseDesc);
            //match them against the descriptors of the twisted image
            matcher.KnnMatch(twistedDesc, matches, 2, null);


            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            //keep only the unique matches
            Mat resM = new Mat(image.Height, image.Width * 2, DepthType.Cv8U, 3);
            var res  = resM.ToImage <Bgr, byte>();

            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP2, matches, mask, 1.5, 20); //the original passed GFP1 twice; the pair should be GFP1 (model) and GFP2 (observed)

            Features2DToolbox.DrawMatches(twistedImg, GFP1, image, GFP2, matches, res, new MCvScalar(255, 0,
                                                                                                     0), new MCvScalar(255, 0, 0), mask);
            def      = image;
            twistdef = twistedImg;
            return(res);
        }
Example #15
        public Tuple <Image <Bgr, byte>, HomographyMatrix> DrawHomography(Image <Gray, byte> model, Image <Gray, byte> observed, double uniquenessThreshold)
        {
            HomographyMatrix  homography = null;
            Image <Bgr, Byte> result     = observed.Convert <Bgr, byte>();

            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;
            int k = 2;

            modelKeyPoints = surfCPU.DetectKeyPointsRaw(model, null);             // Extract features from the object image
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);

            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null);             // Extract features from the observed image

            if (modelKeyPoints.Size <= 0)
            {
                throw new System.ArgumentException("Can't find any keypoints in your model image!");
            }

            if (observedKeyPoints.Size > 0)
            {
                Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float> (DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int> (observedDescriptors.Rows, k);

                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k)) {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte> (dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 10)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 10)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints,
                                                       indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            }

            return(new Tuple <Image <Bgr, byte>, HomographyMatrix>(result, homography));
        }
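        //A hedged usage sketch (an assumption): unpack the tuple returned by
        //DrawHomography to get the annotated image and, if the match was strong
        //enough, the homography.
        public void ShowHomography(Image <Gray, byte> model, Image <Gray, byte> observed)
        {
            Tuple <Image <Bgr, byte>, HomographyMatrix> drawn = DrawHomography(model, observed, 0.8);

            if (drawn.Item2 == null)
            {
                Console.WriteLine("No homography found; the match was too weak.");
            }
            new ImageViewer(drawn.Item1, "matches").Show();
        }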
Example #16
        //private void Matching()
        //{

        //    Image<Bgr, Byte> modelImage = new Image<Bgr, byte>(@"C:\Users\rkosharil\Desktop\Camera Reasearch (EMGU CV)\Matching\Matching\model\" + "1" + ".png");
        //    Image<Bgr, Byte> observedImage = new Image<Bgr, byte>(@"C:\Users\rkosharil\Desktop\Camera Reasearch (EMGU CV)\Matching\Matching\scene\" + "1" + ".png");
        //    Image<Bgr, Byte> imgshow = observedImage.Copy();

        //    double[] minValues, maxValues;
        //    Point[] minLocations, maxLocations;

        //    using (var result = observedImage.MatchTemplate(modelImage, Emgu.CV.CvEnum.TM_TYPE.CV_TM_SQDIFF_NORMED))
        //    {
        //        result.MinMax(out minValues, out maxValues, out minLocations, out maxLocations);

        //        if (maxValues[0] > 0.95)
        //        {
        //            var match = new Rectangle(maxLocations[0], modelImage.Size);
        //            imgshow.Draw(match, new Bgr(Color.Red), 3);
        //            //textBox1.Text = match.X.ToString();
        //            //textBox2.Text = match.Y.ToString();
        //        }
        //        else
        //        {
        //            MessageBox.Show("Match Not Detected");
        //        }
        //    }

        //    //pictureBox2.Image = imgshow.Bitmap;
        //    imageBox4.Image = imgshow;
        //}
        public static Mat Draw(Mat modelImage, Mat observedImage, int no)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out modelKeyPoints, out observedKeyPoints, matches, out mask, out homography);
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 0, 0), new MCvScalar(0, 0, 255), mask);

                if (homography != null)
                {
                    var imgWarped = new Mat();
                    CvInvoke.WarpPerspective(observedImage, imgWarped, homography, modelImage.Size, Inter.Linear, Warp.InverseMap);
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    var       pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    pts = CvInvoke.PerspectiveTransform(pts, homography);
                    var points = new Point[pts.Length];
                    for (int i = 0; i < points.Length; i++)
                    {
                        points[i] = Point.Round(pts[i]);
                    }
                    if (no == 1)
                    {
                        using (var vp = new VectorOfPoint(points))
                        {
                            CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                        }
                        return(result);
                    }
                    if (no == 2)
                    {
                        using (var vp = new VectorOfPoint(points))
                        {
                            CvInvoke.WarpPerspective(observedImage, result, homography, modelImage.Size, Inter.Linear, Warp.InverseMap);
                        }
                    }
                }
                return(result);
            }
        }
Example #17
        public static Mat TrackRect(Mat observedImage, ref float x, ref float y, ref float height, ref float width)
        {
            int iX      = (int)Math.Floor(x * observedImage.Width);
            int iY      = (int)Math.Floor(y * observedImage.Height);
            int iWidth  = (int)Math.Ceiling(width * observedImage.Width);
            int iHeight = (int)Math.Ceiling(height * observedImage.Height);
            var rect    = new Rectangle(iX, iY, iWidth, iHeight);
            var model   = observedImage.ToImage <Bgr, byte>();

            model.ROI = rect;

            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Mat  mask;
            long matchTime;
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            FindMatch(model.Mat, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                      out mask, out homography);

            Mat result = new Mat();

            Features2DToolbox.DrawMatches(model, modelKeyPoints, observedImage, observedKeyPoints,
                                          matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            #region draw the projected region on the image

            if (homography != null)
            {
                //draw a rectangle along the projected model
                rect = new Rectangle(Point.Empty, model.Size);
                PointF[] pts = new PointF[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                pts = CvInvoke.PerspectiveTransform(pts, homography);

                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                }
            }

            #endregion

            return(result);
        }
Example #18
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography);

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }

                #endregion
                var numberOfMatches = CountHowManyPairsExist(mask);
                if (numberOfMatches < 400)
                {
                    Console.WriteLine("Not a good match");
                }
                else
                {
                    Console.WriteLine("Good match");
                }
                return(result);
            }
        }
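        //CountHowManyPairsExist is not shown in this snippet; a plausible
        //implementation (an assumption) counts the mask entries that survived
        //the vote filters:
        private static int CountHowManyPairsExist(Mat mask)
        {
            return(CvInvoke.CountNonZero(mask));
        }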
Example #19
        public static void DrawFeatures(Mat left, Mat right, MatchingResult match, double takeBest, ImageViewer matchedView)
        {
            Mat matchesImage = new Mat();
            VectorOfVectorOfDMatch matches2    = new VectorOfVectorOfDMatch();
            VectorOfKeyPoint       vectorOfKp2 = new VectorOfKeyPoint(match.LeftKps);
            VectorOfKeyPoint       vectorOfKp1 = new VectorOfKeyPoint(match.RightKps);

            matches2.Push(new VectorOfDMatch(match.Matches.ToArray().OrderBy((x) => x.Distance).Take((int)(match.Matches.Size * takeBest)).ToArray()));
            // Features2DToolbox.DrawMatches(left, vectorOfKp1, right, vectorOfKp2, matches2, matchesImage, new Bgr(Color.Red).MCvScalar, new Bgr(Color.Blue).MCvScalar);
            Features2DToolbox.DrawMatches(right, vectorOfKp1, left, vectorOfKp2, matches2, matchesImage, new Bgr(Color.Red).MCvScalar, new Bgr(Color.Blue).MCvScalar);

            matchedView.Source = ImageLoader.ImageSourceForBitmap(matchesImage.Bitmap);
        }
Example #20
        /// <summary>
        /// Show a window that draws the match result
        /// </summary>
        /// <param name="matchData">The data object returned after matching</param>
        /// <param name="observedScene">The feature data of the observed scene</param>
        public static void ShowSURFMatchForm(SURFMatchedData matchData, SURFFeatureData observedScene)
        {
            PointF[] matchPts = GetMatchBoundingBox(matchData.GetHomography(), matchData.GetTemplateSURFData());
            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(matchData.GetTemplateSURFData().GetImg(), matchData.GetTemplateSURFData().GetKeyPoints(), observedScene.GetImg(), observedScene.GetKeyPoints(),
                                                                     matchData.GetIndices(), new Bgr(255, 255, 255), new Bgr(255, 255, 255), matchData.GetMask(), Features2DToolbox.KeypointDrawType.DEFAULT);

            if (matchPts != null)
            {
                result.DrawPolyline(Array.ConvertAll <PointF, Point>(matchPts, Point.Round), true, new Bgr(Color.Red), 2);
            }
            new ImageViewer(result, "Matched image").Show();
        }
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography);

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(System.Drawing.Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

#if NETFX_CORE
                    Point[] points = Extensions.ConvertAll <PointF, Point>(pts, Point.Round);
#else
                    System.Drawing.Point[] points = Array.ConvertAll <PointF, System.Drawing.Point>(pts, System.Drawing.Point.Round);
#endif

                    koordinaterFunksjon(points);

                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(0, 0, 255, 255), 4); // The last number sets the thickness of the drawn line.
                                                                                                // The numbers before it define the color and transparency, in Bgra format.
                    }
                }
                #endregion

                return(result);
            }
        }
        /// <summary>
        /// Method that draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="m_modelImage"> The model image. </param>
        /// <param name="m_observedImage"> The observed image. </param>
        /// <param name="d_matchTime"> The output total time for computing the homography matrix. </param>
        /// <param name="l_score"> The score of matching. </param>
        /// <returns> Method returns the model image and the observed image, the matched features and homography projection. </returns>
        public static Mat Draw(Mat m_modelImage, Mat m_observedImage, out double d_matchTime, out long l_score)
        {
            ErrInfLogger.LockInstance.InfoLog("Start of the Draw");

            Mat m_homography;
            VectorOfKeyPoint v_modelKeyPoints;
            VectorOfKeyPoint v_observedKeyPoints;

            using (VectorOfVectorOfDMatch v_matches = new VectorOfVectorOfDMatch())
            {
                Mat m_mask;
                FindMatch(m_modelImage, m_observedImage, out d_matchTime, out v_modelKeyPoints, out v_observedKeyPoints, v_matches,
                          out m_mask, out m_homography, out l_score);

                //Draw the matched keypoints
                Mat m_result = new Mat();
                Features2DToolbox.DrawMatches(m_modelImage, v_modelKeyPoints, m_observedImage, v_observedKeyPoints,
                                              v_matches, m_result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), m_mask);

                #region draw the projected region on the image

                if (m_homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle r_rect = new Rectangle(Point.Empty, m_modelImage.Size);
                    PointF[]  p_pts  = new PointF[]
                    {
                        new PointF(r_rect.Left, r_rect.Bottom),
                        new PointF(r_rect.Right, r_rect.Bottom),
                        new PointF(r_rect.Right, r_rect.Top),
                        new PointF(r_rect.Left, r_rect.Top)
                    };
                    p_pts = CvInvoke.PerspectiveTransform(p_pts, m_homography);

#if NETFX_CORE
                    Point[] points = Extensions.ConvertAll <PointF, Point>(p_pts, Point.Round);
#else
                    Point[] p_points = Array.ConvertAll <PointF, Point>(p_pts, Point.Round);
#endif
                    using (VectorOfPoint v_vp = new VectorOfPoint(p_points))
                    {
                        CvInvoke.Polylines(m_result, v_vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }
                #endregion

                ErrInfLogger.LockInstance.InfoLog("End of the Draw");
                return(m_result);
            }
        }
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>匹配特征模板
        /// <param name="observedImage">The observed image</param>被匹配图像
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime, out long score, out Point[] observedPoints)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            observedPoints = new Point[4];
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography, out score);

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)   //if the template was found in the image, draw it
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);
                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                    for (int i = 0; i < 4; i++)
                    {
                        observedPoints[i].X = points[i].X;
                        observedPoints[i].Y = points[i].Y;
                    }
                }
                #endregion

                //comparison result
                return(result);
            }
        }
        public Tuple <Image <Bgr, byte>, HomographyMatrix> SURFMatcher_KNN(Image <Gray, byte> model, Image <Gray, byte> observed, SURFDetector surfCPU, List <VectorOfKeyPoint> keyPointsList, double uniquenessThreshold, int TM)
        {
            HomographyMatrix  homography        = null;
            Image <Bgr, Byte> result            = null;
            VectorOfKeyPoint  modelKeyPoints    = keyPointsList.First <VectorOfKeyPoint>();
            VectorOfKeyPoint  observedKeyPoints = keyPointsList.Last <VectorOfKeyPoint>();
            Matrix <int>      indices;
            Matrix <byte>     mask;
            int k = 2;
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);


            try
            {
                result = observed.Convert <Bgr, byte>();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);
                matcher.Add(modelDescriptors);
                if (observedKeyPoints.Size > 0)
                {
                    Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);
                    indices = new Matrix <int>(observedDescriptors.Rows, k);

                    using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                    {
                        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                        mask = new Matrix <byte>(dist.Rows, 1);
                        mask.SetValue(255);
                        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                    }

                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                    if (nonZeroCount >= TM)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                        if (nonZeroCount >= TM)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                        }
                    }
                    result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints,
                                                           indices, new Bgr(100, 200, 214), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
                }
                return(new Tuple <Image <Bgr, byte>, HomographyMatrix>(result, homography));
            }
            catch (Exception)
            {
                throw;
            }
        }
        public static void FindMatch(string pageFile, string templateFile)
        {
            Image <Rgb, byte> page     = getPreprocessedImage(pageFile);
            Image <Rgb, byte> template = getPreprocessedImage(templateFile);

            var detector = new ORBDetector();
            VectorOfKeyPoint templateKeyPoints = new VectorOfKeyPoint();
            Mat templateDescriptors            = new Mat();

            detector.DetectAndCompute(template, null, templateKeyPoints, templateDescriptors, false);

            VectorOfKeyPoint pageKeyPoints = new VectorOfKeyPoint();
            Mat pageDescriptors            = new Mat();

            detector.DetectAndCompute(page, null, pageKeyPoints, pageDescriptors, false);
            using (var matcher = new BFMatcher(DistanceType.L1))
            {
                matcher.Add(templateDescriptors);
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

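                //Alternative path (left unused): Match keeps only the single
                //best match per descriptor, whereas KnnMatch below keeps k = 2
                //candidates so VoteForUniqueness can apply the ratio test.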
                //VectorOfDMatch matches2 = new VectorOfDMatch();
                //matcher.Match(pageDescriptors, matches2);


                matcher.KnnMatch(pageDescriptors, matches, 2, null);

                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
                Mat homography   = new Mat();
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(templateKeyPoints, pageKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(templateKeyPoints, pageKeyPoints, matches, mask, 2);
                    }
                }

                Mat result = new Mat();
                Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                //Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches2, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), null, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                MainForm.This.PageBox.Image = result.ToBitmap();
            }
        }
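
        // Follow-up sketch (assuming the same EmguCV 3.x API as FindMatch above):
        // GetHomographyMatrixFromMatchedFeatures maps template coordinates to
        // page coordinates, so warping with the inverse map rectifies the page
        // back onto the template plane. The method name is hypothetical.
        public static Mat WarpPageToTemplate(Mat page, Mat homography, Size templateSize)
        {
            Mat warped = new Mat();
            CvInvoke.WarpPerspective(page, warped, homography, templateSize,
                                     Inter.Linear, Warp.InverseMap);
            return warped;
        }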
Exemple #26
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <param name="featureDetectorExtractor">The feature detector extractor</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Mat Draw(Mat modelImage, Mat observedImage, Feature2D featureDetectorExtractor, out long matchTime)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, featureDetectorExtractor, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography);

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                    Point[] points = new Point[pts.Length];
                    for (int i = 0; i < points.Length; i++)
                    {
                        points[i] = Point.Round(pts[i]);
                    }

                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }
                #endregion

                return(result);
            }
        }
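
        // Usage sketch (a minimal sketch, assuming an EmguCV 3.x build where
        // CvInvoke.Imread and ImreadModes are available; file names and this
        // method name are hypothetical). Any Feature2D works here; ORBDetector
        // is used because it needs no extra parameters.
        public static void DrawUsage()
        {
            Mat model    = CvInvoke.Imread("box.png", ImreadModes.Grayscale);
            Mat observed = CvInvoke.Imread("box_in_scene.png", ImreadModes.Grayscale);
            long matchTime;
            using (Feature2D orb = new ORBDetector())
            using (Mat result = Draw(model, observed, orb, out matchTime))
            {
                CvInvoke.Imshow("Matched in " + matchTime + " ms", result);
                CvInvoke.WaitKey();
            }
        }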
Exemple #27
        public static Mat ClassifyAndShowResult(Mat modelImage, Mat observedImage, double uniquenessThreshold, int k, out long score)
        {
            VectorOfKeyPoint       modelKeyPoints    = null;
            VectorOfKeyPoint       observedKeyPoints = null;
            VectorOfVectorOfDMatch matches           = null;
            Mat homography = null;

            score = 0;
            var mask   = ClassifyForDrawing(modelImage, observedImage, uniquenessThreshold, k, out modelKeyPoints, out observedKeyPoints, out matches, out homography, out score);
            var result = new Mat();

            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                          matches, result, new MCvScalar(0, 0, 0), new MCvScalar(0, 0, 0), mask);
            Draw(homography, result, modelImage);
            return(result);
        }
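
        // Usage sketch: 0.8 and k = 2 are the conventional Lowe ratio-test
        // settings; minScore and this method name are hypothetical. The score
        // out-parameter comes from ClassifyForDrawing (not shown here).
        public static bool LooksLikeModel(Mat model, Mat observed, long minScore)
        {
            long score;
            using (Mat visualization = ClassifyAndShowResult(model, observed, 0.8, 2, out score))
            {
                CvInvoke.Imshow("Classification, score " + score, visualization);
                CvInvoke.WaitKey();
            }
            return score >= minScore;
        }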
Exemple #28
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Mat Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime)
        {
            HomographyMatrix homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Matrix <byte> mask;

                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography);

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = modelImage.ROI;
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    homography.ProjectPoints(pts);

                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }

                #endregion

                return(result);
            }
        }
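
        // Note: this legacy variant relies on HomographyMatrix.ProjectPoints,
        // which transforms the corner array in place, while the Mat-based
        // examples above use CvInvoke.PerspectiveTransform, which returns a new
        // array. Minimal usage sketch (assuming a build where CvInvoke.Imwrite
        // exists alongside HomographyMatrix; file names are hypothetical):
        public static void DrawLegacyUsage()
        {
            long matchTime;
            using (Image <Gray, Byte> model = new Image <Gray, Byte>("box.png"))
            using (Image <Gray, Byte> observed = new Image <Gray, Byte>("box_in_scene.png"))
            {
                Mat result = Draw(model, observed, out matchTime);
                CvInvoke.Imwrite("matches.png", result);
            }
        }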
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <param name="check">Output flag: true when FindMatch reports a valid match.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public Mat Draw(Mat modelImage, Mat observedImage, out long matchTime, out bool check)
        {

            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask = new Mat();
                check = FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                                  out mask, out homography);
                // Console.WriteLine("Model points: " + modelKeyPoints.Size + "  Observed points:" + observedKeyPoints.Size + "  Match points:"+ matches.Size);
                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }

                #endregion

                return(result);
            }
        }
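
        // Usage sketch: the check flag reports whether FindMatch judged the
        // match valid, so callers can discard visualizations of non-matches.
        // The wrapper name is hypothetical.
        public Mat DrawIfMatched(Mat model, Mat observed)
        {
            long matchTime;
            bool matched;
            Mat result = Draw(model, observed, out matchTime, out matched);
            return matched ? result : null;  // null signals "no reliable match"
        }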
Exemple #30
        public static Mat Draw(UMat modelImage, UMat observedImage, out long matchTime, Mat model, Mat source)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask = new Mat();
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography);

                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(model, modelKeyPoints, source, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                #region draw the projected region on the image

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, model.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }

                #endregion

                return(result);
            }
        }
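
        // Usage sketch (assuming EmguCV 3.x): GetUMat exposes the same pixels
        // through the OpenCL transparent API, so detection and matching can run
        // on the GPU while drawing still uses the original Mats. The wrapper
        // name is hypothetical.
        public static Mat DrawWithUMat(Mat model, Mat source, out long matchTime)
        {
            using (UMat uModel = model.GetUMat(AccessType.Read))
            using (UMat uSource = source.GetUMat(AccessType.Read))
            {
                return Draw(uModel, uSource, out matchTime, model, source);
            }
        }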