Example No. 1
      public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
      {
         int k = 2;
         double uniquenessThreshold = 0.80;

         Stopwatch watch;
         homography = null;

         modelKeyPoints = new VectorOfKeyPoint();
         observedKeyPoints = new VectorOfKeyPoint();

         using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
         using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
         {
            KAZE featureDetector = new KAZE();

            //extract features from the object image
            Mat modelDescriptors = new Mat();
            featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            Mat observedDescriptors = new Mat();
            featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            
            matcher.Add(modelDescriptors);

            matcher.KnnMatch(observedDescriptors, matches, k, null);
            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);
            if (nonZeroCount >= 4)
            {
               nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                  matches, mask, 1.5, 20);
               if (nonZeroCount >= 4)
                  homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                     observedKeyPoints, matches, mask, 2);
            }

            watch.Stop();

         }
         matchTime = watch.ElapsedMilliseconds;
      }
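
A minimal call-site sketch for the FindMatch above; the file names, the ImreadModes flag, and the console output are illustrative assumptions, not part of the original snippet:

      using (Mat model = CvInvoke.Imread("model.png", ImreadModes.Grayscale))   // assumed path
      using (Mat scene = CvInvoke.Imread("scene.png", ImreadModes.Grayscale))   // assumed path
      using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
      {
         long matchTime;
         VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
         Mat mask, homography;
         FindMatch(model, scene, out matchTime, out modelKeyPoints, out observedKeyPoints,
            matches, out mask, out homography);
         Console.WriteLine(homography != null
            ? string.Format("Model located in {0} ms", matchTime)
            : "No reliable match found");
      }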
Example No. 2
      public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
      {
         int k = 2;
         double uniquenessThreshold = 0.8;
         double hessianThresh = 300;
         
         Stopwatch watch;
         homography = null;

         modelKeyPoints = new VectorOfKeyPoint();
         observedKeyPoints = new VectorOfKeyPoint();

         #if !__IOS__
         if (CudaInvoke.HasCuda)
         {
            CudaSURF surfCuda = new CudaSURF((float) hessianThresh);
            using (GpuMat gpuModelImage = new GpuMat(modelImage))
            //extract features from the object image
            using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
            using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
            using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
            {
               surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
               watch = Stopwatch.StartNew();

               // extract features from the observed image
               using (GpuMat gpuObservedImage = new GpuMat(observedImage))
               using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
               using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
               //using (GpuMat tmp = new GpuMat())
               //using (Stream stream = new Stream())
               {
                  matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                  surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                  mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                  mask.SetTo(new MCvScalar(255));
                  Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                  int nonZeroCount = CvInvoke.CountNonZero(mask);
                  if (nonZeroCount >= 4)
                  {
                     nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                        matches, mask, 1.5, 20);
                     if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                           observedKeyPoints, matches, mask, 2);
                  }
               }
               watch.Stop();
            }
         }
         else
         #endif
         {
            using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
            using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
            {
               SURF surfCPU = new SURF(hessianThresh);
               //extract features from the object image
               UMat modelDescriptors = new UMat();
               surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

               watch = Stopwatch.StartNew();

               // extract features from the observed image
               UMat observedDescriptors = new UMat();
               surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
               BFMatcher matcher = new BFMatcher(DistanceType.L2);
               matcher.Add(modelDescriptors);

               matcher.KnnMatch(observedDescriptors, matches, k, null);
               mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
               mask.SetTo(new MCvScalar(255));
               Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

               int nonZeroCount = CvInvoke.CountNonZero(mask);
               if (nonZeroCount >= 4)
               {
                  nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                     matches, mask, 1.5, 20);
                  if (nonZeroCount >= 4)
                     homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                        observedKeyPoints, matches, mask, 2);
               }

               watch.Stop();
            }
         }
         matchTime = watch.ElapsedMilliseconds;
      }
Example No. 3
        public Mat Calculate(Bitmap referenceBitmap, Bitmap currentBitmap)
        {
            Mat homography;
            using (var detector = new SURF(threshold))
            using (var model = new Image<Gray, byte>(referenceBitmap))
            using (var modelMat = model.Mat.ToUMat(AccessType.Read))
            using (var modelKeyPoints = new VectorOfKeyPoint())
            using (var modelDescriptors = new UMat())
            using (var observed = new Image<Gray, byte>(currentBitmap))
            using (var observedMat = observed.Mat.ToUMat(AccessType.Read))
            using (var observedKeyPoints = new VectorOfKeyPoint())
            using (var observedDescriptors = new UMat())
            using (var matcher = new BFMatcher(DistanceType.L2))
            using (var matches = new VectorOfVectorOfDMatch())
            {
                detector.DetectAndCompute(modelMat, null, modelKeyPoints, modelDescriptors, false);
                detector.DetectAndCompute(observedMat, null, observedKeyPoints, observedDescriptors, false);

                matcher.Add(modelDescriptors);
                matcher.KnnMatch(observedDescriptors, matches, k, null);

                homography = TryFindHomography(modelKeyPoints, observedKeyPoints, matches);
            }

            return homography;
        }
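
A hedged usage sketch for Calculate; the bitmap paths are assumptions, and the threshold and k fields belong to the surrounding class (not shown in this snippet):

            using (var reference = new Bitmap("reference.jpg"))   // assumed path
            using (var current = new Bitmap("current.jpg"))       // assumed path
            using (Mat homography = Calculate(reference, current))
            {
                Console.WriteLine(homography == null ? "No homography found" : "Homography found");
            }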
Example No. 4
        /// <summary>
        /// Get the homography projection of the model image onto the observed image.
        /// </summary>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The four corners of the model image projected into the observed image, or an empty array when no match is found.</returns>
        public static Point[] GetMatchingPoints(Mat observedImage, out long matchTime)
        {
            matchTime = 0;
            Point[] result = new Point[0];
            if (modelImage == null) return result;

            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (Mat gray = new Mat())
            {
                CvInvoke.CvtColor(observedImage, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                {
                    Mat mask;
                    FindMatch(modelImage, gray, out matchTime, out modelKeyPoints, out observedKeyPoints, matches, out mask, out homography);
                    if (homography != null)
                    {
                        Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                        PointF[] pts = new PointF[] { new PointF(rect.Left, rect.Bottom), new PointF(rect.Right, rect.Bottom), new PointF(rect.Right, rect.Top), new PointF(rect.Left, rect.Top) };
                        pts = CvInvoke.PerspectiveTransform(pts, homography);
                        result = Array.ConvertAll<PointF, Point>(pts, Point.Round);
                    }
                    return result;
                }
            }
        }
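
The returned corners can be drawn straight onto the frame, mirroring the Polylines overlay of Example No. 11; in this sketch the frame variable is an assumed BGR Mat:

            long matchTime;
            Point[] corners = GetMatchingPoints(frame, out matchTime);
            if (corners.Length == 4)
            {
                // Outline the detected model region in red.
                using (VectorOfPoint vp = new VectorOfPoint(corners))
                    CvInvoke.Polylines(frame, vp, true, new MCvScalar(0, 0, 255), 2);
            }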
Example No. 5
        private Mat TryFindHomography(VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints,
            VectorOfVectorOfDMatch matches)
        {
            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));

            try
            {
                Features2DToolbox.VoteForUniqueness(matches, threshold, mask);

                var nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount < 4)
                {
                    return null;
                }

                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                    matches, mask, 1.5, 20);

                if (nonZeroCount >= 4)
                {
                    return Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                        observedKeyPoints, matches, mask, 2);
                }
            }
            catch (Exception)
            {
                return null;
            }
            return null;
        }
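
When TryFindHomography returns a non-null matrix, the model outline can be projected the same way the other examples do; a sketch assuming modelSize holds the model image's size:

            Mat homography = TryFindHomography(modelKeyPoints, observedKeyPoints, matches);
            if (homography != null)
            {
                Rectangle rect = new Rectangle(Point.Empty, modelSize);   // modelSize: assumed
                PointF[] pts =
                {
                    new PointF(rect.Left, rect.Bottom), new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top), new PointF(rect.Left, rect.Top)
                };
                pts = CvInvoke.PerspectiveTransform(pts, homography);   // corners in observed-image space
            }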
Example No. 6
        public Mat Calculate(Bitmap referenceBitmap, Bitmap currentBitmap)
        {
            Mat homography;

            using (var detector = new CudaSURF(threshold))
            using (var model = new Image<Gray, byte>(referenceBitmap))
            using (var observed = new Image<Gray, byte>(currentBitmap))
            using (var modelMat = new GpuMat(model))
            using (var modelKeyPointsRaw = detector.DetectKeyPointsRaw(modelMat))
            using (var modelKeyPoints = new VectorOfKeyPoint())
            using (var modelDescriptorsRaw = detector.ComputeDescriptorsRaw(modelMat, null, modelKeyPointsRaw))
            using (var observedMat = new GpuMat(observed))
            using (var observedKeyPointsRaw = detector.DetectKeyPointsRaw(observedMat))
            using (var observedKeyPoints = new VectorOfKeyPoint())
            using (var observedDescriptorsRaw = detector.ComputeDescriptorsRaw(observedMat, null, observedKeyPointsRaw))
            using (
                var matcher =
                    new CudaBFMatcher(DistanceType.L2))
            using (var matches = new VectorOfVectorOfDMatch())
            {
                matcher.KnnMatch(observedDescriptorsRaw, modelDescriptorsRaw, matches, k);

                detector.DownloadKeypoints(modelKeyPointsRaw, modelKeyPoints);
                detector.DownloadKeypoints(observedKeyPointsRaw, observedKeyPoints);

                homography = TryFindHomography(modelKeyPoints, observedKeyPoints, matches);
            }

            return homography;
        }
Example No. 7
 /// <summary>
 /// Find the k-nearest match
 /// </summary>
 /// <param name="queryDescriptors">An n x m matrix of descriptors to be query for nearest neighbors. n is the number of descriptor and m is the size of the descriptor</param>
 /// <param name="k">Number of nearest neighbors to search for</param>
 /// <param name="mask">Can be null if not needed. An n x 1 matrix. If 0, the query descriptor in the corresponding row will be ignored.</param>
 /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
 public void KnnMatch(IInputArray queryDescriptors, IInputArray trainDescriptors, VectorOfVectorOfDMatch matches, int k, IInputArray mask = null, bool compactResult = false)
 {
    using (InputArray iaQueryDescriptors = queryDescriptors.GetInputArray())
    using (InputArray iaTrainDescriptors = trainDescriptors.GetInputArray() )
    using (InputArray iaMask = (mask == null ? InputArray.GetEmpty() : mask.GetInputArray()))
       CudaInvoke.cveCudaDescriptorMatcherKnnMatch(_ptr, iaQueryDescriptors, iaTrainDescriptors, matches, k, iaMask, compactResult);
 }
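
A sketch of calling this CUDA overload; the two GpuMat descriptor inputs are assumed to come from a CUDA extractor such as the CudaSURF flow in Example No. 2:

    using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
    using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
    {
       // gpuObservedDescriptors / gpuModelDescriptors: GpuMats already uploaded to the GPU.
       matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, 2);
    }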
Example No. 8
        public List<Object> findObjects(VideoReader videoReader, IDepthReader depthReader, Action<ushort[], CameraSpacePoint[]> mappingFunction, IProgress<int> progress)
        {
            List<Object> objects = new List<Object>();
            var patternNameToFrames = new Dictionary<string, List<int>>();

            for (int i = 0; i < videoReader.frameCount; i++)
            {
                string[] patterns = Directory.GetFiles(patternDirectory);

                foreach (var pattern in patterns)
                {
                    if (pattern.EndsWith(".png"))
                    {
                        long matchTime;
                        using (Mat modelImage = CvInvoke.Imread(pattern, LoadImageType.Grayscale))
                        using (Mat observedImage = videoReader.getFrame(i))
                        using (Mat grayImage = new Mat())
                        {
                            CvInvoke.CvtColor(observedImage, grayImage, ColorConversion.Rgb2Gray);
                            Mat homography;
                            VectorOfKeyPoint modelKeyPoints;
                            VectorOfKeyPoint observedKeyPoints;
                            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                            {
                                Mat mask;
                                DrawMatches.FindMatch(modelImage, grayImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                                out mask, out homography);

                                if (homography != null)
                                {
                                    //draw a rectangle along the projected model
                                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                                    System.Drawing.PointF[] pts = new System.Drawing.PointF[]
                                    {
                                      new System.Drawing.PointF(rect.Left, rect.Bottom),
                                      new System.Drawing.PointF(rect.Right, rect.Bottom),
                                      new System.Drawing.PointF(rect.Right, rect.Top),
                                      new System.Drawing.PointF(rect.Left, rect.Top)
                                    };
                                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                                    Point[] points = Array.ConvertAll<System.Drawing.PointF, Point>(pts, Point.Round);
                                }
                            }
                        }
                    }
                }
            }

            return objects;
        }
Example No. 9
        private void process ()
        {
            string[] patterns = Directory.GetFiles("pattern");

            foreach (var pattern in patterns)
            {
                Console.WriteLine(pattern);
                if (pattern.EndsWith(".png"))
                {
                    long matchTime;
                    using (Mat modelImage = CvInvoke.Imread(pattern, LoadImageType.Grayscale))
                    using (Mat observedImage = capture.QueryFrame())
                    using (Mat grayImage = new Mat())
                    {
                        CvInvoke.CvtColor(observedImage, grayImage, ColorConversion.Rgb2Gray);
                        Mat homography;
                        VectorOfKeyPoint modelKeyPoints;
                        VectorOfKeyPoint observedKeyPoints;
                        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        {
                            Mat mask;
                            DrawMatches.FindMatch(modelImage, grayImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                            out mask, out homography);

                            if (homography != null)
                            {
                                Console.WriteLine("Find homography");
                                //draw a rectangle along the projected model
                                Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                                PointF[] pts = new PointF[]
                                {
                                      new PointF(rect.Left, rect.Bottom),
                                      new PointF(rect.Right, rect.Bottom),
                                      new PointF(rect.Right, rect.Top),
                                      new PointF(rect.Left, rect.Top)
                                };
                                pts = CvInvoke.PerspectiveTransform(pts, homography);

                                Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
                                foreach ( var point in points)
                                {
                                    Console.WriteLine(point);
                                }
                                markedPoints[pattern] = points;
                            }
                        }
                    }
                }
            }
        }
Example No. 10
 /// <summary>
 /// Draw the matched keypoints between the model image and the observed image.
 /// </summary>
 /// <param name="modelImage">The model image</param>
 /// <param name="modelKeypoints">The keypoints in the model image</param>
 /// <param name="observerdImage">The observed image</param>
 /// <param name="observedKeyPoints">The keypoints in the observed image</param>
 /// <param name="matchColor">The color for the match correspondence lines</param>
 /// <param name="singlePointColor">The color for highlighting the keypoints</param>
 /// <param name="mask">The mask for the matches. Use null for all matches.</param>
 /// <param name="flags">The drawing type</param>
 /// <param name="result">The image where model and observed image is displayed side by side. Matches are drawn as indicated by the flag</param>
 /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
 public static void DrawMatches(
    IInputArray modelImage, VectorOfKeyPoint modelKeypoints,
    IInputArray observerdImage, VectorOfKeyPoint observedKeyPoints,
    VectorOfVectorOfDMatch matches,
    IInputOutputArray result,
    MCvScalar matchColor, MCvScalar singlePointColor,
    IInputArray mask = null,
    KeypointDrawType flags = KeypointDrawType.Default)
 {
    using (InputArray iaModelImage = modelImage.GetInputArray())
    using (InputArray iaObserverdImage = observerdImage.GetInputArray())
    using (InputOutputArray ioaResult = result.GetInputOutputArray())
    using (InputArray iaMask = mask == null ? InputArray.GetEmpty() : mask.GetInputArray())
    CvInvoke.drawMatchedFeatures(iaObserverdImage, observedKeyPoints, iaModelImage,
       modelKeypoints, matches, ioaResult, ref matchColor, ref singlePointColor, iaMask , flags);
 }
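
A short sketch of the call, with inputs as produced by a FindMatch such as Example No. 1; the white match and keypoint colors are arbitrary choices:

    Mat result = new Mat();
    Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
       matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);
    // 'result' now holds the two images side by side with correspondence lines drawn.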
Example No. 11
      /// <summary>
      /// Draw the model image and observed image, the matched features and homography projection.
      /// </summary>
      /// <param name="modelImage">The model image</param>
      /// <param name="observedImage">The observed image</param>
      /// <param name="matchTime">The output total time for computing the homography matrix.</param>
      /// <returns>The model image and observed image, the matched features and homography projection.</returns>
      public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
      {
         Mat homography;
         VectorOfKeyPoint modelKeyPoints;
         VectorOfKeyPoint observedKeyPoints;
         using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
         {
            Mat mask;
            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
               out mask, out homography);

            //Draw the matched keypoints
            Mat result = new Mat();
            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
               matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

            #region draw the projected region on the image

            if (homography != null)
            {
               //draw a rectangle along the projected model
               Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
               PointF[] pts = new PointF[]
               {
                  new PointF(rect.Left, rect.Bottom),
                  new PointF(rect.Right, rect.Bottom),
                  new PointF(rect.Right, rect.Top),
                  new PointF(rect.Left, rect.Top)
               };
               pts = CvInvoke.PerspectiveTransform(pts, homography);

#if NETFX_CORE
               Point[] points = Extensions.ConvertAll<PointF, Point>(pts, Point.Round);
#else
               Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
#endif
               using (VectorOfPoint vp = new VectorOfPoint(points))
               {
                  CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
               }
            }
#endregion

            return result;

         }
      }
Example No. 12
 /// <summary>
 /// Find the k-nearest match
 /// </summary>
 /// <param name="queryDescriptor">An n x m matrix of descriptors to be query for nearest neighbours. n is the number of descriptor and m is the size of the descriptor</param>
 /// <param name="k">Number of nearest neighbors to search for</param>
 /// <param name="mask">Can be null if not needed. An n x 1 matrix. If 0, the query descriptor in the corresponding row will be ignored.</param>
 /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
 public void KnnMatch(IInputArray queryDescriptor, VectorOfVectorOfDMatch matches, int k, IInputArray mask)
 {
     using (InputArray iaQueryDescriptor = queryDescriptor.GetInputArray())
     using (InputArray iaMask = mask == null ? InputArray.GetEmpty() : mask.GetInputArray())
        DescriptorMatcherInvoke.CvDescriptorMatcherKnnMatch(_descriptorMatcherPtr, iaQueryDescriptor, matches, k, iaMask);
 }
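
The typical call pattern for this overload, as used throughout these examples; the descriptor Mats are assumed to come from a DetectAndCompute call:

     BFMatcher matcher = new BFMatcher(DistanceType.L2);
     matcher.Add(modelDescriptors);                 // register the model (train) descriptors
     using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
     {
        matcher.KnnMatch(observedDescriptors, matches, 2, null);   // 2 nearest neighbours per query row
     }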
Example No. 13
        public Bitmap DrawSift(Image <Rgb, byte> modelimage, Image <Rgb, byte> observedimage)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;


            VectorOfKeyPoint modelKeyPoints    = new VectorOfKeyPoint(),
                             observedKeyPoints = new VectorOfKeyPoint();

            Mat modelDescriptors    = new Mat();
            Mat observedDescriptors = new Mat();

            //observedKeyPoints = observedKeyPoints.Resize(1.0 / Compression, Inter.Area);
            using (SIFT siftCPU = new SIFT(0, 5, 0.04, 10.0, 1.6))
            {
                siftCPU.DetectAndCompute(modelimage, null, modelKeyPoints, modelDescriptors, false);
                observedKeyPoints = new VectorOfKeyPoint(siftCPU.Detect(observedimage));
                siftCPU.Compute(observedimage, observedKeyPoints, observedDescriptors);
            }
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                    using (Emgu.CV.Features2D.DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(modelDescriptors);
                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                    }

            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            Mat homography = null;

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                           matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                          observedKeyPoints, matches, mask, 2);
                }
            }

            observedimage = new Image <Rgb, byte>(DrawZone(observedimage.Mat, observedKeyPoints, matches, mask).Bitmap);
            //modelKeyPoints.FilterByPixelsMask(new Image<Gray, byte>(mask.Bitmap));
            //observedKeyPoints.FilterByPixelsMask(new Image<Gray, byte>(mask.Bitmap));

            Mat result = new Mat();

            //Draw the matched keypoints
            Features2DToolbox.DrawMatches(modelimage, modelKeyPoints, observedimage, observedKeyPoints,
                                          matches, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), mask);
            if (homography != null)
            {
                //draw a rectangle along the projected model
                SD.Rectangle rect = new SD.Rectangle(SD.Point.Empty, modelimage.Size);
                PointF[]     pts  = new PointF[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                pts = CvInvoke.PerspectiveTransform(pts, homography);

#if NETFX_CORE
                Point[] points = Extensions.ConvertAll <PointF, Point>(pts, Point.Round);
#else
                SD.Point[] points = Array.ConvertAll <PointF, SD.Point>(pts, SD.Point.Round);
#endif
                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(0, 0, 255), 2);
                }
            }

            return(result.Bitmap);
        }
Example No. 14
         /// <summary>
         /// Filter the matched features, rejecting any match that is not unique.
         /// </summary>
         /// <param name="uniquenessThreshold">The distance ratio at or below which a match is considered unique; a good value is 0.8</param>
         /// <param name="mask">This is both input and output. This matrix indicates which row is valid for the matches.</param>
         /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param> 
         public static void VoteForUniqueness(VectorOfVectorOfDMatch matches, double uniquenessThreshold, Mat mask)
         {
            MDMatch[][] mArr = matches.ToArrayOfArray();
            byte[] maskData = new byte[mArr.Length];
            GCHandle maskHandle = GCHandle.Alloc(maskData, GCHandleType.Pinned);
            using (Mat m = new Mat(mArr.Length, 1, DepthType.Cv8U, 1, maskHandle.AddrOfPinnedObject(), 1))
            {
               mask.CopyTo(m);
               for (int i = 0; i < mArr.Length; i++)
               {
                  // Lowe's ratio test: a match is unique only when the best distance is
                  // clearly smaller than the second best; non-unique matches are zeroed
                  // out of the mask so they are rejected.
                  bool unique = maskData[i] != 0 && mArr[i].Length >= 2
                     && (mArr[i][0].Distance / mArr[i][1].Distance) <= uniquenessThreshold;
                  maskData[i] = unique ? (byte)255 : (byte)0;
               }

               m.CopyTo(mask);
            }
            maskHandle.Free();

         }
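
For intuition, the effect of the ratio test above on two hypothetical 2-NN matches at the customary 0.8 threshold:

            // best = 0.21, second best = 0.45  ->  ratio 0.47 <= 0.8  ->  unique, mask row = 255
            // best = 0.40, second best = 0.44  ->  ratio 0.91 >  0.8  ->  ambiguous, mask row = 0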
Example No. 15
      private void FindStopSign(Mat img, List<Mat> stopSignList, List<Rectangle> boxList, VectorOfVectorOfPoint contours, int[,] hierarchy, int idx)
      {
         for (; idx >= 0; idx = hierarchy[idx, 0])
         {
            using (VectorOfPoint c = contours[idx])
            using (VectorOfPoint approx = new VectorOfPoint())
            {
               CvInvoke.ApproxPolyDP(c, approx, CvInvoke.ArcLength(c, true) * 0.02, true);
               double area = CvInvoke.ContourArea(approx);
               if (area > 200)
               {
                  double ratio = CvInvoke.MatchShapes(_octagon, approx, Emgu.CV.CvEnum.ContoursMatchType.I3);

                  if (ratio > 0.1) //not a good match of contour shape
                  {
                     //check children
                      if (hierarchy[idx, 2] >= 0)
                         FindStopSign(img, stopSignList, boxList, contours, hierarchy, hierarchy[idx, 2]);
                     continue;
                  }

                  Rectangle box = CvInvoke.BoundingRectangle(c);

                  Mat candidate = new Mat();
                  using (Mat tmp = new Mat(img, box))
                     CvInvoke.CvtColor(tmp, candidate, ColorConversion.Bgr2Gray);

                  //set the value of pixels not in the contour region to zero
                  using (Mat mask = new Mat(candidate.Size.Height, candidate.Width, DepthType.Cv8U, 1))
                  {
                     mask.SetTo(new MCvScalar(0));
                     CvInvoke.DrawContours(mask, contours, idx, new MCvScalar(255), -1, LineType.EightConnected, null, int.MaxValue, new Point(-box.X, -box.Y));

                     double mean = CvInvoke.Mean(candidate, mask).V0;
                     CvInvoke.Threshold(candidate, candidate, mean, 255, ThresholdType.Binary);
                     CvInvoke.BitwiseNot(candidate, candidate);
                     CvInvoke.BitwiseNot(mask, mask);

                     candidate.SetTo(new MCvScalar(0), mask);
                  }

                  int minMatchCount = 8;
                  double uniquenessThreshold = 0.8;
                  VectorOfKeyPoint _observeredKeypoint = new VectorOfKeyPoint();
                  Mat _observeredDescriptor = new Mat();
                  _detector.DetectAndCompute(candidate, null, _observeredKeypoint, _observeredDescriptor, false);

                  if (_observeredKeypoint.Size >= minMatchCount)
                  {
                     int k = 2;

                     Mat mask;

                     using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                     {
                        _modelDescriptorMatcher.KnnMatch(_observeredDescriptor, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                     }

                     int nonZeroCount = CvInvoke.CountNonZero(mask);
                     if (nonZeroCount >= minMatchCount)
                     {
                        boxList.Add(box);
                        stopSignList.Add(candidate);
                     }
                  }
               }
            }
         }
      }
Example No. 16
 /// <summary>
 /// Eliminate the matched features whose scale and rotation do not agree with the majority's scale and rotation.
 /// </summary>
 /// <param name="rotationBins">The numbers of bins for rotation, a good value might be 20 (which means each bin covers 18 degree)</param>
 /// <param name="scaleIncrement">This determines the different in scale for neighbor hood bins, a good value might be 1.5 (which means matched features in bin i+1 is scaled 1.5 times larger than matched features in bin i</param>
 /// <param name="modelKeyPoints">The keypoints from the model image</param>
 /// <param name="observedKeyPoints">The keypoints from the observed image</param>
 /// <param name="mask">This is both input and output. This matrix indicates which row is valid for the matches.</param>
 /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
 /// <returns> The number of non-zero elements in the resulting mask</returns>
 public static int VoteForSizeAndOrientation(VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints,
    VectorOfVectorOfDMatch matches, Mat mask, double scaleIncrement, int rotationBins)
 {
    return CvInvoke.voteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, scaleIncrement,
       rotationBins);
 }
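
A sketch of where this vote sits in the match-filtering cascade, with inputs as in Example No. 1; 1.5 and 20 are the bin parameters suggested in the remarks above:

    Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
    if (CvInvoke.CountNonZero(mask) >= 4)
    {
       int survivors = Features2DToolbox.VoteForSizeAndOrientation(
          modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
       // At least 4 surviving matches are needed before estimating a homography.
    }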
Example No. 17
        public static bool Draw(Mat modelImage, Mat observedImage, out long matchTime, out Rectangle targetPos, double threshold)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                          out mask, out homography, threshold);
                bool haveHomography = false;
                //Draw the matched keypoints
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(0, 0, 0), mask);


                #region draw the projected region on the image
                Mat img = new Mat();
                if (homography != null)
                {
                    haveHomography = true;
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);


                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);


                    Rectangle targetArea = new Rectangle(
                        points[0].X,
                        points[0].Y,
                        points[2].X - points[0].X,
                        points[2].Y - points[0].Y);
                    img = observedImage.Clone();
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                    //CvInvoke.Imshow("re", result);
                    //CvInvoke.WaitKey(0);
                    targetPos = targetArea;
                }
                else
                {
                    targetPos = new Rectangle();
                }
                #endregion

                //return img;
                return(haveHomography);
            }
        }
Example No. 18
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out long score)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // KdTree for faster results / less accuracy
                    using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
                        using (var sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                // Calculate score based on matches size
                                // ---------------------------------------------->
                                score = 0;
                                for (int i = 0; i < matches.Size; i++)
                                {
                                    if (mask.GetData(i)[0] == 0)
                                    {
                                        continue;
                                    }
                                    foreach (var e in matches[i].ToArray())
                                    {
                                        ++score;
                                    }
                                }
                                // <----------------------------------------------

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Example No. 19
 /// <summary>
 /// Eliminate the matched features whose scale and rotation do not agree with the majority's scale and rotation.
 /// </summary>
 /// <param name="rotationBins">The numbers of bins for rotation, a good value might be 20 (which means each bin covers 18 degree)</param>
 /// <param name="scaleIncrement">This determines the different in scale for neighbor hood bins, a good value might be 1.5 (which means matched features in bin i+1 is scaled 1.5 times larger than matched features in bin i</param>
 /// <param name="modelKeyPoints">The keypoints from the model image</param>
 /// <param name="observedKeyPoints">The keypoints from the observed image</param>
 /// <param name="mask">This is both input and output. This matrix indicates which row is valid for the matches.</param>
 /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
 /// <returns> The number of non-zero elements in the resulting mask</returns>
 public static int VoteForSizeAndOrientation(VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints,
                                             VectorOfVectorOfDMatch matches, Mat mask, double scaleIncrement, int rotationBins)
 {
     return(Features2DInvoke.voteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, scaleIncrement,
                                                       rotationBins));
 }
Example No. 20
        public void Match(IInputArray queryDescriptors, IInputArray trainDescriptors, VectorOfVectorOfDMatch matches)
        {
            Monitor.Enter(locker);
            try
            {
                WindowsFormHelper.AddLogToConsole("In monitor");

                // Upload both descriptor sets to the GPU and run a 1-NN brute-force match.
                using (var cudaBruteForceMatcher = CreateMatcher())
                using (var left = new GpuMat(queryDescriptors as Mat))
                using (var right = new GpuMat(trainDescriptors as Mat))
                {
                    cudaBruteForceMatcher.KnnMatch(right, left, matches, 1);
                }
            }
            finally
            {
                // Release the lock even if matching throws, so other callers are not blocked forever.
                WindowsFormHelper.AddLogToConsole("Exit monitor");
                Monitor.Exit(locker);
            }
        }
Example No. 21
        /// <summary>
        /// Finds matching points in the faces using SURF
        /// </summary>
        /// <param name="modelImage">
        /// The model image.
        /// </param>
        /// <param name="observedImage">
        /// The observed image.
        /// </param>
        /// <param name="matchTime">
        /// The match time.
        /// </param>
        /// <param name="modelKeyPoints">
        /// The model key points.
        /// </param>
        /// <param name="observedKeyPoints">
        /// The observed key points.
        /// </param>
        /// <param name="matches">
        /// The matches.
        /// </param>
        /// <param name="mask">
        /// The mask.
        /// </param>
        /// <param name="homography">
        /// The homography.
        /// </param>
        /// <param name="score">
        /// The score.
        /// </param>
        private void FindMatch(
            Mat modelImage,
            Mat observedImage,
            out long matchTime,
            out VectorOfKeyPoint modelKeyPoints,
            out VectorOfKeyPoint observedKeyPoints,
            VectorOfVectorOfDMatch matches,
            out Mat mask,
            out Mat homography,
            out long score)
        {
            int       k = 2;
            double    uniquenessThreshold = 5;
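            // NOTE: a ratio threshold above 1 means the VoteForUniqueness call below keeps
            // every match; the customary value in the other examples is 0.8.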
            Stopwatch watch;

            homography = null;
            mask       = null;
            score      = 0;



            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();



            if (Controller.Instance.Cuda)
            {
                CudaSURF surfGPU = new CudaSURF(700f, 4, 2, false);
                using (CudaImage <Gray, byte> gpuModelImage = new CudaImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat gpuModelDescriptors =
                                   surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
                            {
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (CudaImage <Gray, Byte> gpuObservedImage = new CudaImage <Gray, byte>(observedImage))
                                    using (GpuMat gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(
                                                   gpuObservedImage,
                                                   null,
                                                   gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(
                                                       gpuObservedDescriptors.Size.Height,
                                                       k,
                                                       1,
                                                       true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(
                                                           gpuObservedDescriptors.Size.Height,
                                                           k,
                                                           1,
                                                           true))
                                                    //using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                    using (Emgu.CV.Cuda.Stream stream = new Emgu.CV.Cuda.Stream())
                                                    {
                                                        matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k, null);
                                                        //indices = new Matrix<int>(gpuMatchIndices.Size);
                                                        //mask = new Matrix<byte>(gpuMask.Size);

                                                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                                        mask.SetTo(new MCvScalar(255));


                                                        surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                        /*//gpu implementation of voteForUniquess
                                                         * using (GpuMat col0 = gpuMatchDist.Col(0))
                                                         * using (GpuMat col1 = gpuMatchDist.Col(1))
                                                         * {
                                                         *  CudaInvoke.Multiply(col1, new GpuMat(), col1, 1, DepthType.Default, stream);
                                                         *  CudaInvoke.Compare(col0, col1, mask, CmpType.LessEqual, stream);
                                                         * }*/

                                                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                                        //wait for the stream to complete its tasks
                                                        //We can perform other CPU-intensive work here while waiting for the stream to complete.
                                                        stream.WaitForCompletion();
                                                        //gpuMatchIndices.Download(indices);
                                                        if (CudaInvoke.CountNonZero(mask) >= 4)
                                                        {
                                                            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(
                                                                modelKeyPoints,
                                                                observedKeyPoints,
                                                                matches,
                                                                mask,
                                                                1.5,
                                                                20);
                                                            if (nonZeroCount >= 4)
                                                            {
                                                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                                                                    modelKeyPoints,
                                                                    observedKeyPoints,
                                                                    matches,
                                                                    mask,
                                                                    2);
                                                            }
                                                        }

                                                        watch.Stop();
                                                    }

                                for (int i = 0; i < matches.Size; i++)
                                {
                                    score++;
                                }
                            }
            }

            //else
            //{
            //    SURF surfCPU = new SURF(500, 4, 2, false);
            //    //extract features from the object image
            //    modelKeyPoints = new VectorOfKeyPoint();
            //    Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            //    watch = Stopwatch.StartNew();

            //    // extract features from the observed image
            //    observedKeyPoints = new VectorOfKeyPoint();
            //    Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            //    BFMatcher matcher = new BFMatcher<float>(DistanceType.L2);
            //    matcher.Add(modelDescriptors);

            //    indices = new Matrix<int>(observedDescriptors.Rows, k);
            //    using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            //    {
            //        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            //        mask = new Matrix<byte>(dist.Rows, 1);
            //        mask.SetValue(255);
            //        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            //    }

            //    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            //    if (nonZeroCount >= 4)
            //    {
            //        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            //        if (nonZeroCount >= 4)
            //            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
            //    }

            //    watch.Stop();
            //}
            matchTime = 0;
        }
Example No. 22
      public void TestBruteForceHammingDistance()
      {
         if (CudaInvoke.HasCuda)
         {
            Image<Gray, byte> box = new Image<Gray, byte>("box.png");
            FastDetector fast = new FastDetector(100, true);
            BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

            #region extract features from the object image
            Stopwatch stopwatch = Stopwatch.StartNew();
            VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
            fast.DetectRaw(box, modelKeypoints);
            Mat modelDescriptors = new Mat();
            brief.Compute(box, modelKeypoints, modelDescriptors);
            stopwatch.Stop();
            Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
            #endregion

            Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

            #region extract features from the observed image
            stopwatch.Reset(); stopwatch.Start();
            VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
            fast.DetectRaw(observedImage, observedKeypoints);
            Mat observedDescriptors = new Mat();
            brief.Compute(observedImage, observedKeypoints, observedDescriptors);
            stopwatch.Stop();
            Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
            #endregion

            Mat homography = null;
            using (GpuMat<Byte> gpuModelDescriptors = new GpuMat<byte>(modelDescriptors)) //initialization of GPU code might take longer.
            {
               stopwatch.Reset(); stopwatch.Start();
               CudaBFMatcher hammingMatcher = new CudaBFMatcher(DistanceType.Hamming);

               //BFMatcher hammingMatcher = new BFMatcher(BFMatcher.DistanceType.Hamming, modelDescriptors);
               int k = 2;
               Matrix<int> trainIdx = new Matrix<int>(observedKeypoints.Size, k);
               Matrix<float> distance = new Matrix<float>(trainIdx.Size);

               using (GpuMat<Byte> gpuObservedDescriptors = new GpuMat<byte>(observedDescriptors))
               //using (GpuMat<int> gpuTrainIdx = new GpuMat<int>(trainIdx.Rows, trainIdx.Cols, 1, true))
               //using (GpuMat<float> gpuDistance = new GpuMat<float>(distance.Rows, distance.Cols, 1, true))
               using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
               {
                  Stopwatch w2 = Stopwatch.StartNew();
                  //hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                  hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k, null, true);
                  w2.Stop();
                  Trace.WriteLine(String.Format("Time for feature matching (excluding data transfer): {0} milli-sec",
                     w2.ElapsedMilliseconds));
                  //gpuTrainIdx.Download(trainIdx);
                  //gpuDistance.Download(distance);


                  Mat mask = new Mat(distance.Rows, 1, DepthType.Cv8U, 1);
                  mask.SetTo(new MCvScalar(255));
                  Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                  int nonZeroCount = CvInvoke.CountNonZero(mask);
                  if (nonZeroCount >= 4)
                  {
                     nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints,
                        matches, mask, 1.5, 20);
                     if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints,
                           observedKeypoints, matches, mask, 2);
                     nonZeroCount = CvInvoke.CountNonZero(mask);
                  }

                  stopwatch.Stop();
                  Trace.WriteLine(String.Format("Time for feature matching (including data transfer): {0} milli-sec",
                     stopwatch.ElapsedMilliseconds));
               }
            }

            if (homography != null)
            {
               Rectangle rect = box.ROI;
               PointF[] pts = new PointF[] { 
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)};

               PointF[] points = CvInvoke.PerspectiveTransform(pts, homography);
               //homography.ProjectPoints(points);

               //Merge the object image and the observed image into one big image for display
               Image<Gray, Byte> res = box.ConcateVertical(observedImage);

               for (int i = 0; i < points.Length; i++)
                  points[i].Y += box.Height;
               res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
               //ImageViewer.Show(res);
            }
         }
      }
Exemplo n.º 23
0
        private string[] FindFeatureMatches(string sourceFile, string[] targetFiles, ObservableCollection <string> output)
        {
            //Emgu.CV.CvInvoke
            // Currently we are only interested in jpg files.
            targetFiles = targetFiles
                          .Where(targetFile =>
            {
                var extension = Path.GetExtension(targetFile).ToLower();
                return(extension == ".jpg" || extension == ".jpeg");
            })
                          .ToArray();

            var matchingFiles = new List <string>();

            using var sourceImage = CvInvoke.Imread(sourceFile, ImreadModes.Grayscale);
            using var sourceMat   = new GpuMat();

            //CudaInvoke.CvtColor(sourceImage, sourceMat, ColorConversion.Bgr2Bgra);

            sourceMat.Upload(sourceImage);
            using var sourceDescriptors = new GpuMat();

            using var detector = new CudaORBDetector();
            var sourceKeyPoints = detector.Detect(sourceMat, null);

            detector.Compute(sourceMat, new VectorOfKeyPoint(sourceKeyPoints), sourceDescriptors);
            //detector.DetectAndCompute(sourceImage, null, sourceKeyPoints, sourceDescriptors, false);


            Parallel.ForEach(targetFiles, new ParallelOptions {
                MaxDegreeOfParallelism = 40
            }, targetFile =>
            {
                try
                {
                    if (targetFile == sourceFile)
                    {
                        return;                               // No need to match the original file.
                    }
                    if (new FileInfo(targetFile).Length == 0) // We cannot compare empty images.
                    {
                        return;
                    }

                    using var targetImage = CvInvoke.Imread(targetFile, ImreadModes.Grayscale);
                    using var targetMat   = new GpuMat();
                    targetMat.Upload(targetImage);

                    //                using var difference = new Mat();
                    //                Cv2.Subtract(sourceImage, targetImage, difference);
                    //
                    //                Cv2.Split(difference, out var split);
                    //                var r = split[0];
                    //                var g = split[1];
                    //                var b = split[2];
                    //                var completeMatch = Cv2.CountNonZero(r) == 0 && Cv2.CountNonZero(g) == 0 && Cv2.CountNonZero(b) == 0;
                    using var targetDescriptors = new GpuMat();
                    //var targetKeyPoints = new VectorOfKeyPoint();

                    using var detector2 = new CudaORBDetector();
                    var targetKeyPoints = detector2.Detect(targetMat, null);
                    detector2.Compute(targetMat, new VectorOfKeyPoint(targetKeyPoints), targetDescriptors);
                    //detector.DetectAndCompute(targetImage, null, targetKeyPoints, targetDescriptors, false);

                    // Needed to compensate for some crashes.
                    // See: https://stackoverflow.com/questions/25089393/opencv-flannbasedmatcher
                    if (sourceKeyPoints.Length >= 2 && targetKeyPoints.Length >= 2)
                    {
                        using var matches = new VectorOfVectorOfDMatch();
                        using var matcher = new CudaBFMatcher(DistanceType.Hamming);
                        matcher.KnnMatch(sourceDescriptors, targetDescriptors, matches, KnnMatchValue);
                        var goodPoints = matches.ToArrayOfArray().Where(match => match.Length > 1)
                                         .Where(match => match[0].Distance < match[1].Distance * MatchDistance)
                                         //.Select(match => match[0])
                                         .ToArray();

                        var matchCount = sourceKeyPoints.Length >= targetKeyPoints.Length
                            ? sourceKeyPoints.Length
                            : targetKeyPoints.Length;

                        var matchQuality = (float)goodPoints.Length / matchCount;

                        if (matchQuality >= MinimumMatchQuality)
                        {
                            using var outputImage       = new Mat();
                            using var scaledOutputImage = new Mat();
                            Features2DToolbox.DrawMatches(
                                sourceImage, new VectorOfKeyPoint(sourceKeyPoints),
                                targetImage, new VectorOfKeyPoint(targetKeyPoints),
                                new VectorOfVectorOfDMatch(goodPoints), outputImage,
                                new Bgr(System.Drawing.Color.Yellow).MCvScalar,
                                new Bgr(System.Drawing.Color.Red).MCvScalar);
                            CvInvoke.Resize(outputImage, scaledOutputImage, System.Drawing.Size.Empty, 0.1f, 0.1f);
                            Application.Current?.Dispatcher?.Invoke(() => CvInvoke.Imshow("Match preview", scaledOutputImage));
                            //Cv2.ImWrite(targetFile + ".comparison.jpg", scaledOutputImage);

                            var sb = new StringBuilder();
                            sb.AppendLine($"{DateTime.Now} Matching:");
                            sb.AppendLine($"Source: {sourceFile}");
                            sb.AppendLine($"Target: {targetFile}");
                            sb.Append($"Match found with quality: {matchQuality}");
                            output.Add(sb.ToString());
                        }
                    }
                }
                catch (Exception e)
                {
                    var sb        = new StringBuilder();
                    var exception = e.ToString().Replace(Environment.NewLine, " ");
                    sb.Append($"{DateTime.Now} Unable to match file: {targetFile}: {exception}");
                    output.Add(sb.ToString());
                }
            });

            return(matchingFiles.ToArray());
        }
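
The loop above relies on KnnMatchValue, MatchDistance and MinimumMatchQuality, which are defined elsewhere in the class. A minimal sketch of plausible definitions, assuming a standard Lowe ratio test; the values are illustrative, not the original settings:

        // Illustrative constants for FindFeatureMatches; the original values are not shown in this example.
        private const int KnnMatchValue = 2;            // k for KnnMatch: keep the two nearest neighbors
        private const float MatchDistance = 0.75f;      // Lowe ratio applied to match[0] versus match[1]
        private const float MinimumMatchQuality = 0.1f; // minimum fraction of good matches to report a hit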
Exemplo n.º 24
0
 /// <summary>
 /// Find the k-nearest match
 /// </summary>
 /// <param name="queryDescriptors">An n x m matrix of descriptors to be query for nearest neighbors. n is the number of descriptor and m is the size of the descriptor</param>
 /// <param name="k">Number of nearest neighbors to search for</param>
 /// <param name="mask">Can be null if not needed. An n x 1 matrix. If 0, the query descriptor in the corresponding row will be ignored.</param>
 /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
 /// <param name="compactResult">Parameter used when the mask (or masks) is not empty. If compactResult is false, the matches vector has the same size as queryDescriptors rows. If compactResult is true, the matches vector does not contain matches for fully masked-out query descriptors.</param>
 /// <param name="trainDescriptors">Train set of descriptors. This set is not added to the train descriptors collection stored in the class object.</param>
 public void KnnMatch(IInputArray queryDescriptors, IInputArray trainDescriptors, VectorOfVectorOfDMatch matches, int k, IInputArray mask = null, bool compactResult = false)
 {
     using (InputArray iaQueryDescriptors = queryDescriptors.GetInputArray())
         using (InputArray iaTrainDescriptors = trainDescriptors.GetInputArray())
             using (InputArray iaMask = (mask == null ? InputArray.GetEmpty() : mask.GetInputArray()))
                 CudaInvoke.cveCudaDescriptorMatcherKnnMatch(_ptr, iaQueryDescriptors, iaTrainDescriptors, matches, k, iaMask, compactResult);
 }
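
For reference, a minimal call to this overload, assuming descriptor GpuMats produced by a CUDA feature extractor such as CudaORBDetector (the names are illustrative):

 using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.Hamming))
 using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
 {
     // Query each observed descriptor against the model descriptors,
     // keeping the two best candidates for a later ratio test.
     matcher.KnnMatch(observedDescriptors, modelDescriptors, matches, 2, null, false);
 }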
Exemplo n.º 25
0
        public Bitmap GetAlignedImage(Bitmap imageToAlign, Bitmap referenceImage, ImageAlignmentType imageAlignmentType)
        {
            if (imageAlignmentType == ImageAlignmentType.CROP || imageAlignmentType == ImageAlignmentType.MAP)
            {
                Tuple <int, int> offsets = ImageFeatureDetector.GetXYOffsets(imageToAlign, referenceImage);

                if (imageAlignmentType == ImageAlignmentType.CROP)
                {
                    return(GetCroppedImage(imageToAlign, offsets.Item1, offsets.Item2, referenceImage.Width, referenceImage.Height));
                }

                Bitmap bmp = new Bitmap(referenceImage.Width, referenceImage.Height);
                using (Graphics g = Graphics.FromImage(bmp))
                    using (Brush brush = new SolidBrush(Color.Black))
                    {
                        g.FillRectangle(brush, 0, 0, bmp.Width, bmp.Height);
                        g.DrawImage(imageToAlign, -offsets.Item1, -offsets.Item2);
                    }

                return(bmp);
            }

            using (Image <Bgr, byte> alignImg = new Image <Bgr, byte>(imageToAlign))
                using (Image <Bgr, byte> refImg = new Image <Bgr, byte>(referenceImage))
                    using (Mat alignMat = alignImg.Mat)
                        using (Mat refMat = refImg.Mat)
                        {
                            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                            {
                                MatchingTechnique matchingTechnique;
                                if (imageAlignmentType == ImageAlignmentType.FASTWARP)
                                {
                                    matchingTechnique = MatchingTechnique.FAST;
                                }
                                else if (imageAlignmentType == ImageAlignmentType.FULLWARP)
                                {
                                    matchingTechnique = MatchingTechnique.ORB;
                                }
                                else
                                {
                                    throw new NotImplementedException();
                                }

                                ImageFeatureDetector.FindMatches(alignMat, refMat, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, matches, out Mat mask, out Mat homography, matchingTechnique, 1.0f);

                                try
                                {
                                    using (Mat result = new Mat())
                                    {
                                        Features2DToolbox.DrawMatches(alignMat, modelKeyPoints, refMat, observedKeyPoints, matches, result, new MCvScalar(255, 0, 0), new MCvScalar(0, 0, 255), mask);
                                        result.Save(@"D:\Downloads\Draw.jpg");
                                    }

                                    using (Mat warped = new Mat())
                                    {
                                        if (homography == null)
                                        {
                                            throw new Exception("Could not determine homography between images.");
                                        }

                                        CvInvoke.WarpPerspective(alignMat, warped, homography, refMat.Size);

                                        return(warped.ToBitmap());
                                    }
                                }
                                finally
                                {
                                    mask.Dispose();
                                    homography?.Dispose(); // homography may be null when matching failed
                                }
                            }
                        }
        }
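
A possible call site, assuming an instance of the surrounding class and two bitmaps of the same scene (the file names are placeholders):

            Bitmap toAlign = new Bitmap("toAlign.jpg");
            Bitmap reference = new Bitmap("reference.jpg");
            // Warp toAlign into reference's frame using the ORB-based full-warp path.
            Bitmap aligned = GetAlignedImage(toAlign, reference, ImageAlignmentType.FULLWARP);
            aligned.Save("aligned.jpg");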
Exemplo n.º 26
0
        public void Match(IInputArray queryDescriptors, IInputArray trainDescriptors, VectorOfVectorOfDMatch matches)
        {
            var _bruteForceMatcher = CreateMatcher();

            _bruteForceMatcher.Add(queryDescriptors);
            _bruteForceMatcher.KnnMatch(trainDescriptors, matches, 1, null);
            _bruteForceMatcher.Clear();
        }
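
Note that this helper adds queryDescriptors to the matcher's train collection and then queries it with trainDescriptors, so the resulting DMatch query/train indices come out transposed relative to the usual convention. A sketch of the conventional direction, assuming CreateMatcher returns a fresh descriptor matcher:

            var matcher = CreateMatcher();
            matcher.Add(trainDescriptors);                        // train set
            matcher.KnnMatch(queryDescriptors, matches, 1, null); // query set, k = 1
            matcher.Clear();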
Exemplo n.º 27
0
        private static VectorOfPoint ProcessImageFLANN(Image <Gray, byte> template, Image <Gray, byte> sceneImage)
        {
            try
            {
                // initialization
                VectorOfPoint    finalPoints        = null;
                Mat              homography         = null;
                VectorOfKeyPoint templateKeyPoints  = new VectorOfKeyPoint();
                VectorOfKeyPoint sceneKeyPoints     = new VectorOfKeyPoint();
                Mat              templateDescriptor = new Mat();
                Mat              sceneDescriptor    = new Mat();

                Mat    mask;
                int    k = 2;
                double uniquenessThreshold     = 0.80;
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                // feature detection and description
                KAZE featureDetector = new KAZE();
                featureDetector.DetectAndCompute(template, null, templateKeyPoints, templateDescriptor, false);
                featureDetector.DetectAndCompute(sceneImage, null, sceneKeyPoints, sceneDescriptor, false);


                // Matching

                //KdTreeIndexParams ip = new KdTreeIndexParams();
                //var ip = new AutotunedIndexParams();
                var               ip      = new LinearIndexParams();
                SearchParams      sp      = new SearchParams();
                FlannBasedMatcher matcher = new FlannBasedMatcher(ip, sp);


                matcher.Add(templateDescriptor);
                matcher.KnnMatch(sceneDescriptor, matches, k);

                mask = new Mat(matches.Size, 1, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int count = Features2DToolbox.VoteForSizeAndOrientation(templateKeyPoints, sceneKeyPoints, matches, mask, 1.5, 20);

                if (count >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(templateKeyPoints,
                                                                                          sceneKeyPoints, matches, mask, 5);
                }

                if (homography != null)
                {
                    System.Drawing.Rectangle rect = new System.Drawing.Rectangle(System.Drawing.Point.Empty, template.Size);
                    PointF[] pts = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    pts = CvInvoke.PerspectiveTransform(pts, homography);
                    System.Drawing.Point[] points = Array.ConvertAll <PointF, System.Drawing.Point>(pts, System.Drawing.Point.Round);
                    finalPoints = new VectorOfPoint(points);
                }

                return(finalPoints);
            }
            catch (Exception)
            {
                throw; // rethrow without losing the original stack trace
            }
        }
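
ProcessImageFLANN returns the projected template corners in scene coordinates, or null when no homography survives the voting steps, so a caller might draw them as follows (the paths are placeholders):

        using (Image<Gray, byte> template = new Image<Gray, byte>("template.png"))
        using (Image<Gray, byte> scene = new Image<Gray, byte>("scene.png"))
        {
            VectorOfPoint corners = ProcessImageFLANN(template, scene);
            if (corners != null)
                CvInvoke.Polylines(scene, corners, true, new MCvScalar(255), 3);
        }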
Exemplo n.º 28
0
        /// <summary>
        /// The method used to discover similarities amongst the images, and populating arrays.
        /// </summary>
        /// <param name="m_modelImage"> The model image (library basic). </param>
        /// <param name="m_observedImage"> The observed image (test).  </param>
        /// <param name="d_matchTime"> The output total time for computing the homography matrix. </param>
        /// <param name="v_modelKeyPoints"></param>
        /// <param name="v_observedKeyPoints"></param>
        /// <param name="v_matches"></param>
        /// <param name="m_mask"></param>
        /// <param name="m_homography"></param>
        /// <param name="l_score"> Field contains the score of matching. </param>
        public static void FindMatch(Mat m_modelImage, Mat m_observedImage, out double d_matchTime, out VectorOfKeyPoint v_modelKeyPoints,
                                     out VectorOfKeyPoint v_observedKeyPoints, VectorOfVectorOfDMatch v_matches, out Mat m_mask,
                                     out Mat m_homography, out long l_score)
        {
            ErrInfLogger.LockInstance.InfoLog("Start of the FindMatch");

            TimerAbstraction _tim = new TimerRefinedAbstraction();

            _tim._iTimer = new TimerFractional();

            m_homography = null;

            v_modelKeyPoints    = new VectorOfKeyPoint();
            v_observedKeyPoints = new VectorOfKeyPoint();

            KAZE featureDetector = new KAZE();

            Mat modelDescriptors = new Mat();

            featureDetector.DetectAndCompute(m_modelImage, null, v_modelKeyPoints, modelDescriptors, false);

            _tim.MeasureStart();

            Mat observedDescriptors = new Mat();

            featureDetector.DetectAndCompute(m_observedImage, null, v_observedKeyPoints, observedDescriptors, false);

            // KdTree for faster results / less accuracy
            using (KdTreeIndexParams ip = new KdTreeIndexParams())
                using (SearchParams sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, v_matches, SettingsContainer.Instance.i_K, null);
                        m_mask = new Mat(v_matches.Size, 1, DepthType.Cv8U, 1);
                        m_mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(v_matches, SettingsContainer.Instance.d_UniquenessThreshold, m_mask);

                        // Calculate a score from the number of surviving matches
                        // ---------------------------------------------->
                        l_score = 0;
                        for (int i = 0; i < v_matches.Size; i++)
                        {
                            if (m_mask.GetData(i)[0] == 0)
                            {
                                continue;
                            }
                            l_score += v_matches[i].Size; // count every k-NN match that passed the mask
                        }
                        // <----------------------------------------------

                        int nonZeroCount = CvInvoke.CountNonZero(m_mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(v_modelKeyPoints, v_observedKeyPoints, v_matches,
                                                                                       m_mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                m_homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(v_modelKeyPoints, v_observedKeyPoints,
                                                                                                        v_matches, m_mask, 2);
                            }
                        }
                    }
            _tim.MeasureStop();
            d_matchTime = Math.Round(_tim.MeasureResult().TotalMilliseconds, 2);
            _tim.MeasureRestart();

            ErrInfLogger.LockInstance.InfoLog("End of the FindMatch");
        }
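
A minimal sketch of a call site for this overload; the image paths are placeholders, and k plus the uniqueness threshold are read from SettingsContainer inside the method:

            Mat model = CvInvoke.Imread("model.png", ImreadModes.Grayscale);
            Mat observed = CvInvoke.Imread("observed.png", ImreadModes.Grayscale);
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                FindMatch(model, observed, out double matchTime, out VectorOfKeyPoint modelKp,
                          out VectorOfKeyPoint observedKp, matches, out Mat mask, out Mat homography,
                          out long score);
                // homography is null when fewer than four correspondences survive the voting;
                // score counts every k-NN match left after VoteForUniqueness.
            }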
Exemplo n.º 29
0
        //public bool EyesNotClosed(Image<Bgr, byte> imageFrame)
        //{
        //    var grayframe = imageFrame.Convert<Gray, byte>();
        //    var eyes = EyeCascadeClassifier.DetectMultiScale(grayframe, 1.1, 2, Size.Empty, Size.Empty);
        //    if (eyes != null && eyes.Length == 2)
        //        return true;
        //    return false;
        //}

        private void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 300;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

#if !__IOS__
            if (CudaInvoke.HasCuda)
            {
                CudaSURF surfCuda = new CudaSURF((float)hessianThresh);
                using (GpuMat gpuModelImage = new GpuMat(modelImage))
                    //extract features from the object image
                    using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
                            {
                                surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuMat gpuObservedImage = new GpuMat(observedImage))
                                    using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                        //using (GpuMat tmp = new GpuMat())
                                        //using (Stream stream = new Stream())
                                        {
                                            matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                                            surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                            mask.SetTo(new MCvScalar(255));
                                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                                            if (nonZeroCount >= 4)
                                            {
                                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                                if (nonZeroCount >= 4)
                                                {
                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                                }
                                            }
                                        }
                                watch.Stop();
                            }
            }
            else
#endif
            {
                using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
                    {
                        SURF surfCPU = new SURF(hessianThresh);
                        //extract features from the object image
                        UMat modelDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        watch = Stopwatch.StartNew();

                        // extract features from the observed image
                        UMat observedDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                        BFMatcher matcher = new BFMatcher(DistanceType.L2);
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                            }
                        }

                        watch.Stop();
                    }
            }
            matchTime = watch.ElapsedMilliseconds;
        }
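
Because the method tests CudaInvoke.HasCuda at run time, callers do not pick a backend themselves. A sketch of a call, reusing the sample image names from the other examples, that runs SURF on the GPU when CUDA is available and on the CPU otherwise:

            Mat model = CvInvoke.Imread("box.png", ImreadModes.Grayscale);
            Mat scene = CvInvoke.Imread("box_in_scene.png", ImreadModes.Grayscale);
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                FindMatch(model, scene, out long matchTime, out VectorOfKeyPoint modelKp,
                          out VectorOfKeyPoint sceneKp, matches, out Mat mask, out Mat homography);
            }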
Exemplo n.º 30
0
 /// <summary>
 /// Recover the homography matrix using RANSAC. If the matrix cannot be recovered, null is returned.
 /// </summary>
 /// <param name="model">The model keypoints</param>
 /// <param name="observed">The observed keypoints</param>
 /// <param name="ransacReprojThreshold">
 /// The maximum allowed reprojection error to treat a point pair as an inlier. 
 /// If srcPoints and dstPoints are measured in pixels, it usually makes sense to set this parameter somewhere in the range 1 to 10.
 /// </param>
 /// <param name="mask">
 /// The mask matrix of which the value might be modified by the function. 
 /// As input, if the value is 0, the corresponding match will be ignored when computing the homography matrix. 
 /// If the value is 1 and RANSAC determine the match is an outlier, the value will be set to 0.
 /// </param>
 /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
 /// <returns>The homography matrix; if it cannot be found, null is returned</returns>
 public static Mat GetHomographyMatrixFromMatchedFeatures(VectorOfKeyPoint model,
    VectorOfKeyPoint observed, VectorOfVectorOfDMatch matches, Mat mask, double ransacReprojThreshold)
 {
    Mat homography = new Mat();
    bool found = CvInvoke.getHomographyMatrixFromMatchedFeatures(model, observed, matches, mask,
       ransacReprojThreshold, homography);
    if (found)
    {
       return homography;
    }
    else
    {
       homography.Dispose();
       return null;
    }
 }
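
Because the mask is updated in place, the RANSAC inliers can be counted after the call. A minimal sketch, reusing matches, mask and the keypoint vectors from the surrounding examples:

    Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
       modelKeyPoints, observedKeyPoints, matches, mask, 2);
    if (homography != null)
    {
       int inliers = CvInvoke.CountNonZero(mask); // rows zeroed by RANSAC are outliers
    }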
Exemplo n.º 31
0
        public static bool TestFeature2DTracker(Feature2D keyPointDetector, Feature2D descriptorGenerator)
        {
            //for (int k = 0; k < 1; k++)
            {
                Feature2D feature2D = null;
                if (keyPointDetector == descriptorGenerator)
                {
                    feature2D = keyPointDetector as Feature2D;
                }

                Mat modelImage = EmguAssert.LoadMat("box.png");
                //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg");
                //modelImage = modelImage.Resize(400, 400, true);

                //modelImage._EqualizeHist();

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                Mat modelDescriptors            = new Mat();
                if (feature2D != null)
                {
                    feature2D.DetectAndCompute(modelImage, null, modelKeypoints, modelDescriptors, false);
                }
                else
                {
                    keyPointDetector.DetectRaw(modelImage, modelKeypoints);
                    descriptorGenerator.Compute(modelImage, modelKeypoints, modelDescriptors);
                }
                stopwatch.Stop();
                EmguAssert.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg");
                Image <Gray, Byte> observedImage = EmguAssert.LoadImage <Gray, byte>("box_in_scene.png");
                //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0));
                //image = image.Resize(400, 400, true);

                //observedImage._EqualizeHist();
                #region extract features from the observed image
                stopwatch.Reset();
                stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                using (Mat observedDescriptors = new Mat())
                {
                    if (feature2D != null)
                    {
                        feature2D.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false);
                    }
                    else
                    {
                        keyPointDetector.DetectRaw(observedImage, observedKeypoints);
                        descriptorGenerator.Compute(observedImage, observedKeypoints, observedDescriptors);
                    }

                    stopwatch.Stop();
                    EmguAssert.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                    #endregion

                    //Merge the object image and the observed image into one big image for display
                    Image <Gray, Byte> res = modelImage.ToImage <Gray, Byte>().ConcateVertical(observedImage);

                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    Mat homography = null;

                    stopwatch.Reset();
                    stopwatch.Start();

                    int          k  = 2;
                    DistanceType dt = modelDescriptors.Depth == CvEnum.DepthType.Cv8U ? DistanceType.Hamming : DistanceType.L2;
                    //using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k))
                    //using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
                    using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        using (BFMatcher matcher = new BFMatcher(dt))
                        {
                            //ParamDef[] parameterDefs = matcher.GetParams();
                            matcher.Add(modelDescriptors);
                            matcher.KnnMatch(observedDescriptors, matches, k, null);

                            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            //mask.SetValue(255);
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2);
                                }
                            }
                        }
                    stopwatch.Stop();
                    EmguAssert.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds));

                    bool success = false;
                    if (homography != null)
                    {
                        PointF[] points = pts.Clone() as PointF[];
                        points = CvInvoke.PerspectiveTransform(points, homography);
                        //homography.ProjectPoints(points);

                        for (int i = 0; i < points.Length; i++)
                        {
                            points[i].Y += modelImage.Height;
                        }

                        res.DrawPolyline(
#if NETFX_CORE
                            Extensions.
#else
                            Array.
#endif
                            ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);

                        success = true;
                    }
                    //Emgu.CV.UI.ImageViewer.Show(res);
                    return(success);
                }



                /*
                 * stopwatch.Reset(); stopwatch.Start();
                 * //set the initial region to be the whole image
                 * using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size))
                 * {
                 * priorMask.SetValue(1.0);
                 * homography = tracker.CamShiftTrack(
                 *    observedFeatures,
                 *    (RectangleF)observedImage.ROI,
                 *    priorMask);
                 * }
                 * Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                 *
                 * if (homography != null) //set the initial tracking window to be the whole image
                 * {
                 * PointF[] points = pts.Clone() as PointF[];
                 * homography.ProjectPoints(points);
                 *
                 * for (int i = 0; i < points.Length; i++)
                 *    points[i].Y += modelImage.Height;
                 * res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                 * return true;
                 * }
                 * else
                 * {
                 * return false;
                 * }*/
            }
        }
Exemplo n.º 32
0
        private void CaptureOnImageGrabbed(object sender, EventArgs eventArgs)
        {
            var capture = (Capture) sender;

            var frame = new Mat();
            capture.Retrieve(frame);

            // 1. get key points
            var keyPoints = new VectorOfKeyPoint(_detector.Detect(frame));
            _tempCloudPoints.SetKeyFeatures(_selectedFrameIndex, keyPoints);

            // 2. get descriptors
            var descriptors = new Mat();
            _descripter.Compute(frame, keyPoints, descriptors);

            // draw keypoints
            var imageFrame = new Mat();
            Features2DToolbox.DrawKeypoints(frame, keyPoints, imageFrame, new Bgr(Color.DarkBlue),
                Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            if (_selectedFrameIndex != 0)
            {
                var previousKeyPoints = _tempCloudPoints.GetKeyFeatures(_selectedFrameIndex - 1);
                var previousKeyDescriptors = _previousDescripters;

                const int k = 2;
                const double uniquenessThreshold = 0.8;

                // 3. compute all matches with previous frame
                var matches = new VectorOfVectorOfDMatch();
                var matcher = GetNativeMatcher(SelectedMatcher);
                matcher.Add(previousKeyDescriptors);

                matcher.KnnMatch(descriptors, matches, k, null);

                var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                Features2DToolbox.VoteForSizeAndOrientation(previousKeyPoints, keyPoints,
                       matches, mask, 1.5, 20);
                Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(previousKeyPoints,
                            keyPoints, matches, mask, 2);

                var managedMask = mask.GetData();

                // 4. separate good matches
                var currentKeys = keyPoints;

                for (int i = 0; i < matches.Size; i++)
                {
                    var match = matches[i][0];
                    // filter wrong matches
                    if (managedMask[i] == 1)
                    {
                        var previousIndex = match.TrainIdx;
                        var currentIndex = match.QueryIdx;

                        var previousPoint = previousKeyPoints[previousIndex].Point;
                        var currentPoint = currentKeys[currentIndex].Point;

                        _tempCloudPoints.Unite(_selectedFrameIndex - 1, previousIndex,
                            _selectedFrameIndex, currentIndex);

                        CvInvoke.Line(imageFrame,
                            Point.Round(previousPoint),
                            Point.Round(currentPoint),
                            new Bgr(Color.Red).MCvScalar,
                            2);
                    }
                }
            }

            _previousDescripters = descriptors;

            PreviewImageSource = imageFrame;

            _selectedFrameIndex++;
            RaisePropertyChanged("Progress");
            RaisePropertyChanged("ProgressText");
            if (_selectedFrameIndex == _framesCount)
            {
                GeneratingStates = FeatureGeneratingStates.Finished;
            }
        }
Exemplo n.º 33
0
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    //extract features from the object image
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    BFMatcher matcher = new BFMatcher(DistanceType.L2);

                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);

                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Exemplo n.º 34
0
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImages">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The bounding rectangle of the detected person that best matches the model image.</returns>
        public static Rectangle Draw(Mat modelImage, Mat observedImages, out long matchTime)
        {
            List <Mat> humans = new List <Mat>();

            matchTime = 0;
            Image <Bgr, Byte> image = observedImages.ToImage <Bgr, Byte>(); // detect people in the observed image

            MCvObjectDetection[] rects;
            using (HOGDescriptor hog = new HOGDescriptor())
            {
                float[] desc = HOGDescriptor.GetDefaultPeopleDetector();
                hog.SetSVMDetector(desc);

                rects = hog.DetectMultiScale(image);

                foreach (MCvObjectDetection rect in rects)
                {
                    humans.Add(new Mat(image.Mat, rect.Rect));
                }
            }
            Mat       result    = new Mat();
            Rectangle human     = new Rectangle();
            long      pre_score = 0;
            VectorOfVectorOfDMatch pre_matches = new VectorOfVectorOfDMatch();

            for (int i = 0; i < humans.Count; i++)
            {
                Mat observedImage = humans[i];
                Mat homography;
                VectorOfKeyPoint modelKeyPoints;
                VectorOfKeyPoint observedKeyPoints;
                long             score = 0;
                //VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
                using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                {
                    Mat mask;
                    FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                              out mask, out homography, out score);


                    //Draw the matched keypoints
                    Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                  matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                    #region draw the projected region on the image
                    if (homography != null)
                    {
                        //draw a rectangle along the projected model
                        Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                        PointF[]  pts  = new PointF[]
                        {
                            new PointF(rect.Left, rect.Bottom),
                            new PointF(rect.Right, rect.Bottom),
                            new PointF(rect.Right, rect.Top),
                            new PointF(rect.Left, rect.Top)
                        };
                        pts = CvInvoke.PerspectiveTransform(pts, homography);

                        Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                        using (VectorOfPoint vp = new VectorOfPoint(points))
                        {
                            CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                        }
                    }

                    #endregion
                    if (pre_score < score)
                    {
                        pre_score = score;
                        human     = rects[i].Rect;
                    }
                }
            }
            return(human);
        }
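
A possible call site for Draw; the paths are placeholders, and the returned rectangle is the HOG detection whose crop scored best against the model:

            Mat model = CvInvoke.Imread("person.png", ImreadModes.Color);
            Mat scene = CvInvoke.Imread("street.png", ImreadModes.Color);
            Rectangle best = Draw(model, scene, out long matchTime);
            if (!best.IsEmpty)
                CvInvoke.Rectangle(scene, best, new MCvScalar(0, 255, 0), 2); // highlight the winner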
Exemplo n.º 35
0
 /// <summary>
 /// Find the k-nearest match
 /// </summary>
 /// <param name="queryDescriptor">An n x m matrix of descriptors to be query for nearest neighbours. n is the number of descriptor and m is the size of the descriptor</param>
 /// <param name="k">Number of nearest neighbors to search for</param>
 /// <param name="mask">Can be null if not needed. An n x 1 matrix. If 0, the query descriptor in the corresponding row will be ignored.</param>
 /// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
 public void KnnMatch(IInputArray queryDescriptor, VectorOfVectorOfDMatch matches, int k, IInputArray mask)
 {
    using (InputArray iaQueryDescriptor = queryDescriptor.GetInputArray())
    using (InputArray iaMask = mask == null ? InputArray.GetEmpty() : mask.GetInputArray())
       DescriptorMatcherInvoke.CvDescriptorMatcherKnnMatch(_descriptorMatcherPtr, iaQueryDescriptor, matches, k, iaMask);
 }
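
This overload searches the matcher's stored train collection, so the usual pattern is Add, then KnnMatch with k = 2, then a Lowe ratio test over the results. A short sketch with an assumed 0.75 ratio (requires System.Linq):

    matcher.Add(modelDescriptors);
    matcher.KnnMatch(observedDescriptors, matches, 2, null);
    MDMatch[][] pairs = matches.ToArrayOfArray();
    MDMatch[] good = pairs.Where(m => m.Length == 2 && m[0].Distance < 0.75f * m[1].Distance)
                          .Select(m => m[0])
                          .ToArray();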
Exemplo n.º 36
0
        public void TestBruteForceHammingDistance()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, byte>       box   = new Image <Gray, byte>("box.png");
                FastDetector             fast  = new FastDetector(100, true);
                BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(box, modelKeypoints);
                Mat modelDescriptors = new Mat();
                brief.Compute(box, modelKeypoints, modelDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");

                #region extract features from the observed image
                stopwatch.Reset(); stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(observedImage, observedKeypoints);
                Mat observedDescriptors = new Mat();
                brief.Compute(observedImage, observedKeypoints, observedDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Mat homography = null;
                using (GpuMat <Byte> gpuModelDescriptors = new GpuMat <byte>(modelDescriptors)) //initialization of the GPU code might take a longer time.
                {
                    stopwatch.Reset(); stopwatch.Start();
                    CudaBFMatcher hammingMatcher = new CudaBFMatcher(DistanceType.Hamming);

                    //BFMatcher hammingMatcher = new BFMatcher(BFMatcher.DistanceType.Hamming, modelDescriptors);
                    int            k        = 2;
                    Matrix <int>   trainIdx = new Matrix <int>(observedKeypoints.Size, k);
                    Matrix <float> distance = new Matrix <float>(trainIdx.Size);

                    using (GpuMat <Byte> gpuObservedDescriptors = new GpuMat <byte>(observedDescriptors))
                        //using (GpuMat<int> gpuTrainIdx = new GpuMat<int>(trainIdx.Rows, trainIdx.Cols, 1, true))
                        //using (GpuMat<float> gpuDistance = new GpuMat<float>(distance.Rows, distance.Cols, 1, true))
                        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        {
                            Stopwatch w2 = Stopwatch.StartNew();
                            //hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                            hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k, null, true);
                            w2.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (excluding data transfer): {0} milli-sec",
                                                          w2.ElapsedMilliseconds));
                            //gpuTrainIdx.Download(trainIdx);
                            //gpuDistance.Download(distance);


                            Mat mask = new Mat(distance.Rows, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints,
                                                                                           matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints,
                                                                                                          observedKeypoints, matches, mask, 2);
                                }
                                nonZeroCount = CvInvoke.CountNonZero(mask);
                            }

                            stopwatch.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (including data transfer): {0} milli-sec",
                                                          stopwatch.ElapsedMilliseconds));
                        }
                }

                if (homography != null)
                {
                    Rectangle rect = box.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    PointF[] points = CvInvoke.PerspectiveTransform(pts, homography);
                    //homography.ProjectPoints(points);

                    //Merge the object image and the observed image into one big image for display
                    Image <Gray, Byte> res = box.ConcateVertical(observedImage);

                    for (int i = 0; i < points.Length; i++)
                    {
                        points[i].Y += box.Height;
                    }
                    res.DrawPolyline(Array.ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                    //ImageViewer.Show(res);
                }
            }
        }
Exemplo n.º 37
0
      public static bool TestFeature2DTracker(Feature2D keyPointDetector, Feature2D descriptorGenerator)
      {
         //for (int k = 0; k < 1; k++)
         {
            Feature2D feature2D = null;
            if (keyPointDetector == descriptorGenerator)
            {
               feature2D = keyPointDetector as Feature2D;
            }

            Mat modelImage = EmguAssert.LoadMat("box.png");
            //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg");
            //modelImage = modelImage.Resize(400, 400, true);

            //modelImage._EqualizeHist();

            #region extract features from the object image
            Stopwatch stopwatch = Stopwatch.StartNew();
            VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
            Mat modelDescriptors = new Mat();
            if (feature2D != null)
            {
               feature2D.DetectAndCompute(modelImage, null, modelKeypoints, modelDescriptors, false);
            }
            else
            {
               keyPointDetector.DetectRaw(modelImage, modelKeypoints);
               descriptorGenerator.Compute(modelImage, modelKeypoints, modelDescriptors);
            }
            stopwatch.Stop();
            EmguAssert.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
            #endregion

            //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg");
            Image<Gray, Byte> observedImage = EmguAssert.LoadImage<Gray, byte>("box_in_scene.png");
            //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0));
            //image = image.Resize(400, 400, true);

            //observedImage._EqualizeHist();
            #region extract features from the observed image
            stopwatch.Reset();
            stopwatch.Start();
            VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
            using (Mat observedDescriptors = new Mat())
            {
               if (feature2D != null)
               {
                  
                  feature2D.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false);
               }
               else
               {
                  keyPointDetector.DetectRaw(observedImage, observedKeypoints);
                  descriptorGenerator.Compute(observedImage, observedKeypoints, observedDescriptors);
               }

               stopwatch.Stop();
               EmguAssert.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
            #endregion

               //Merge the object image and the observed image into one big image for display
               Image<Gray, Byte> res = modelImage.ToImage<Gray, Byte>().ConcateVertical(observedImage);

               Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
               PointF[] pts = new PointF[] { 
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)};

               Mat homography = null;

               stopwatch.Reset();
               stopwatch.Start();

               int k = 2;
               DistanceType dt = modelDescriptors.Depth == CvEnum.DepthType.Cv8U ? DistanceType.Hamming : DistanceType.L2;
               //using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k))
               //using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
               using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
               using (BFMatcher matcher = new BFMatcher(dt))
               {
                  //ParamDef[] parameterDefs = matcher.GetParams();
                  matcher.Add(modelDescriptors);
                  matcher.KnnMatch(observedDescriptors, matches, k, null);

                  Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                  mask.SetTo(new MCvScalar(255));
                  //mask.SetValue(255);
                  Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                  int nonZeroCount = CvInvoke.CountNonZero(mask);
                  if (nonZeroCount >= 4)
                  {
                     nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20);
                     if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2);
                  }
               }
               stopwatch.Stop();
               EmguAssert.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds));

               bool success = false;
               if (homography != null)
               {
                  PointF[] points = pts.Clone() as PointF[];
                  points = CvInvoke.PerspectiveTransform(points, homography);
                  //homography.ProjectPoints(points);

                  for (int i = 0; i < points.Length; i++)
                     points[i].Y += modelImage.Height;
                  
                  res.DrawPolyline(
#if NETFX_CORE
                     Extensions.
#else
                     Array.
#endif
                     ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);

                  success = true;
               }
               //Emgu.CV.UI.ImageViewer.Show(res);
               return success;
            }

            

            /*
            stopwatch.Reset(); stopwatch.Start();
            //set the initial region to be the whole image
            using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size))
            {
               priorMask.SetValue(1.0);
               homography = tracker.CamShiftTrack(
                  observedFeatures,
                  (RectangleF)observedImage.ROI,
                  priorMask);
            }
            Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds));
            
            if (homography != null) //set the initial tracking window to be the whole image
            {
               PointF[] points = pts.Clone() as PointF[];
               homography.ProjectPoints(points);

               for (int i = 0; i < points.Length; i++)
                  points[i].Y += modelImage.Height;
               res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
               return true;
            }
            else
            {
               return false;
            }*/

         }
      }
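VoteForUniqueness, used throughout these examples, applies essentially the same filter as Lowe's ratio test: a best match survives only when it is clearly better than the runner-up. The following is a minimal managed sketch of that idea, not the library's code; it assumes KnnMatch ran with k = 2 and that the CV_8U mask has been read back into a byte array (as the capture example further below does with mask.GetData()).

        static void RatioTest(VectorOfVectorOfDMatch matches, double uniquenessThreshold, byte[] maskData)
        {
            for (int i = 0; i < matches.Size; i++)
            {
                MDMatch[] pair = matches[i].ToArray();
                // Reject the row when there is no runner-up, or when the best
                // distance is not sufficiently smaller than the second-best one.
                if (pair.Length < 2 || pair[0].Distance >= uniquenessThreshold * pair[1].Distance)
                    maskData[i] = 0;
            }
        }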
Exemplo n.º 38
        /// <summary>
        /// Recover the homography matrix using RANSAC. If the matrix cannot be recovered, null is returned.
        /// </summary>
        /// <param name="model">The model keypoints</param>
        /// <param name="observed">The observed keypoints</param>
        /// <param name="matches">Matches. Each matches[i] contains k or fewer matches for the same query descriptor.</param>
        /// <param name="mask">
        /// The mask matrix, whose values may be modified by the function.
        /// As input, if the value is 0, the corresponding match is ignored when computing the homography matrix.
        /// If the value is 1 and RANSAC determines the match to be an outlier, the value is set to 0.
        /// </param>
        /// <param name="ransacReprojThreshold">
        /// The maximum allowed reprojection error to treat a point pair as an inlier.
        /// If srcPoints and dstPoints are measured in pixels, it usually makes sense to set this parameter somewhere in the range 1 to 10.
        /// </param>
        /// <returns>The homography matrix; if it cannot be found, null is returned</returns>
        public static Mat GetHomographyMatrixFromMatchedFeatures(VectorOfKeyPoint model,
                                                                 VectorOfKeyPoint observed, VectorOfVectorOfDMatch matches, Mat mask, double ransacReprojThreshold)
        {
            Mat  homography = new Mat();
            bool found      = Features2DInvoke.getHomographyMatrixFromMatchedFeatures(model, observed, matches, mask,
                                                                                      ransacReprojThreshold, homography);

            if (found)
            {
                return(homography);
            }
            else
            {
                homography.Dispose();
                return(null);
            }
        }
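A short usage sketch for the wrapper above (the helper name is ours): guard against null, and dispose the returned Mat, since ownership passes to the caller.

        static bool TryProjectModel(VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints,
            VectorOfVectorOfDMatch matches, Mat mask)
        {
            Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                modelKeyPoints, observedKeyPoints, matches, mask, 2);
            if (homography == null)
                return false; // RANSAC could not recover a homography

            // ...project the model corners with CvInvoke.PerspectiveTransform here...
            homography.Dispose(); // the caller owns the returned matrix
            return true;
        }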
Exemplo n.º 39
        static void Main(string[] args)
        {
            //Read Images
            String modelImageLocation    = @"C:\EMT\Image\EMT_Lab7\model.png";
            String observedImageLocation = @"C:\EMT\Image\EMT_Lab7\observed3.png";

            Mat modelImage    = CvInvoke.Imread(modelImageLocation, ImreadModes.Grayscale);
            Mat observedImage = CvInvoke.Imread(observedImageLocation, ImreadModes.Grayscale);

            //Create a SIFT feature detector
            //(the first constructor argument is the number of best features to retain)
            int  numFeatures = 170;
            SIFT siftCPU     = new SIFT(numFeatures);

            //Extract features from the model image
            //Store Keypoint and Descriptors
            VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
            Mat modelDescriptors            = new Mat();

            siftCPU.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);

            //Extract features from the observed image
            //Store Keypoint and Descriptors
            VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
            Mat observedDescriptors            = new Mat();

            siftCPU.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false);

            //Create a Brute-Force Matcher to match modelDescriptors to observedDescriptors
            //using DistanceType.L2 (squared Euclidean distance)
            //and store the results of the matching in matches
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
            int k = 2;             // number of nearest neighbours to search

            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, k, null);

            //The following steps eliminate matched features whose scale and rotation
            //do not agree with the majority's scale and rotation

            //Create a mask to store the matches
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));

            //Apply a uniqueness threshold to reject ambiguous matches,
            //storing the results in the mask
            double uniquenessThreshold = 0.8;

            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            //Calculate the homography between the model and observed images
            Mat homography   = null;
            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeypoints, matches, mask, 1.5, 20);

                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeypoints, matches, mask, 2);
                }
            }

            //Re-read the images in color format
            modelImage    = CvInvoke.Imread(modelImageLocation, ImreadModes.AnyColor);
            observedImage = CvInvoke.Imread(observedImageLocation, ImreadModes.AnyColor);

            // Draw Match lines between matched points in the model and observed image
            Mat result = new Mat();

            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeypoints, matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

            //draw a rectangle around the matched object in the observed image
            if (homography != null)
            {
                Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);

                PointF[] pts = new PointF[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };

                pts = CvInvoke.PerspectiveTransform(pts, homography);

                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);

                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                }
            }

            CvInvoke.Imshow("Keypoint Image", result);
            CvInvoke.WaitKey(0);
        }
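The DistanceType.L2 choice above is tied to SIFT's float descriptors. A small helper of ours, mirroring the depth check used in an earlier example, picks the right distance from the descriptor type:

        static DistanceType DistanceFor(Mat descriptors)
        {
            // Binary descriptors (ORB, BRIEF) are stored as CV_8U and compared with
            // Hamming distance; float descriptors (SIFT, SURF, KAZE) use L2.
            return descriptors.Depth == DepthType.Cv8U
                ? DistanceType.Hamming
                : DistanceType.L2;
        }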
Exemplo n.º 40
        private void FindStopSign(Mat img, List <Mat> stopSignList, List <Rectangle> boxList, VectorOfVectorOfPoint contours, int[,] hierachy, int idx)
        {
            for (; idx >= 0; idx = hierachy[idx, 0])
            {
                using (VectorOfPoint c = contours[idx])
                    using (VectorOfPoint approx = new VectorOfPoint())
                    {
                        CvInvoke.ApproxPolyDP(c, approx, CvInvoke.ArcLength(c, true) * 0.02, true);
                        double area = CvInvoke.ContourArea(approx);
                        if (area > 200)
                        {
                            double ratio = CvInvoke.MatchShapes(_octagon, approx, Emgu.CV.CvEnum.ContoursMatchType.I3);

                            if (ratio > 0.1) //not a good match of contour shape
                            {
                                //check children
                                if (hierachy[idx, 2] >= 0)
                                {
                                    FindStopSign(img, stopSignList, boxList, contours, hierachy, hierachy[idx, 2]);
                                }
                                continue;
                            }

                            Rectangle box = CvInvoke.BoundingRectangle(c);

                            Mat candidate = new Mat();
                            using (Mat tmp = new Mat(img, box))
                                CvInvoke.CvtColor(tmp, candidate, ColorConversion.Bgr2Gray);

                            //set the value of pixels not in the contour region to zero
                            using (Mat mask = new Mat(candidate.Height, candidate.Width, DepthType.Cv8U, 1))
                            {
                                mask.SetTo(new MCvScalar(0));
                                CvInvoke.DrawContours(mask, contours, idx, new MCvScalar(255), -1, LineType.EightConnected, null, int.MaxValue, new Point(-box.X, -box.Y));

                                double mean = CvInvoke.Mean(candidate, mask).V0;
                                CvInvoke.Threshold(candidate, candidate, mean, 255, ThresholdType.Binary);
                                CvInvoke.BitwiseNot(candidate, candidate);
                                CvInvoke.BitwiseNot(mask, mask);

                                candidate.SetTo(new MCvScalar(0), mask);
                            }

                            int              minMatchCount         = 8;
                            double           uniquenessThreshold   = 0.8;
                            VectorOfKeyPoint _observeredKeypoint   = new VectorOfKeyPoint();
                            Mat              _observeredDescriptor = new Mat();
                            _detector.DetectAndCompute(candidate, null, _observeredKeypoint, _observeredDescriptor, false);

                            if (_observeredKeypoint.Size >= minMatchCount)
                            {
                                int k = 2;

                                Mat mask;

                                using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                                {
                                    _modelDescriptorMatcher.KnnMatch(_observeredDescriptor, matches, k, null);
                                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                    mask.SetTo(new MCvScalar(255));
                                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                                }

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= minMatchCount)
                                {
                                    boxList.Add(box);
                                    stopSignList.Add(candidate);
                                }
                            }
                        }
                    }
            }
        }
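FindStopSign walks the contour hierarchy produced by CvInvoke.FindContours: for each contour index, hierarchy[idx, 0] is the next contour at the same level and hierarchy[idx, 2] is the first child, with -1 meaning none. A self-contained sketch of that traversal pattern (assuming `using System;`; the helper name is ours):

        static void WalkContours(int[,] hierarchy, int idx, Action<int> visit)
        {
            for (; idx >= 0; idx = hierarchy[idx, 0]) // next contour at this level
            {
                visit(idx);
                if (hierarchy[idx, 2] >= 0)           // descend into the first child
                    WalkContours(hierarchy, hierarchy[idx, 2], visit);
            }
        }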
Exemplo n.º 41
        private void CaptureOnImageGrabbed(object sender, EventArgs eventArgs)
        {
            var capture = (Capture)sender;

            var frame = new Mat();

            capture.Retrieve(frame);

            // 1. get key points
            var keyPoints = new VectorOfKeyPoint(_detector.Detect(frame));

            _tempCloudPoints.SetKeyFeatures(_selectedFrameIndex, keyPoints);

            // 2. get descriptors
            var descripters = new Mat();

            _descripter.Compute(frame, keyPoints, descripters);

            // draw keypoints
            var imageFrame = new Mat();

            Features2DToolbox.DrawKeypoints(frame, keyPoints, imageFrame, new Bgr(Color.DarkBlue),
                                            Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            if (_selectedFrameIndex != 0)
            {
                var previousKeyPoints      = _tempCloudPoints.GetKeyFeatures(_selectedFrameIndex - 1);
                var previousKeyDescripters = _previousDescripters;

                const int    k = 2;
                const double uniquenessThreshold = 0.8;

                // 3. compute all matches with previous frame
                var matches = new VectorOfVectorOfDMatch();
                var matcher = GetNativeMatcher(SelectedMatcher);
                matcher.Add(previousKeyDescripters);

                matcher.KnnMatch(descripters, matches, k, null);

                var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                Features2DToolbox.VoteForSizeAndOrientation(previousKeyPoints, keyPoints,
                                                            matches, mask, 1.5, 20);
                Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(previousKeyPoints,
                                                                         keyPoints, matches, mask, 2);

                var managedMask = mask.GetData();

                // 4. separate good matches
                var currentKeys = keyPoints;

                for (int i = 0; i < matches.Size; i++)
                {
                    var match = matches[i][0];
                    // filter out matches rejected by the voting steps above
                    if (managedMask[i] != 0)
                    {
                        var previousIndex = match.TrainIdx;
                        var currentIndex  = match.QueryIdx;

                        var previousPoint = previousKeyPoints[previousIndex].Point;
                        var currentPoint  = currentKeys[currentIndex].Point;

                        _tempCloudPoints.Unite(_selectedFrameIndex - 1, previousIndex,
                                               _selectedFrameIndex, currentIndex);

                        CvInvoke.Line(imageFrame,
                                      Point.Round(previousPoint),
                                      Point.Round(currentPoint),
                                      new Bgr(Color.Red).MCvScalar,
                                      2);
                    }
                }
            }

            _previousDescripters = descripters;

            PreviewImageSource = imageFrame;

            _selectedFrameIndex++;
            RaisePropertyChanged("Progress");
            RaisePropertyChanged("ProgressText");
            if (_selectedFrameIndex == _framesCount)
            {
                GeneratingStates = FeatureGeneratingStates.Finished;
            }
        }
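The TrainIdx/QueryIdx bookkeeping above follows the matcher convention: descriptors added with Add() form the train set (here, the previous frame), and descriptors passed to KnnMatch() form the query set (the current frame). A small sketch of extracting a matched point pair (names are ours):

        static (PointF train, PointF query) MatchedPair(VectorOfVectorOfDMatch matches, int i,
            VectorOfKeyPoint trainKeyPoints, VectorOfKeyPoint queryKeyPoints)
        {
            MDMatch best = matches[i][0]; // nearest neighbour of query descriptor i
            return (trainKeyPoints[best.TrainIdx].Point, queryKeyPoints[best.QueryIdx].Point);
        }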
Exemplo n.º 42
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out bool goodMatch)
        {
            goodMatch = false;

            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    //extract features from the object image
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // Bruteforce, slower but more accurate
                    // You can use KDTree for faster matching with slight loss in accuracy
                    using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                        using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                               matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                              observedKeyPoints, matches, mask, 2);
                                        goodMatch = nonZeroCount >= 7 && nonZeroCount >= observedKeyPoints.Size / 7;
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
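The comment in FindMatch suggests a KD-tree for faster matching with a slight loss in accuracy. A hedged sketch of that variant of the matching step (KdTreeIndexParams also appears in a later example in this listing, so only the factoring into a helper is ours):

        static void KnnMatchKdTree(Mat modelDescriptors, Mat observedDescriptors,
            VectorOfVectorOfDMatch matches, int k = 2)
        {
            using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
            using (var sp = new Emgu.CV.Flann.SearchParams())
            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
            {
                matcher.Add(modelDescriptors);
                matcher.KnnMatch(observedDescriptors, matches, k, null);
            }
        }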
Exemplo n.º 43
        public static void FindMatch(Mat modelImage, Mat modelImage2, Mat observedImage,
                                     out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography,
                                     out VectorOfKeyPoint modelKeyPoints2, VectorOfVectorOfDMatch matches2, out Mat mask2, out Mat homography2)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography  = null;
            homography2 = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            modelKeyPoints2   = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read)) // Create a UMat view of the model image
                using (UMat uModelImage2 = modelImage2.GetUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                    {
                        KAZE featureDetector = new KAZE();

                        //extract features from the first object image
                        Mat modelDescriptors = new Mat();
                        featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        Mat modelDescriptors2 = new Mat();
                        featureDetector.DetectAndCompute(uModelImage2, null, modelKeyPoints2, modelDescriptors2, false);

                        watch = Stopwatch.StartNew();

                        // extract features from the observed image
                        Mat observedDescriptors = new Mat();
                        featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);


                        // Bruteforce, slower but more accurate
                        // You can use KDTree for faster matching with slight loss in accuracy
                        using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                            using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                                using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                                {
                                    matcher.Add(modelDescriptors);

                                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                    mask.SetTo(new MCvScalar(255));
                                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                                    if (nonZeroCount >= 4)
                                    {
                                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                   matches, mask, 1.5, 20);
                                        if (nonZeroCount >= 4)
                                        {
                                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                  observedKeyPoints, matches, mask, 2);
                                        }
                                    }
                                }

                        using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                            using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                                using (DescriptorMatcher matcher2 = new FlannBasedMatcher(ip, sp))
                                {
                                    matcher2.Add(modelDescriptors2);

                                    matcher2.KnnMatch(observedDescriptors, matches2, k, null);
                                    mask2 = new Mat(matches2.Size, 1, DepthType.Cv8U, 1);
                                    mask2.SetTo(new MCvScalar(255));
                                    Features2DToolbox.VoteForUniqueness(matches2, uniquenessThreshold, mask2);

                                    int nonZeroCount = CvInvoke.CountNonZero(mask2);
                                    if (nonZeroCount >= 4)
                                    {
                                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints2, observedKeyPoints,
                                                                                                   matches2, mask2, 1.5, 20);
                                        if (nonZeroCount >= 4)
                                        {
                                            homography2 = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints2,
                                                                                                                   observedKeyPoints, matches2, mask2, 2);
                                        }
                                    }
                                }


                        watch.Stop();
                    }
            matchTime = watch.ElapsedMilliseconds;
        }
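The two per-model blocks above are identical except for their inputs. A sketch of factoring them into one helper (our refactoring, built only from calls already used in this example):

        static Mat MatchOneModel(Mat modelDescriptors, VectorOfKeyPoint modelKeyPoints,
            Mat observedDescriptors, VectorOfKeyPoint observedKeyPoints,
            VectorOfVectorOfDMatch matches, out Mat mask,
            int k = 2, double uniquenessThreshold = 0.80)
        {
            using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
            using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
            {
                matcher.Add(modelDescriptors);
                matcher.KnnMatch(observedDescriptors, matches, k, null);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(
                        modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        return Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                            modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
                return null; // not enough consistent matches
            }
        }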
Exemplo n.º 44
        public DetectResult DetectSurf(Image <Bgr, byte> imgToFind, Image <Bgr, byte> imgScene, out long matchTime)
        {
            using (var matches = new VectorOfVectorOfDMatch())
            {
                matchTime = 0;

                try
                {
                    Mat mask;
                    Mat homography;
                    VectorOfKeyPoint modelKeyPoints;
                    VectorOfKeyPoint observedKeyPoints;
                    FindMatch(imgToFind.Mat, imgScene.Mat, out matchTime, out modelKeyPoints, out observedKeyPoints, matches, out mask, out homography);

                    float sharpnessValue = this.MatchesCount(matches, mask);

                    //Draw the matched keypoints
                    //var result = imgScene.Mat;
                    //var result =  new Mat();
                    //Features2DToolbox.DrawMatches(imgToFind.Mat, modelKeyPoints, imgScene.Mat, observedKeyPoints,
                    //matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                    #region draw the projected region on the image

                    if (homography != null && sharpnessValue > 5)
                    {
                        //draw a rectangle along the projected model
                        var rect = new Rectangle(Point.Empty, imgToFind.Size);

                        var p1 = new PointF(rect.Left, rect.Bottom);
                        var p2 = new PointF(rect.Right, rect.Bottom);
                        var p3 = new PointF(rect.Right, rect.Top);
                        var p4 = new PointF(rect.Left, rect.Top);

                        var pts = new[] { p1, p2, p3, p4 };
                        pts = CvInvoke.PerspectiveTransform(pts, homography);

                        //check if any opposite lines intersect
                        //if so, then don't add to final results
                        //we should never have 2 opposite sides intersecting
                        var l1 = new LineSegment2DF(pts[0], pts[1]);
                        var l2 = new LineSegment2DF(pts[1], pts[2]);
                        var l3 = new LineSegment2DF(pts[2], pts[3]);
                        var l4 = new LineSegment2DF(pts[3], pts[0]);

                        if (pts.All(x => x.X >= 0 && x.Y >= 0))                    // the whole template should lie inside the scene image
                        {
                            if (!(Intersects2Df(l1, l3) || Intersects2Df(l2, l4))) // opposite sides must not intersect
                            {
                                //var maxScale = 1.3;
                                //var scaleHorisontal = l1.Length > l3.Length ? l1.Length / l3.Length : l3.Length / l1.Length;
                                //var scaleVertical = l2.Length > l4.Length ? l2.Length / l4.Length : l4.Length / l2.Length;

                                //if (scaleHorisontal < maxScale && scaleVertical < maxScale)
                                {
                                    //var points = Array.ConvertAll(pts, Point.Round);
                                    //using (var vp = new VectorOfPoint(points))
                                    //{
                                    //    CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                                    //}

                                    var minX   = pts.Min(x => x.X);
                                    var maxX   = pts.Max(x => x.X);
                                    var minY   = pts.Min(x => x.Y);
                                    var maxY   = pts.Max(x => x.Y);
                                    var width  = (int)Math.Ceiling(maxX - minX);
                                    var height = (int)Math.Ceiling(maxY - minY);

                                    if ((minX + width <= imgScene.Width) && (minY + height <= imgScene.Height))
                                    {
                                        var imageRect = new Rectangle((int)Math.Ceiling(minX), (int)Math.Ceiling(minY), width, height);
                                        // var imageBitmap = imgScene.Copy(imageRect);

                                        return(new DetectResult
                                        {
                                            DetectedRectangle = imageRect,
                                            //   DetectedImage = imageBitmap,
                                            SharpnessValue = sharpnessValue
                                        });
                                    }
                                }
                            }
                        }
                    }
                    #endregion
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                }

                return(null);
            }
        }
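Intersects2Df is referenced above but its body is not part of this listing. A plausible sketch of its behaviour (our assumption): a standard proper-intersection test for two segments via orientation signs, ignoring collinear edge cases.

        static bool Intersects2Df(LineSegment2DF a, LineSegment2DF b)
        {
            // Sign of the cross product (p - o) x (q - o): which side of op does q lie on?
            float Cross(PointF o, PointF p, PointF q) =>
                (p.X - o.X) * (q.Y - o.Y) - (p.Y - o.Y) * (q.X - o.X);

            float d1 = Cross(b.P1, b.P2, a.P1);
            float d2 = Cross(b.P1, b.P2, a.P2);
            float d3 = Cross(a.P1, a.P2, b.P1);
            float d4 = Cross(a.P1, a.P2, b.P2);
            // Proper intersection: each segment's endpoints straddle the other segment.
            return d1 * d2 < 0 && d3 * d4 < 0;
        }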
Exemplo n.º 45
        public static Mat Draw(Mat modelImage, Mat modelImage2, Mat observedImage, out long matchTime)
        {
            Mat homography, homography2;
            VectorOfKeyPoint modelKeyPoints, modelKeyPoints2;
            VectorOfKeyPoint observedKeyPoints;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                using (VectorOfVectorOfDMatch matches2 = new VectorOfVectorOfDMatch())
                {
                    Mat mask, mask2;
                    FindMatch(modelImage, modelImage2, observedImage,
                              out matchTime, out modelKeyPoints, out observedKeyPoints, matches, out mask, out homography,
                              out modelKeyPoints2, matches2, out mask2, out homography2);

                    //Draw the matched keypoints
                    Mat result = new Mat();
                    Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                  matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                    Features2DToolbox.DrawMatches(modelImage2, modelKeyPoints2, observedImage, observedKeyPoints,
                                                  matches2, result, new MCvScalar(255, 0, 255), new MCvScalar(255, 255, 255), mask2);


                    #region draw the projected region on the image

                    if (homography != null)
                    {
                        //draw a rectangle along the projected model
                        Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                        PointF[]  pts  = new PointF[]
                        {
                            new PointF(rect.Left, rect.Bottom),
                            new PointF(rect.Right, rect.Bottom),
                            new PointF(rect.Right, rect.Top),
                            new PointF(rect.Left, rect.Top)
                        };
                        pts = CvInvoke.PerspectiveTransform(pts, homography);

#if NETFX_CORE
                        Point[] points = Extensions.ConvertAll <PointF, Point>(pts, Point.Round);
#else
                        Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
#endif
                        using (VectorOfPoint vp = new VectorOfPoint(points))
                        {
                            CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                        }
                    }
                    #endregion

                    #region draw the projected region of the second model on the image

                    if (homography2 != null)
                    {
                        //draw a rectangle along the projected model
                        Rectangle rect = new Rectangle(Point.Empty, modelImage2.Size);
                        PointF[]  pts  = new PointF[]
                        {
                            new PointF(rect.Left, rect.Bottom),
                            new PointF(rect.Right, rect.Bottom),
                            new PointF(rect.Right, rect.Top),
                            new PointF(rect.Left, rect.Top)
                        };
                        pts = CvInvoke.PerspectiveTransform(pts, homography2);

#if NETFX_CORE
                        Point[] points = Extensions.ConvertAll <PointF, Point>(pts, Point.Round);
#else
                        Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
#endif
                        using (VectorOfPoint vp = new VectorOfPoint(points))
                        {
                            CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 120, 255), 5);
                        }
                    }
                    #endregion


                    return(result);
                }
        }
Exemplo n.º 46
        //public static long Classify(VectorOfKeyPoint modelKeyPoints, Mat modelDescriptors, Mat observedImage, double uniquenessThreshold, int k, int detectionType)
        public static long Classify(Mat modelDescriptors, Mat observedImage, double uniquenessThreshold, int k, int detectionType)
        {
            var score = 0L;

            using (var matches = new VectorOfVectorOfDMatch())
            {
                Mat mask = null;
                //Mat homography = null;
                var observedKeyPoints = new VectorOfKeyPoint();
                var obsImage          = new Mat();
                CvInvoke.Threshold(observedImage, obsImage, 127.0, 255.0, ThresholdType.BinaryInv);
                using (UMat uObservedImage = obsImage.GetUMat(AccessType.Read))
                {
                    switch (detectionType)
                    {
                    default:
                        using (var featureDetector = new SIFT(0, 3, 0.04, 10.0, 1.6))
                        {
                            var observedDescriptors = new Mat();
                            featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                            using (var ip = new KdTreeIndexParams())
                                using (var sp = new SearchParams())
                                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                                    {
                                        matcher.Add(modelDescriptors);
                                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                        mask.SetTo(new MCvScalar(255));
                                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                                        score = 0;
                                        for (int i = 0; i < matches.Size; i++)
                                        {
                                            if (mask.GetData(i)[0] == 0)
                                            {
                                                continue;
                                            }
                                            foreach (var e in matches[i].ToArray())
                                            {
                                                ++score;
                                            }
                                        }
                                        //var nonZeroCount = CvInvoke.CountNonZero(mask);
                                        //if (nonZeroCount >= 4)
                                        //{
                                        //    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                        //    if (nonZeroCount >= 4)
                                        //        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                        //}
                                    }
                        }
                        break;

                    case 1:
                        using (var featureDetector = new KAZE())
                        {
                            var observedDescriptors = new Mat();
                            featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                            using (var ip = new KdTreeIndexParams())
                                using (var sp = new SearchParams())
                                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                                    {
                                        matcher.Add(modelDescriptors);
                                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                        mask.SetTo(new MCvScalar(255));
                                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                                        score = 0;
                                        for (int i = 0; i < matches.Size; i++)
                                        {
                                            if (mask.GetData(i)[0] == 0)
                                            {
                                                continue;
                                            }
                                            foreach (var e in matches[i].ToArray())
                                            {
                                                ++score;
                                            }
                                        }
                                        //var nonZeroCount = CvInvoke.CountNonZero(mask);
                                        //if (nonZeroCount >= 4)
                                        //{
                                        //    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                        //    if (nonZeroCount >= 4)
                                        //        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                        //}
                                    }
                        }
                        break;
                    }
                }
            }
            return(score);
        }
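Both switch branches above end with the same scoring loop: every knn match on a row that survives the uniqueness vote adds one to the score. A sketch of a shared helper (ours; the byte-array mask readback mirrors the capture example earlier in this listing):

        static long ScoreSurvivingMatches(VectorOfVectorOfDMatch matches, Mat mask)
        {
            byte[] maskData = (byte[])mask.GetData(); // managed copy of the CV_8U mask
            long score = 0;
            for (int i = 0; i < matches.Size; i++)
            {
                if (maskData[i] != 0)
                    score += matches[i].Size; // count every surviving knn match
            }
            return score;
        }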
Exemplo n.º 47
 public SIFTMatchBehaviour(ImageData image1, ImageData image2, VectorOfVectorOfDMatch match) : base(image1,
                                                                                                    image2, match)
 {
 }
Exemplo n.º 48
        public AlignedResult CreateAlignedSecondImageKeypoints(SKBitmap firstImage, SKBitmap secondImage,
                                                               bool discardTransX, AlignmentSettings settings, bool keystoneRightOnFirst)
        {
#if __NO_EMGU__
            return(null);
#endif
            var result = new AlignedResult();

            var detector = new ORBDetector();
            const ImreadModes READ_MODE = ImreadModes.Color;

            var mat1                = new Mat();
            var descriptors1        = new Mat();
            var allKeyPointsVector1 = new VectorOfKeyPoint();
            CvInvoke.Imdecode(GetBytes(firstImage, 1), READ_MODE, mat1);
            detector.DetectAndCompute(mat1, null, allKeyPointsVector1, descriptors1, false);

            var mat2                = new Mat();
            var descriptors2        = new Mat();
            var allKeyPointsVector2 = new VectorOfKeyPoint();
            CvInvoke.Imdecode(GetBytes(secondImage, 1), READ_MODE, mat2);
            detector.DetectAndCompute(mat2, null, allKeyPointsVector2, descriptors2, false);

            const double THRESHOLD_PROPORTION = 1 / 4d;
            var          thresholdDistance    = Math.Sqrt(Math.Pow(firstImage.Width, 2) + Math.Pow(firstImage.Height, 2)) * THRESHOLD_PROPORTION;

            var distanceThresholdMask = new Mat(allKeyPointsVector2.Size, allKeyPointsVector1.Size, DepthType.Cv8U, 1);
            if (!settings.UseCrossCheck)
            {
                unsafe
                {
                    var maskPtr = (byte *)distanceThresholdMask.DataPointer.ToPointer();
                    for (var i = 0; i < allKeyPointsVector2.Size; i++)
                    {
                        var keyPoint2 = allKeyPointsVector2[i];
                        for (var j = 0; j < allKeyPointsVector1.Size; j++)
                        {
                            var keyPoint1        = allKeyPointsVector1[j];
                            var physicalDistance = CalculatePhysicalDistanceBetweenPoints(keyPoint2.Point, keyPoint1.Point);
                            if (physicalDistance < thresholdDistance)
                            {
                                *maskPtr = 255;
                            }
                            else
                            {
                                *maskPtr = 0;
                            }

                            maskPtr++;
                        }
                    }
                }
            }

            var vectorOfMatches = new VectorOfVectorOfDMatch();
            var matcher         = new BFMatcher(DistanceType.Hamming, settings.UseCrossCheck);
            matcher.Add(descriptors1);
            matcher.KnnMatch(descriptors2, vectorOfMatches, settings.UseCrossCheck ? 1 : 2, settings.UseCrossCheck ? new VectorOfMat() : new VectorOfMat(distanceThresholdMask));

            var goodMatches = new List <MDMatch>();
            for (var i = 0; i < vectorOfMatches.Size; i++)
            {
                if (vectorOfMatches[i].Size == 0)
                {
                    continue;
                }

                if (vectorOfMatches[i].Size == 1 ||
                    (vectorOfMatches[i][0].Distance < 0.75 * vectorOfMatches[i][1].Distance)) //make sure matches are unique
                {
                    goodMatches.Add(vectorOfMatches[i][0]);
                }
            }

            if (goodMatches.Count < settings.MinimumKeypoints)
            {
                return(null);
            }

            var pairedPoints = new List <PointForCleaning>();
            for (var ii = 0; ii < goodMatches.Count; ii++)
            {
                var keyPoint1 = allKeyPointsVector1[goodMatches[ii].TrainIdx];
                var keyPoint2 = allKeyPointsVector2[goodMatches[ii].QueryIdx];
                pairedPoints.Add(new PointForCleaning
                {
                    KeyPoint1 = keyPoint1,
                    KeyPoint2 = keyPoint2,
                    Data      = new KeyPointOutlierDetectorData
                    {
                        Distance = (float)CalculatePhysicalDistanceBetweenPoints(keyPoint1.Point, keyPoint2.Point),
                        Slope    = (keyPoint2.Point.Y - keyPoint1.Point.Y) / (keyPoint2.Point.X - keyPoint1.Point.X)
                    },
                    Match = new MDMatch
                    {
                        Distance = goodMatches[ii].Distance,
                        ImgIdx   = goodMatches[ii].ImgIdx,
                        QueryIdx = ii,
                        TrainIdx = ii
                    }
                });
            }

            if (settings.DrawKeypointMatches)
            {
                result.DirtyMatchesCount = pairedPoints.Count;
                result.DrawnDirtyMatches = DrawMatches(firstImage, secondImage, pairedPoints);
            }

            if (settings.DiscardOutliersByDistance || settings.DiscardOutliersBySlope)
            {
                //Debug.WriteLine("DIRTY POINTS START (ham,dist,slope,ydiff), count: " + pairedPoints.Count);
                //foreach (var pointForCleaning in pairedPoints)
                //{
                //    Debug.WriteLine(pointForCleaning.Match.Distance  + "," + pointForCleaning.Data.Distance + "," + pointForCleaning.Data.Slope + "," + Math.Abs(pointForCleaning.KeyPoint1.Point.Y - pointForCleaning.KeyPoint2.Point.Y));
                //}

                //Debug.WriteLine("DIRTY PAIRS:");
                //PrintPairs(pairedPoints);

                if (settings.DiscardOutliersByDistance)
                {
                    // reject distances and slopes more than some number of standard deviations from the median
                    var medianDistance = pairedPoints.OrderBy(p => p.Data.Distance).ElementAt(pairedPoints.Count / 2).Data.Distance;
                    var distanceStdDev = CalcStandardDeviation(pairedPoints.Select(p => p.Data.Distance).ToArray());
                    pairedPoints = pairedPoints.Where(p => Math.Abs(p.Data.Distance - medianDistance) < Math.Abs(distanceStdDev * (settings.KeypointOutlierThresholdTenths / 10d))).ToList();
                    //Debug.WriteLine("Median Distance: " + medianDistance);
                    //Debug.WriteLine("Distance Cleaned Points count: " + pairedPoints.Count);
                }

                if (settings.DiscardOutliersBySlope)
                {
                    var validSlopes = pairedPoints.Where(p => !float.IsNaN(p.Data.Slope) && float.IsFinite(p.Data.Slope)).ToArray();
                    var medianSlope = validSlopes.OrderBy(p => p.Data.Slope).ElementAt(validSlopes.Length / 2).Data.Slope;
                    var slopeStdDev = CalcStandardDeviation(validSlopes.Select(p => p.Data.Slope).ToArray());
                    pairedPoints = validSlopes.Where(p => Math.Abs(p.Data.Slope - medianSlope) < Math.Abs(slopeStdDev * (settings.KeypointOutlierThresholdTenths / 10d))).ToList();
                    //Debug.WriteLine("Median Slope: " + medianSlope);
                    //Debug.WriteLine("Slope Cleaned Points count: " + pairedPoints.Count);
                }

                //Debug.WriteLine("CLEAN POINTS START (ham,dist,slope,ydiff), count: " + pairedPoints.Count);
                //foreach (var pointForCleaning in pairedPoints)
                //{
                //    Debug.WriteLine(pointForCleaning.Match.Distance + "," + pointForCleaning.Data.Distance + "," + pointForCleaning.Data.Slope + "," + Math.Abs(pointForCleaning.KeyPoint1.Point.Y - pointForCleaning.KeyPoint2.Point.Y));
                //}

                //Debug.WriteLine("CLEANED PAIRS:");
                //PrintPairs(pairedPoints);

                for (var ii = 0; ii < pairedPoints.Count; ii++)
                {
                    var oldMatch = pairedPoints[ii].Match;
                    pairedPoints[ii].Match = new MDMatch
                    {
                        Distance = oldMatch.Distance,
                        ImgIdx   = oldMatch.ImgIdx,
                        QueryIdx = ii,
                        TrainIdx = ii
                    };
                }

                if (settings.DrawKeypointMatches)
                {
                    result.CleanMatchesCount = pairedPoints.Count;
                    result.DrawnCleanMatches = DrawMatches(firstImage, secondImage, pairedPoints);
                }
            }

            var points1 = pairedPoints.Select(p => new SKPoint(p.KeyPoint1.Point.X, p.KeyPoint1.Point.Y)).ToArray();
            var points2 = pairedPoints.Select(p => new SKPoint(p.KeyPoint2.Point.X, p.KeyPoint2.Point.Y)).ToArray();


            var translation1 = FindVerticalTranslation(points1, points2, secondImage);
            var translated1  = SKMatrix.MakeTranslation(0, translation1);
            points2 = translated1.MapPoints(points2);

            var rotation1 = FindRotation(points1, points2, secondImage);
            var rotated1  = SKMatrix.MakeRotation(rotation1, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = rotated1.MapPoints(points2);

            var zoom1   = FindZoom(points1, points2, secondImage);
            var zoomed1 = SKMatrix.MakeScale(zoom1, zoom1, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = zoomed1.MapPoints(points2);



            var translation2 = FindVerticalTranslation(points1, points2, secondImage);
            var translated2  = SKMatrix.MakeTranslation(0, translation2);
            points2 = translated2.MapPoints(points2);

            var rotation2 = FindRotation(points1, points2, secondImage);
            var rotated2  = SKMatrix.MakeRotation(rotation2, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = rotated2.MapPoints(points2);

            var zoom2   = FindZoom(points1, points2, secondImage);
            var zoomed2 = SKMatrix.MakeScale(zoom2, zoom2, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = zoomed2.MapPoints(points2);



            var translation3 = FindVerticalTranslation(points1, points2, secondImage);
            var translated3  = SKMatrix.MakeTranslation(0, translation3);
            points2 = translated3.MapPoints(points2);

            var rotation3 = FindRotation(points1, points2, secondImage);
            var rotated3  = SKMatrix.MakeRotation(rotation3, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = rotated3.MapPoints(points2);

            var zoom3   = FindZoom(points1, points2, secondImage);
            var zoomed3 = SKMatrix.MakeScale(zoom3, zoom3, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = zoomed3.MapPoints(points2);


            var keystoned1 = SKMatrix.MakeIdentity();
            var keystoned2 = SKMatrix.MakeIdentity();
            if (settings.DoKeystoneCorrection)
            {
                keystoned1 = FindTaper(points2, points1, secondImage, keystoneRightOnFirst);
                points1    = keystoned1.MapPoints(points1);
                keystoned2 = FindTaper(points1, points2, secondImage, !keystoneRightOnFirst);
                points2    = keystoned2.MapPoints(points2);
            }


            var horizontaled = SKMatrix.MakeIdentity();
            if (!discardTransX)
            {
                var horizontalAdj = FindHorizontalTranslation(points1, points2, secondImage);
                horizontaled = SKMatrix.MakeTranslation(horizontalAdj, 0);
                points2      = horizontaled.MapPoints(points2);
            }



            var tempMatrix1 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix1, translated1, rotated1);
            var tempMatrix2 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix2, tempMatrix1, zoomed1);

            var tempMatrix3 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix3, tempMatrix2, translated2);
            var tempMatrix4 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix4, tempMatrix3, rotated2);
            var tempMatrix5 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix5, tempMatrix4, zoomed2);

            var tempMatrix6 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix6, tempMatrix5, translated3);
            var tempMatrix7 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix7, tempMatrix6, rotated3);
            var tempMatrix8 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix8, tempMatrix7, zoomed3);


            var tempMatrix9 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix9, tempMatrix8, keystoned2);

            var tempMatrix10 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix10, tempMatrix9, horizontaled);

            var finalMatrix = tempMatrix10;
            result.TransformMatrix2 = finalMatrix;
            var alignedImage2 = new SKBitmap(secondImage.Width, secondImage.Height);
            using (var canvas = new SKCanvas(alignedImage2))
            {
                canvas.SetMatrix(finalMatrix);
                canvas.DrawBitmap(secondImage, 0, 0);
            }
            result.AlignedBitmap2 = alignedImage2;


            result.TransformMatrix1 = keystoned1;
            var alignedImage1 = new SKBitmap(firstImage.Width, firstImage.Height);
            using (var canvas = new SKCanvas(alignedImage1))
            {
                canvas.SetMatrix(keystoned1);
                canvas.DrawBitmap(firstImage, 0, 0);
            }
            result.AlignedBitmap1 = alignedImage1;


            return(result);
        }
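CalcStandardDeviation is referenced above but its body is not part of this listing. A plausible sketch (our assumption: a plain population standard deviation; assumes `using System;` and `using System.Linq;`):

        static float CalcStandardDeviation(float[] values)
        {
            if (values.Length == 0) return 0f;
            float mean = values.Average();
            double sumOfSquares = values.Sum(v => (double)((v - mean) * (v - mean)));
            return (float)Math.Sqrt(sumOfSquares / values.Length);
        }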
Exemplo n.º 49
        public static void FindMatches(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, MatchingTechnique matchingTechnique, float keyPointFilter = 1, double detectorParameter = -1)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();


            Feature2D    detector;
            Feature2D    descriptor;
            DistanceType distanceType;

            if (matchingTechnique == MatchingTechnique.FAST)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 20;
                }

                detector     = new FastDetector((int)detectorParameter);
                descriptor   = new BriefDescriptorExtractor();
                distanceType = DistanceType.Hamming;
            }
            else if (matchingTechnique == MatchingTechnique.ORB)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 100000;
                }

                detector     = new ORBDetector((int)detectorParameter);
                descriptor   = detector;
                distanceType = DistanceType.Hamming;
            }
            else if (matchingTechnique == MatchingTechnique.SURF)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 300;
                }

                detector     = new SURF(detectorParameter);
                descriptor   = detector;
                distanceType = DistanceType.L2;
            }
            else
            {
                throw new NotImplementedException($"{matchingTechnique} not supported.");
            }

            // Extract features from model image.
            UMat modelDescriptors = new UMat();

            detector.DetectRaw(modelImage, modelKeyPoints, null);
            Console.WriteLine($"modelKeyPoints: {modelKeyPoints.Size}");
            if (keyPointFilter < 2)
            {
                modelKeyPoints = GetBestKeypointsPercent(modelKeyPoints, keyPointFilter);
            }
            else
            {
                modelKeyPoints = GetBestKeypointsCount(modelKeyPoints, (int)keyPointFilter);
            }
            descriptor.Compute(modelImage, modelKeyPoints, modelDescriptors);

            // Extract features from observed image.
            UMat observedDescriptors = new UMat();

            detector.DetectRaw(observedImage, observedKeyPoints, null);
            Console.WriteLine($"observedKeyPoints: {observedKeyPoints.Size}");
            if (keyPointFilter < 2)
            {
                observedKeyPoints = GetBestKeypointsPercent(observedKeyPoints, keyPointFilter);
            }
            else
            {
                observedKeyPoints = GetBestKeypointsCount(observedKeyPoints, (int)keyPointFilter);
            }
            descriptor.Compute(observedImage, observedKeyPoints, observedDescriptors);

            // Match keypoints.
            BFMatcher matcher = new BFMatcher(distanceType);

            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, k, null);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
            }
        }
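GetBestKeypointsCount and GetBestKeypointsPercent are referenced above but not shown in this listing. Plausible sketches (our assumption: keep the strongest keypoints by detector response; assumes `using System.Linq;`):

        static VectorOfKeyPoint GetBestKeypointsCount(VectorOfKeyPoint keyPoints, int count)
        {
            MKeyPoint[] best = keyPoints.ToArray()
                .OrderByDescending(kp => kp.Response) // stronger detector response first
                .Take(count)
                .ToArray();
            return new VectorOfKeyPoint(best);
        }

        static VectorOfKeyPoint GetBestKeypointsPercent(VectorOfKeyPoint keyPoints, float percent)
        {
            return GetBestKeypointsCount(keyPoints, (int)(keyPoints.Size * percent));
        }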
Exemplo n.º 50
        private void timer1_Tick(object sender, EventArgs e)
        {
            if (time == 10)
            {
                Mat frame = new Mat();
                capture.Retrieve(frame, 0);
                Mat grayVideo = new Mat();
                CvInvoke.CvtColor(frame, grayVideo, ColorConversion.Bgr2Gray);
                UMat videoDescriptors = new UMat();
                VectorOfKeyPoint videoKeyPoints = new VectorOfKeyPoint();
                calculatedescriptors(grayVideo, videoDescriptors, videoKeyPoints);
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(originalImageDescriptors);
                matcher.KnnMatch(videoDescriptors, matches, 2, null);
                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
                Mat homography = null;
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(originalImageKeyPoints, videoKeyPoints,
                       matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(originalImageKeyPoints, videoKeyPoints, matches, mask, 2);
                }

                Mat result = new Mat();
                Features2DToolbox.DrawMatches(grayImage, originalImageKeyPoints, grayVideo, videoKeyPoints,
                   matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);
                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, grayImage.Size);
                    PointF[] pts = new PointF[]
                    {
                              new PointF(rect.Left, rect.Bottom),
                              new PointF(rect.Right, rect.Bottom),
                              new PointF(rect.Right, rect.Top),
                              new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                    Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }

                // Show the (possibly annotated) match visualization for this frame.
                viewer.Image = result;
                time = 0;
            }
            else
            {
                time++;
            }
        }
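
        // NOTE: calculatedescriptors is called above but not shown in this excerpt. A minimal
        // sketch, assuming a KAZE detector (KAZE produces float descriptors, which is what the
        // L2-distance BFMatcher above expects):
        private void calculatedescriptors(Mat image, UMat descriptors, VectorOfKeyPoint keyPoints)
        {
            using (KAZE detector = new KAZE())
            {
                // Detect keypoints and compute their descriptors in a single pass.
                detector.DetectAndCompute(image, null, keyPoints, descriptors, false);
            }
        }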
Exemplo n.º 51
        public Image Compare2Features(
            string destFeatureFile,
            string origFeatureFile,
            string vpFileDest,
            string vpFileOrig,
            string destImageFile = "",
            string origImageFile = "",
            bool needMatchedImage = false)
        {
            EmguType destFeatures = Utils.ReadJsonFile<EmguType>(destFeatureFile);
            EmguType origFeatures = Utils.ReadJsonFile<EmguType>(origFeatureFile);

            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            matcher.Add(origFeatures.Descriptors);
            matcher.KnnMatch(destFeatures.Descriptors, matches, 2, null);
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

            Mat homography = null;
            int count = CvInvoke.CountNonZero(mask);    // Used to locate the template in the image.
            if (count >= 4)
            {
                count = Features2DToolbox.VoteForSizeAndOrientation(origFeatures.KeyPoints, destFeatures.KeyPoints, matches, mask, 1.5, 20);
                if (count >= 4)
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(origFeatures.KeyPoints, destFeatures.KeyPoints, matches, mask, 2);
            }

            Mat showImage = null;
            Mat pointImage = null;
            if (needMatchedImage
                && !string.IsNullOrWhiteSpace(destImageFile)
                && !string.IsNullOrWhiteSpace(origImageFile))
            {

                Image<Gray, Byte> destImage = new Image<Gray, Byte>(destImageFile);
                Image<Gray, Byte> origImage = new Image<Gray, Byte>(origImageFile);

                showImage = new Mat(origImage.Size, DepthType.Cv8U, 3);
                pointImage = new Mat(origImage.Size, DepthType.Cv8U, 3);
                //add optical vp line

                string vpPath = Path.GetDirectoryName(vpFileDest);
                DirectoryInfo dirInfo = new DirectoryInfo(vpPath);
                FileInfo[] fsinfo = dirInfo.GetFiles();
                FileInfo[] vppFiles = fsinfo.Where(p => p.Name.Contains(".jpgpp.dat")).OrderBy(p => p.Name).ToArray();
                //FileInfo[] vpdFiles = fsinfo.Where(p => p.Name.Contains(".jpgpd.dat")).OrderBy(p => p.Name).ToArray();

                for (int k = 0; k < vppFiles.Length - 1; k++)
                {

                    VectorOfPointF vpDest = Utils.ReadJsonFile<VectorOfPointF>(vppFiles[k+1].FullName);
                    //VectorOfPointF vpOrig = Utils.ReadJsonFile<VectorOfPointF>(vpdFiles[k].FullName);
                    VectorOfPointF vpOrig = Utils.ReadJsonFile<VectorOfPointF>(vppFiles[k].FullName);

                    // Motion tracking restarts at each detection-group boundary; skip the pair
                    // that would connect the last frame of one group to the first of the next.
                    if ((k + 1) % Constants.DETECTIVE_GROUP_COUNT == 0)
                        continue;

                    Point[] pointsDest = Array.ConvertAll<PointF, Point>(vpDest.ToArray(), Point.Round);
                    Point[] pointsOrig = Array.ConvertAll<PointF, Point>(vpOrig.ToArray(), Point.Round);

                    for (int i = 0; i < pointsDest.Length; i++)
                    {
                        Point[] ps = { pointsDest[i], pointsOrig[i] };
                        CvInvoke.Polylines(pointImage, ps, true, new MCvScalar(0, 0, 255, 255));
                        //CvInvoke.Circle(pointImage, pointsOrig[i], 1, new MCvScalar(0, 255, 0, 255));
                    }
                }

                Image<Bgr, Byte> firstImg = new Image<Bgr, Byte>(origImageFile);
                Image<Bgr, Byte> lastImg = new Image<Bgr, Byte>("D:\\MyPrj\\mygitcode\\MyCode\\ExamVideoProcess\\ExamVideoProcess\\bin\\x64\\Debug\\initVideo\\30Grayimg.jpg");
                CvInvoke.AddWeighted(firstImg, 0.5, lastImg, 0.5, 0.0, showImage, DepthType.Cv8U);
                CvInvoke.AddWeighted(showImage, 0.5, pointImage, 0.5, 0.0, showImage, DepthType.Cv8U);

                /*
                Features2DToolbox.DrawMatches(origImage.Convert<Gray, Byte>().Mat, origFeatures.KeyPoints, destImage.Convert<Gray, Byte>().Mat, destFeatures.KeyPoints, matches, showImage, new MCvScalar(255, 0, 255), new MCvScalar(0, 255, 255), mask);
                if (homography != null)     // If the template was found in the image, draw it.
                {
                    Rectangle rect = new Rectangle(Point.Empty, origImage.Size);
                    PointF[] points = new PointF[]
                {
                  new PointF(rect.Left, rect.Bottom),
                  new PointF(rect.Right, rect.Bottom),
                  new PointF(rect.Right, rect.Top),
                  new PointF(rect.Left, rect.Top)
                };
                    points = CvInvoke.PerspectiveTransform(points, homography);
                    Point[] points2 = Array.ConvertAll<PointF, Point>(points, Point.Round);
                    VectorOfPoint vp = new VectorOfPoint(points2);
                    CvInvoke.Polylines(showImage, vp, true, new MCvScalar(255, 0, 0, 255), 15);

                }
                */
                return showImage.Bitmap;
            }
            return null;
        }
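
        // NOTE: EmguType and Utils.ReadJsonFile<T> are project-specific helpers that are not
        // part of this excerpt. A hypothetical container matching the .KeyPoints / .Descriptors
        // accesses above:
        public class EmguType
        {
            public VectorOfKeyPoint KeyPoints { get; set; } = new VectorOfKeyPoint();
            public Mat Descriptors { get; set; } = new Mat();
        }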