Example #1
File: Program.cs Project: neutmute/emgucv
      static void Main()
      {
         Application.EnableVisualStyles();
         Application.SetCompatibleTextRenderingDefault(false);

         using (Mat image = new Mat("pedestrian.png"))
         {
            
            long processingTime;
            Rectangle[] results;

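            // Prefer the CUDA detector when a supported GPU is present; otherwise fall back to UMat (OpenCL or CPU).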
            if (CudaInvoke.HasCuda)
            {
               using (GpuMat gpuMat = new GpuMat(image))
                  results = FindPedestrian.Find(gpuMat, out processingTime);
            }
            else
            {
               using (UMat uImage = image.GetUMat(AccessType.ReadWrite))
                  results = FindPedestrian.Find(uImage, out processingTime);
            }
            
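            // Draw each detection as a red rectangle on the original image.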
            foreach (Rectangle rect in results)
            {
               CvInvoke.Rectangle(image, rect, new Bgr(Color.Red).MCvScalar);
            }
            ImageViewer.Show(
               image,
               String.Format("Pedestrian detection using {0} in {1} milliseconds.",
                  CudaInvoke.HasCuda ? "GPU" : 
                  CvInvoke.UseOpenCL ? "OpenCL":
                  "CPU",
                  processingTime));
         }
      }
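
The FindPedestrian.Find helper used above is defined elsewhere in the sample project and is not shown here. A minimal sketch of what its non-CUDA path might look like, assuming the stock HOG people detector that ships with Emgu CV (the real helper, and a CUDA branch using CudaHOG, may differ):

using System.Diagnostics;
using System.Drawing;
using System.Linq;
using Emgu.CV;
using Emgu.CV.Structure;

public static class FindPedestrian
{
   // Sketch of the UMat/CPU path only; hypothetical, for illustration.
   public static Rectangle[] Find(IInputArray image, out long processingTime)
   {
      Stopwatch watch = Stopwatch.StartNew();
      Rectangle[] regions;
      using (HOGDescriptor descriptor = new HOGDescriptor())
      {
         // Use the built-in linear SVM trained for people detection.
         descriptor.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
         MCvObjectDetection[] detections = descriptor.DetectMultiScale(image);
         regions = detections.Select(d => d.Rect).ToArray();
      }
      watch.Stop();
      processingTime = watch.ElapsedMilliseconds;
      return regions;
   }
}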
Example #2
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out long score)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

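                    // Detect key points and compute KAZE descriptors for the model image (not included in the timing).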
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

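                    // Time only the observed-image processing and the matching.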
                    watch = Stopwatch.StartNew();

                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // KdTree for faster results / less accuracy
                    using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
                        using (var sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                // Calculate score based on matches size
                                // ---------------------------------------------->
                                score = 0;
                                for (int i = 0; i < matches.Size; i++)
                                {
                                    if (mask.GetData(i)[0] == 0)
                                    {
                                        continue;
                                    }
                                    // Each query point that survives the uniqueness vote contributes its k candidate matches.
                                    score += matches[i].Size;
                                }
                                // <----------------------------------------------

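                                // At least 4 correspondences are needed to estimate a homography.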
                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
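
A minimal sketch of how FindMatch might be called, assuming a recent Emgu CV release where CvInvoke.Imread takes an ImreadModes flag, plus using directives for System, Emgu.CV, Emgu.CV.CvEnum, and Emgu.CV.Util; the file names and console output are illustrative placeholders, not part of the original sample:

        long matchTime, score;
        Mat mask, homography;
        VectorOfKeyPoint modelKeyPoints, observedKeyPoints;

        using (Mat modelImage = CvInvoke.Imread("model.png", ImreadModes.Grayscale))
        using (Mat observedImage = CvInvoke.Imread("observed.png", ImreadModes.Grayscale))
        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        {
            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints,
                out observedKeyPoints, matches, out mask, out homography, out score);

            Console.WriteLine("Matched in {0} ms, score {1}, homography found: {2}",
                matchTime, score, homography != null);
        }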