private Rectangle[] FindEyes(string eyeFileName, CudaImage<Gray, Byte> image)
 {
     using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
     using (GpuMat eyeRegionMat = new GpuMat())
     {
         eye.DetectMultiScale(image, eyeRegionMat);
         Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
         return eyeRegion;
     }
 }
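
A hypothetical call site for this helper, assuming the Emgu CV CudaImage conversion pattern used in Example #24 (the file names are illustrative):

 using (Mat frame = new Mat("face.png"))
 using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(frame))
 using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
 {
     // run the eye cascade on the grayscale GPU image
     Rectangle[] eyes = FindEyes("haarcascade_eye.xml", gpuGray);
 }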
Example #2
      public void TestCudaImageAsyncOps()
      {
         if (CudaInvoke.HasCuda)
         {
            int counter = 0;
            Stopwatch watch = Stopwatch.StartNew();
            using (GpuMat img1 = new GpuMat(3000, 2000, DepthType.Cv8U, 3))
            using (GpuMat img2 = new GpuMat(3000, 2000, DepthType.Cv8U, 3))
            using (GpuMat img3 = new GpuMat())
            using (Stream stream = new Stream())
            using (GpuMat mat1 = new GpuMat())
            {
               img1.ConvertTo(mat1, DepthType.Cv8U, 1, 0, stream);
               while (!stream.Completed)
               {
                  if (counter <= int.MaxValue) counter++;
               }
               Trace.WriteLine(String.Format("Counter has been incremented {0} times", counter));

               counter = 0;
               CudaInvoke.CvtColor(img2, img3, CvToolbox.GetColorCvtCode(typeof(Bgr), typeof(Gray)), 1, stream);
               while (!stream.Completed)
               {
                  if (counter <= int.MaxValue) counter++;
               }
               Trace.WriteLine(String.Format("Counter has been incremented {0} times", counter));
            }
            watch.Stop();
            Trace.WriteLine(String.Format("Total time: {0} milliseconds", watch.ElapsedMilliseconds));
         }
      }
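
The busy-wait loops above exist only to show that the calls return before the GPU work completes. In real code you would normally block on the stream instead; a minimal sketch, assuming Emgu CV's Stream.WaitForCompletion():

      using (GpuMat src = new GpuMat(3000, 2000, DepthType.Cv8U, 3))
      using (GpuMat dst = new GpuMat())
      using (Stream stream = new Stream())
      {
         src.ConvertTo(dst, DepthType.Cv8U, 1, 0, stream); // enqueued, returns immediately
         stream.WaitForCompletion();                       // blocks until the queued GPU work finishes
      }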
Example #3
        public Image<Gray, byte> Solve(Image<Gray, byte> left, Image<Gray, byte> right)
        {
            var size = left.Size;

            using (var leftGpu = new GpuMat(left.Rows, left.Cols, DepthType.Cv16S, 1))
            using (var rightGpu = new GpuMat(left.Rows, left.Cols, DepthType.Cv16S, 1))
            using (var disparityGpu = new GpuMat(left.Rows, left.Cols, DepthType.Cv16S, 1))
            using (var filteredDisparityGpu = new GpuMat(left.Rows, left.Cols, DepthType.Cv16S, 1))
            using (var filteredDisparity16S = new Mat(size, DepthType.Cv16S, 1))
            using (var filteredDisparity8U = new Mat(size, DepthType.Cv8U, 1))
            {
                leftGpu.Upload(left.Mat);
                rightGpu.Upload(right.Mat);

                algorithm.FindStereoCorrespondence(leftGpu, rightGpu, disparityGpu);

                filter.Apply(disparityGpu, leftGpu, filteredDisparityGpu);

                filteredDisparityGpu.Download(filteredDisparity16S);

                CvInvoke.MinMaxLoc(filteredDisparity16S, ref min, ref max, ref minPosition, ref maxPosition);

                filteredDisparity16S.ConvertTo(filteredDisparity8U, DepthType.Cv8U, 255.0 / (max - min));

                return new Image<Gray, byte>(filteredDisparity8U.Bitmap);
            }
        }
Example #4
      static void Main()
      {
         Application.EnableVisualStyles();
         Application.SetCompatibleTextRenderingDefault(false);

         using (Mat image = new Mat("pedestrian.png"))
         {
            
            long processingTime;
            Rectangle[] results;

            if (CudaInvoke.HasCuda)
            {
               using (GpuMat gpuMat = new GpuMat(image))
                  results = FindPedestrian.Find(gpuMat, out processingTime);
            }
            else
            {
               using (UMat uImage = image.GetUMat(AccessType.ReadWrite))
                  results = FindPedestrian.Find(uImage, out processingTime);
            }
            
            foreach (Rectangle rect in results)
            {
               CvInvoke.Rectangle(image, rect, new Bgr(Color.Red).MCvScalar);
            }
            ImageViewer.Show(
               image,
               String.Format("Pedestrian detection using {0} in {1} milliseconds.",
                  CudaInvoke.HasCuda ? "GPU" : 
                  CvInvoke.UseOpenCL ? "OpenCL":
                  "CPU",
                  processingTime));
         }
      }
Example #5
        public Mat Calculate(Bitmap referenceBitmap, Bitmap currentBitmap)
        {
            Mat homography;

            using (var detector = new CudaSURF(threshold))
            using (var model = new Image<Gray, byte>(referenceBitmap))
            using (var observed = new Image<Gray, byte>(currentBitmap))
            using (var modelMat = new GpuMat(model))
            using (var modelKeyPointsRaw = detector.DetectKeyPointsRaw(modelMat))
            using (var modelKeyPoints = new VectorOfKeyPoint())
            using (var modelDescriptorsRaw = detector.ComputeDescriptorsRaw(modelMat, null, modelKeyPointsRaw))
            using (var observedMat = new GpuMat(observed))
            using (var observedKeyPointsRaw = detector.DetectKeyPointsRaw(observedMat))
            using (var observedKeyPoints = new VectorOfKeyPoint())
            using (var observedDescriptorsRaw = detector.ComputeDescriptorsRaw(observedMat, null, observedKeyPointsRaw))
            using (var matcher = new CudaBFMatcher(DistanceType.L2))
            using (var matches = new VectorOfVectorOfDMatch())
            {
                matcher.KnnMatch(observedDescriptorsRaw, modelDescriptorsRaw, matches, k);

                detector.DownloadKeypoints(modelKeyPointsRaw, modelKeyPoints);
                detector.DownloadKeypoints(observedKeyPointsRaw, observedKeyPoints);

                homography = TryFindHomography(modelKeyPoints, observedKeyPoints, matches);
            }

            return homography;
        }
Example #6
 public void TestGpuMatContinuous()
 {
    if (!CudaInvoke.HasCuda)
       return;
    using (GpuMat<Byte> mat = new GpuMat<byte>(1200, 640, 1, true))
       Assert.IsTrue(mat.IsContinuous);
 }
Example #7
      /// <summary>
      /// Create a Cuda cascade classifier from the specified file
      /// </summary>
      /// <param name="fileName">The file to create the classifier from</param>
      public CudaCascadeClassifier(String fileName)
      {
#if !NETFX_CORE
         Debug.Assert(File.Exists(fileName), String.Format("The Cascade file {0} does not exist.", fileName));
#endif
         using (CvString s = new CvString(fileName))
            _ptr = CudaInvoke.cudaCascadeClassifierCreate(s);
         _buffer = new GpuMat(1, 100, DepthType.Cv32S, 4);
      }
Example #8
 /// <summary>
 /// Detect keypoints in the CudaImage
 /// </summary>
 /// <param name="img">The image where keypoints will be detected from</param>
 /// <param name="mask">The optional mask, can be null if not needed</param>
 /// <returns>An array of keypoints</returns>
 public MKeyPoint[] DetectKeyPoints(GpuMat img, GpuMat mask)
 {
    using (GpuMat tmp = DetectKeyPointsRaw(img, mask))
    using (VectorOfKeyPoint kpts = new VectorOfKeyPoint())
    {
       DownloadKeypoints(tmp, kpts);
       return kpts.ToArray();
    }
 }
Example #9
        /// <summary>
        /// Find the pedestrian in the image
        /// </summary>
        /// <param name="image">The image</param>
        /// <param name="processingTime">The pedestrian detection time in milliseconds</param>
        /// <returns>The region where pedestrians are detected</returns>
        public static Rectangle[] Find(Mat image, bool tryUseCuda, bool tryUseOpenCL, out long processingTime)
        {
            Stopwatch watch;
            Rectangle[] regions;

#if !(IOS || NETFX_CORE)
            //check if there is a compatible Cuda device to run pedestrian detection
            if (tryUseCuda && CudaInvoke.HasCuda)
            {  //this is the Cuda version
                using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8)))
                {
                    des.SetSVMDetector(des.GetDefaultPeopleDetector());

                    watch = Stopwatch.StartNew();
                    using (GpuMat cudaBgr = new GpuMat(image))
                    using (GpuMat cudaBgra = new GpuMat())
                    using (VectorOfRect vr = new VectorOfRect())
                    {
                        CudaInvoke.CvtColor(cudaBgr, cudaBgra, ColorConversion.Bgr2Bgra);
                        des.DetectMultiScale(cudaBgra, vr);
                        regions = vr.ToArray();
                    }
                    watch.Stop();
                }
            }
            else
#endif
            {
                //Many OpenCL functions require OpenCL-compatible GPU devices.
                //As of OpenCV 3.0-alpha, OpenCV will crash if OpenCL is enabled and only an OpenCL-compatible CPU device is present.
                //So we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only has CPU OpenCL devices).
                CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

                //this is the CPU/OpenCL version
                using (HOGDescriptor des = new HOGDescriptor())
                {
                    des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());

                    //load the image into a UMat so it will automatically use OpenCL if available
                    UMat umat = image.ToUMat(AccessType.Read);

                    watch = Stopwatch.StartNew();

                    MCvObjectDetection[] results = des.DetectMultiScale(umat);
                    regions = new Rectangle[results.Length];
                    for (int i = 0; i < results.Length; i++)
                        regions[i] = results[i].Rect;
                    watch.Stop();
                }
            }

            processingTime = watch.ElapsedMilliseconds;

            return regions;
        }
Example #10
      /// <summary>
      /// Find the pedestrian in the image
      /// </summary>
      /// <param name="image">The image</param>
      /// <param name="processingTime">The pedestrian detection time in milliseconds</param>
      /// <returns>The region where pedestrians are detected</returns>
      public static Rectangle[] Find(Mat image, bool tryUseCuda, out long processingTime)
      {
         Stopwatch watch;
         Rectangle[] regions;

#if !(__IOS__ || NETFX_CORE)
         //check if there is a compatible Cuda device to run pedestrian detection
         if (tryUseCuda && CudaInvoke.HasCuda)
         {  //this is the Cuda version
            using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8,8), new Size(8,8)))
            {
               des.SetSVMDetector(des.GetDefaultPeopleDetector());

               watch = Stopwatch.StartNew();
               using (GpuMat cudaBgr = new GpuMat(image))
               using (GpuMat cudaBgra = new GpuMat() )
               using (VectorOfRect vr = new VectorOfRect())
               {
                  CudaInvoke.CvtColor(cudaBgr, cudaBgra, ColorConversion.Bgr2Bgra);
                  des.DetectMultiScale(cudaBgra, vr);
                   regions = vr.ToArray();
                }
                watch.Stop();
             }
         }
         else
#endif
         {  
            //this is the CPU/OpenCL version
            using (HOGDescriptor des = new HOGDescriptor())
            {
               des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
               
               //load the image into a UMat so it will automatically use OpenCL if available
               UMat umat = image.ToUMat(AccessType.Read);

               watch = Stopwatch.StartNew();
               
               MCvObjectDetection[] results = des.DetectMultiScale(umat);
               regions = new Rectangle[results.Length];
               for (int i = 0; i < results.Length; i++)
                  regions[i] = results[i].Rect;
               watch.Stop();
            }
         }
        
         processingTime = watch.ElapsedMilliseconds;

         return regions;
      }
Example #11
      /// <summary>
      /// Find the pedestrian in the image
      /// </summary>
      /// <param name="image">The image</param>
      /// <param name="processingTime">The processing time in milliseconds</param>
      /// <returns>The regions where pedestrians are detected</returns>
      public static Rectangle[] Find(IInputArray image, out long processingTime)
      {
         Stopwatch watch;
         Rectangle[] regions;

         using (InputArray iaImage = image.GetInputArray())
         {
#if !(__IOS__ || NETFX_CORE)
            //if the input array is a GpuMat
            //check if there is a compatible Cuda device to run pedestrian detection
            if (iaImage.Kind == InputArray.Type.CudaGpuMat)
            {
               //this is the Cuda version
               using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8)))
               {
                  des.SetSVMDetector(des.GetDefaultPeopleDetector());

                  watch = Stopwatch.StartNew();
                  using (GpuMat cudaBgra = new GpuMat())
                  using (VectorOfRect vr = new VectorOfRect())
                  {
                     CudaInvoke.CvtColor(image, cudaBgra, ColorConversion.Bgr2Bgra);
                     des.DetectMultiScale(cudaBgra, vr);
                      regions = vr.ToArray();
                   }
                   watch.Stop();
                }
            }
            else
#endif
            {
               //this is the CPU/OpenCL version
               using (HOGDescriptor des = new HOGDescriptor())
               {
                  des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
                  watch = Stopwatch.StartNew();

                  MCvObjectDetection[] results = des.DetectMultiScale(image);
                  regions = new Rectangle[results.Length];
                  for (int i = 0; i < results.Length; i++)
                     regions[i] = results[i].Rect;
                  watch.Stop();
               }
            }

            processingTime = watch.ElapsedMilliseconds;

            return regions;
         }
      }
Example #12
        /// <summary>
        /// Download keypoints from GPU to CPU memory.
        /// </summary>
        /// <param name="dKeypoints"></param>
        /// <returns></returns>
        public KeyPoint[] DownloadKeypoints(GpuMat dKeypoints)
        {
            if (disposed)
                throw new ObjectDisposedException(GetType().Name);
            if (dKeypoints == null)
                throw new ArgumentNullException("dKeypoints");

            KeyPoint[] result;
            using (var keypoints = new VectorOfKeyPoint())
            {
                NativeMethods.gpu_FAST_GPU_downloadKeypoints(ptr, dKeypoints.CvPtr, keypoints.CvPtr);
                result = keypoints.ToArray();
            }

            GC.KeepAlive(dKeypoints);
            return result;
        }
Example #13
        /// <summary>
        /// Finds the keypoints using FAST detector.
        /// </summary>
        /// <param name="image">Image where keypoints (corners) are detected. 
        /// Only 8-bit grayscale images are supported.</param>
        /// <param name="mask">Optional input mask that marks the regions where we should detect features.</param>
        /// <param name="keypoints">The output vector of keypoints.</param>
        public void Run(GpuMat image, GpuMat mask, GpuMat keypoints)
        {
            if (disposed)
                throw new ObjectDisposedException(GetType().Name);
            if (image == null)
                throw new ArgumentNullException("image");
            if (mask == null)
                throw new ArgumentNullException("mask");
            if (keypoints == null)
                throw new ArgumentNullException("keypoints");

            NativeMethods.gpu_FAST_GPU_operator1(ptr, image.CvPtr, mask.CvPtr, keypoints.CvPtr);

            GC.KeepAlive(image);
            GC.KeepAlive(mask);
            GC.KeepAlive(keypoints);
        }
Example #14
 public void UndistortRectifyMap()
 {
     StereoSystem.UndistortRectifyMap(ImageHeight,
         ImageWidth,
         RectificationTransform.Data,
         CameraMatrix.Data,
         Fc.Data,
         Cc.Data,
         distCoeffs.Data,
         out mapx,
         out mapy);
     mapxGPU = new GpuMat<float>(mapx);
     mapyGPU = new GpuMat<float>(mapy);
 }
Example #15
 /// <summary>
 /// Ensures that the size of the given matrix is not less than (rows, cols)
 /// and that its type matches the specified one
 /// </summary>
 /// <param name="size">Number of rows and columns in a 2D array.</param>
 /// <param name="type">Array type.</param>
 /// <param name="m">The matrix to validate and, if necessary, reallocate.</param>
 public static void EnsureSizeIsEnough(Size size, MatType type, GpuMat m)
 {
     ThrowIfGpuNotAvailable();
     EnsureSizeIsEnough(size.Height, size.Width, type, m);
 }
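
A typical use is reusing one GpuMat across iterations instead of reallocating it every frame; a sketch under the assumption that 'frames' is some sequence of same-typed Mat objects:

     // EnsureSizeIsEnough only reallocates when the requested size or type no longer fits,
     // so the buffer is allocated once and then reused.
     GpuMat buffer = new GpuMat();
     foreach (Mat frame in frames)                // 'frames' is a hypothetical frame source
     {
         EnsureSizeIsEnough(frame.Size(), MatType.CV_8UC3, buffer);
         buffer.Upload(frame);
         // ... run GPU operations against 'buffer' ...
     }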
Example #16
        /// <summary>
        /// Performs object detection without a multi-scale window.
        /// </summary>
        /// <param name="img">Source image.</param>
        /// <param name="hitThreshold">Threshold for the distance between features and the SVM classifying plane.</param>
        /// <param name="winStride">Window stride. It must be a multiple of the block stride.</param>
        /// <param name="padding">Mock parameter to keep CPU interface compatibility. It must be (0,0).</param>
        /// <returns>The detected object locations.</returns>
        public virtual Point[] Detect(GpuMat img, double hitThreshold, Size winStride, Size padding)
        {
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            if (img == null)
                throw new ArgumentNullException("img");

            using (var flVec = new VectorOfPoint())
            {
                NativeMethods.HOGDescriptor_detect(ptr, img.CvPtr, flVec.CvPtr, hitThreshold, winStride, padding);
                // copy the results from the native std::vector<cv::Point> into a managed Point[] and return it
                return flVec.ToArray();
            }
        }
Example #17
        /// <summary>
        /// Computes the disparity map for the specified stereo pair.
        /// </summary>
        /// <param name="left">Left image. Only 8-bit grayscale images are supported.</param>
        /// <param name="right">Right image with the same size and type as the left one.</param>
        /// <param name="disparity">Output disparity map. It has the same size as the input images.</param>
        public void Run(GpuMat left, GpuMat right, GpuMat disparity)
        {
            if (disposed)
                throw new ObjectDisposedException("StereoBM_GPU");
            if(left == null)
                throw new ArgumentNullException("left");
            if(right == null)
                throw new ArgumentNullException("right");
            if (disparity == null)
                throw new ArgumentNullException("disparity");

            NativeMethods.StereoBM_GPU_run1(ptr, left.CvPtr, right.CvPtr, disparity.CvPtr);
        }
Example #18
 /// <summary>
 /// Obtain the keypoints array from GpuMat
 /// </summary>
 /// <param name="src">The keypoints obtained from DetectKeyPointsRaw</param>
 /// <param name="dst">The vector of keypoints</param>
 public void DownloadKeypoints(GpuMat src, VectorOfKeyPoint dst)
 {
     ContribInvoke.cudaSURFDownloadKeypoints(_ptr, src, dst);
 }
Example #19
 /// <summary>
 /// Obtain a GpuMat from the keypoints array
 /// </summary>
 /// <param name="src">The keypoints array</param>
 /// <param name="dst">A GpuMat that represent the keypoints</param>
 public void UploadKeypoints(VectorOfKeyPoint src, GpuMat dst)
 {
     ContribInvoke.cudaSURFUploadKeypoints(_ptr, src, dst);
 }
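
Together with DownloadKeypoints above, this gives a round trip between the GPU keypoint representation and the managed VectorOfKeyPoint; a minimal sketch, assuming the CudaSURF detector from Example #5 and an illustrative Hessian threshold:

     using (CudaSURF surf = new CudaSURF(400f))                  // threshold value is illustrative
     using (GpuMat gpuImg = new GpuMat(grayImage))               // 'grayImage' is an 8-bit Image<Gray, byte>
     using (GpuMat raw = surf.DetectKeyPointsRaw(gpuImg, null))
     using (VectorOfKeyPoint cpuKeyPoints = new VectorOfKeyPoint())
     using (GpuMat uploaded = new GpuMat())
     {
         surf.DownloadKeypoints(raw, cpuKeyPoints);              // GPU -> CPU
         surf.UploadKeypoints(cpuKeyPoints, uploaded);           // CPU -> GPU
     }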
Example #20
        public static Bitmap PerformShapeDetection(Bitmap frame, ShapeDetectionVariables detectionVars)
        {
            StringBuilder msgBuilder = new StringBuilder("Performance: ");

            Image <Bgr, Byte> img = new Image <Bgr, byte>(frame);
            Mat MatImg            = img.Mat;

            Mat outputImg = new Mat();

            if (CudaInvoke.HasCuda)
            {
                using (GpuMat gMatSrc = new GpuMat())
                    using (GpuMat gMatDst = new GpuMat()) {
                        gMatSrc.Upload(MatImg);
                        using (CudaGaussianFilter noiseReduction = new CudaGaussianFilter(MatImg.Depth, img.NumberOfChannels, MatImg.Depth, img.NumberOfChannels, new Size(1, 1), 0))
                            noiseReduction.Apply(gMatSrc, gMatDst);
                        gMatDst.Download(outputImg);
                    }
            }
            else
            {
                Mat pyrDown = new Mat();
                CvInvoke.PyrDown(img, pyrDown);
                CvInvoke.PyrUp(pyrDown, img);
                outputImg = img.Mat;
            }

            UMat uimage = new UMat();

            CvInvoke.CvtColor(outputImg, uimage, ColorConversion.Bgr2Gray);

            CircleF[] circles = new CircleF[0];
            if (detectionVars.calcCircles)
            {
                circles = CvInvoke.HoughCircles(
                    uimage,
                    HoughType.Gradient, 1.0, 20.0,
                    detectionVars.circleCannyThreshold,
                    detectionVars.circleAccumulatorThreshold == 0 ? 1 : detectionVars.circleAccumulatorThreshold,
                    detectionVars.minradius,
                    detectionVars.maxRadius);
            }

            #region Canny and edge detection
            UMat cannyEdges = new UMat();
            CvInvoke.Canny(uimage, cannyEdges, detectionVars.lineCannyThreshold, detectionVars.cannyThresholdLinking);

            LineSegment2D[] lines = new LineSegment2D[0];
            if (detectionVars.calcLines)
            {
                lines = CvInvoke.HoughLinesP(
                    cannyEdges,
                    1,                           //Distance resolution in pixel-related units
                    Math.PI / 45.0,              //Angle resolution measured in radians.
                    detectionVars.lineThreshold, //threshold
                    detectionVars.minLineWidth,  //min Line width
                    10);                         //gap between lines
            }
            #endregion

            #region Find triangles and rectangles

            List <RotatedRect> boxList = new List <RotatedRect>(); //a box is a rotated rectangle

            if (detectionVars.calcRectTri)
            {
                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint()) {
                    CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                    int count = contours.Size;
                    for (int i = 0; i < count; i++)
                    {
                        using (VectorOfPoint contour = contours[i])
                            using (VectorOfPoint approxContour = new VectorOfPoint()) {
                                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                                if (CvInvoke.ContourArea(approxContour, false) > 250)   //only consider contours with area greater than 250
                                {
                                    if (approxContour.Size == 4)                        //The contour has 4 vertices.
                                    {
                                        #region determine if all the angles in the contour are within [80, 100] degree
                                        bool            isRectangle = true;
                                        Point[]         pts         = approxContour.ToArray();
                                        LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                        for (int j = 0; j < edges.Length; j++)
                                        {
                                            double angle = Math.Abs(
                                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                            if (angle < 80 || angle > 100)
                                            {
                                                isRectangle = false;
                                                break;
                                            }
                                        }
                                        #endregion

                                        if (isRectangle)
                                        {
                                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                        }
                                    }
                                }
                            }
                    }
                }
            }

            #endregion

            Image <Bgra, Byte> alphaImgShape = new Image <Bgra, byte>(img.Size.Width, img.Size.Height, new Bgra(0, 0, 0, .5));
            Mat alphaimg = new Mat();
            CvInvoke.CvtColor(img, alphaimg, ColorConversion.Bgr2Bgra);
            #region draw rectangles and triangles
            if (detectionVars.calcRectTri)
            {
                Image <Bgr, Byte> triangleRectangleImage = new Image <Bgr, Byte>(img.Size);

                foreach (RotatedRect box in boxList)
                {
                    CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(0, 255, 0).MCvScalar, 2);
                }

                CvInvoke.AddWeighted(alphaImgShape, .5, BlackTransparent(triangleRectangleImage), .5, 0, alphaImgShape);


                if (CudaInvoke.HasCuda)
                {
                    using (GpuMat gMatSrc = new GpuMat())
                        using (GpuMat gMatSrc2 = new GpuMat())
                            using (GpuMat gMatDst = new GpuMat()) {
                                gMatSrc.Upload(alphaimg);
                                gMatSrc2.Upload(alphaImgShape);
                                CudaInvoke.AlphaComp(gMatSrc, gMatSrc2, gMatDst, AlphaCompTypes.Plus);
                                gMatDst.Download(alphaimg);
                            }
                }
                else
                {
                    img = Overlay(img, alphaImgShape);
                }
            }
            #endregion

            #region draw circles
            if (detectionVars.calcCircles)
            {
                Image <Bgr, Byte> circleImage = new Image <Bgr, Byte>(img.Size);
                foreach (CircleF circle in circles.Take(10))
                {
                    CvInvoke.Circle(circleImage, Point.Round(circle.Center), (int)circle.Radius, new Bgr(0, 255, 0).MCvScalar, 2);
                }

                alphaImgShape = new Image <Bgra, byte>(img.Size.Width, img.Size.Height, new Bgra(0, 0, 0, .5));
                CvInvoke.AddWeighted(alphaImgShape, .7, BlackTransparent(circleImage), .5, 0, alphaImgShape);
                if (CudaInvoke.HasCuda)
                {
                    using (GpuMat gMatSrc = new GpuMat())
                        using (GpuMat gMatSrc2 = new GpuMat())
                            using (GpuMat gMatDst = new GpuMat()) {
                                gMatSrc.Upload(alphaimg);
                                gMatSrc2.Upload(alphaImgShape);
                                CudaInvoke.AlphaComp(gMatSrc, gMatSrc2, gMatDst, AlphaCompTypes.Plus);
                                gMatDst.Download(alphaimg);
                            }
                }
                else
                {
                    img = Overlay(img, alphaImgShape);
                }
            }
            #endregion

            #region draw lines

            if (detectionVars.calcLines)
            {
                Image <Bgr, Byte> lineImage = new Image <Bgr, Byte>(img.Size);
                foreach (LineSegment2D line in lines)
                {
                    CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(0, 255, 0).MCvScalar, 2);
                }

                alphaImgShape = new Image <Bgra, byte>(img.Size.Width, img.Size.Height, new Bgra(0, 0, 0, .5));
                CvInvoke.AddWeighted(alphaImgShape, .5, BlackTransparent(lineImage), .5, 0, alphaImgShape);
                if (CudaInvoke.HasCuda)
                {
                    using (GpuMat gMatSrc = new GpuMat())
                        using (GpuMat gMatSrc2 = new GpuMat())
                            using (GpuMat gMatDst = new GpuMat()) {
                                gMatSrc.Upload(alphaimg);
                                gMatSrc2.Upload(alphaImgShape);
                                CudaInvoke.AlphaComp(gMatSrc, gMatSrc2, gMatDst, AlphaCompTypes.Plus);
                                gMatDst.Download(alphaimg);
                            }
                }
                else
                {
                    img = Overlay(img, alphaImgShape);
                }
            }
            #endregion

            GC.Collect();   // first time I've had to use this, but this program will use as much memory as possible, resulting in corruptions

            return(alphaimg.Bitmap ?? frame);
        }
Example #21
        public static void Detect(
            Mat image, String faceFileName, String eyeleftFileName, string eyerightFileName,
            List <Rectangle> faces, List <Rectangle> eyesleft, List <Rectangle> eyesright,
            bool tryUseCuda, bool tryUseOpenCL,
            out long detectionTime)
        {
            Stopwatch watch;

#if !(IOS || NETFX_CORE)
            if (tryUseCuda && CudaInvoke.HasCuda)
            {
                using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                    using (CudaCascadeClassifier eyeleft = new CudaCascadeClassifier(eyeleftFileName))
                        using (CudaCascadeClassifier eyeright = new CudaCascadeClassifier(eyerightFileName))
                        {
                            face.ScaleFactor   = 1.1;
                            face.MinNeighbors  = 10;
                            face.MinObjectSize = Size.Empty;

                            eyeleft.ScaleFactor   = 1.1;
                            eyeleft.MinNeighbors  = 10;
                            eyeleft.MinObjectSize = Size.Empty;

                            eyeright.ScaleFactor   = 1.1;
                            eyeright.MinNeighbors  = 10;
                            eyeright.MinObjectSize = Size.Empty;
                            watch = Stopwatch.StartNew();
                            using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image))
                                using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                                    using (GpuMat region = new GpuMat())
                                    {
                                        face.DetectMultiScale(gpuGray, region);
                                        Rectangle[] faceRegion = face.Convert(region);
                                        faces.AddRange(faceRegion);
                                        foreach (Rectangle f in faceRegion)
                                        {
                                            using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                                            {
                                                //For some reason a clone is required.
                                                //Might be a bug of CudaCascadeClassifier in opencv
                                                using (CudaImage <Gray, Byte> clone = faceImg.Clone(null))
                                                    using (GpuMat eyeRegionMat = new GpuMat())
                                                    {
                                                        eyeleft.DetectMultiScale(clone, eyeRegionMat);
                                                        Rectangle[] eyeRegion = eyeleft.Convert(eyeRegionMat);
                                                        foreach (Rectangle eleft in eyeRegion)
                                                        {
                                                            Rectangle eyeRectleft = eleft;
                                                            eyeRectleft.Offset(f.X, f.Y);
                                                            eyesleft.Add(eyeRectleft);
                                                        }
                                                    }
                                                using (CudaImage <Gray, Byte> clone = faceImg.Clone(null))
                                                    using (GpuMat eyeRegionMat = new GpuMat())
                                                    {
                                                        eyeright.DetectMultiScale(clone, eyeRegionMat);
                                                        Rectangle[] eyeRegion = eyeright.Convert(eyeRegionMat);
                                                        foreach (Rectangle eright in eyeRegion)
                                                        {
                                                            Rectangle eyeRectright = eright;
                                                            eyeRectright.Offset(f.X, f.Y);
                                                            eyesright.Add(eyeRectright);
                                                        }
                                                    }
                                            }
                                        }
                                    }
                            watch.Stop();
                        }
            }
            else
#endif
            {
                //Many OpenCL functions require OpenCL-compatible GPU devices.
                //As of OpenCV 3.0-alpha, OpenCV will crash if OpenCL is enabled and only an OpenCL-compatible CPU device is present.
                //So we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only has CPU OpenCL devices).
                CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;


                //Read the HaarCascade objects
                using (CascadeClassifier face = new CascadeClassifier(faceFileName))
                    using (CascadeClassifier eyeleft = new CascadeClassifier(eyeleftFileName))
                        using (CascadeClassifier eyeright = new CascadeClassifier(eyerightFileName))
                        {
                            watch = Stopwatch.StartNew();
                            using (UMat ugray = new UMat())
                            {
                                CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                                //Equalize the brightness and increase the contrast of the image
                                CvInvoke.EqualizeHist(ugray, ugray);

                                //Detect the faces from the grayscale image and store the locations as rectangles
                                //The first dimension is the channel
                                //The second dimension is the index of the rectangle in the specific channel
                                Rectangle[] facesDetected = face.DetectMultiScale(
                                    ugray,
                                    1.1,
                                    10,
                                    new Size(20, 20));

                                faces.AddRange(facesDetected);

                                foreach (Rectangle f in facesDetected)
                                {
                                    //Use the region of the face
                                    using (UMat faceRegion = new UMat(ugray, f))
                                    {
                                        //find the rectangles of the left eye
                                        Rectangle[] eyesleftDetected = eyeleft.DetectMultiScale(
                                            faceRegion,
                                            1.1,
                                            10,
                                            new Size(20, 20));
                                        foreach (Rectangle eleft in eyesleftDetected)
                                        {
                                            Rectangle eyeRectleft = eleft;
                                            eyeRectleft.Offset(f.X, f.Y);
                                            eyesleft.Add(eyeRectleft);
                                        }
                                        //find the rectangles of the right eye
                                        Rectangle[] eyesrightDetected = eyeright.DetectMultiScale(
                                            faceRegion,
                                            1.1,
                                            10,
                                            new Size(20, 20));
                                        foreach (Rectangle eright in eyesrightDetected)
                                        {
                                            Rectangle eyeRectright = eright;
                                            eyeRectright.Offset(f.X, f.Y);
                                            eyesright.Add(eyeRectright);
                                        }
                                    }
                                }
                            }
                            watch.Stop();
                        }
            }
            detectionTime = watch.ElapsedMilliseconds; //total elapsed time
        }
Example #22
        // function that depicts the results of optical flow operations
        // requires a reference to the image being processed and the results of the Farneback algorithm stored in flow_x and flow_y
        // optiVars.stepRate gives the distance between depicted pixels; optiVars.shiftThatCounts is the length threshold a vector must exceed to be counted
        private Image <Bgr, Byte> Draw_Farneback_flow_map(Image <Bgr, Byte> img_curr, Image <Gray, float> flow_x, Image <Gray, float> flow_y, OpticalFlowVariable optiVars)
        {
            // NOTE: flow Images (flow_x and flow_y) are organized like this:
            // at index (is position of pixel before optical flow operation) of Image array
            // the shift of this specific pixel after the flow operation is stored
            // if no shift has occurred, the value stored at the index is zero
            // (i.e., pixel[index] = 0)
            GC.Collect(0, GCCollectionMode.Forced);

            Image <Bgr, Byte> blackFrame = new Image <Bgr, Byte>(new Bitmap(1280 / frameReduction, 720 / frameReduction));

            System.Drawing.Point from_dot_xy = new System.Drawing.Point(); // point variable to draw lines between dots before and after flow (=vectors)
            System.Drawing.Point to_dot_xy   = new System.Drawing.Point(); // point variable, which will be endpoint of line between dots before and after flow

            MCvScalar col;                                                 // variable to store color values of lines representing flow vectors

            col.V0 = 100;
            col.V1 = 255;
            col.V2 = 0;
            col.V3 = 100;


            // for drawing central line based on window size
            System.Drawing.Point[] window_centre = new System.Drawing.Point[2];

            window_centre[0].X = img_curr.Width / 2;// * Convert.ToInt32(txt_resize_factor.Text)/ 2;
            window_centre[0].Y = 0;

            window_centre[1].X = img_curr.Width / 2; //* Convert.ToInt32(txt_resize_factor.Text) / 2;
            window_centre[1].Y = orig_height;


            // Point variables that constitute starting point for drawing summed and mean vectors onto image
            System.Drawing.Point vector_right = new System.Drawing.Point();
            System.Drawing.Point vector_left  = new System.Drawing.Point();

            // variables used for summing vectors to left and to the right of the window's centre
            System.Drawing.Point vector_right_end_window = new System.Drawing.Point();
            System.Drawing.Point vector_left_end_window  = new System.Drawing.Point();


            // determine centre of output window (needed for summed vectors)
            int mid_point_horz = 1280 * frameReduction / 2; // width
            int mid_point_vert = 720 * frameReduction / 2;  // height

            // landmark coordinates that are origin of direction vectors
            // near centre of image window; to depict motion of left and right half of "body" (or more precisely, window)
            vector_right.X = (mid_point_horz + 10) * optiVars.stepRate;
            vector_right.Y = mid_point_vert * optiVars.stepRate;

            vector_left.X = (mid_point_horz - 10) * optiVars.stepRate;
            vector_left.Y = mid_point_vert * optiVars.stepRate;


            // counting landmarks in flow field that exceed a certain value (shift_that_counts); left and right is based on centre of window (half of width)
            double count_X_right = 0;
            double count_Y_right = 0;

            double count_X_left = 0;
            double count_Y_left = 0;

            // loops over image matrix; position of dots before and after optical flow operations are compared and vector is drawn between the old and the new position
            for (int i = 0; i < flow_x.Rows; i += optiVars.stepRate)     // NOTE: steps are given by step variable in arguments of method
            {
                for (int j = 0; j < flow_x.Cols; j += optiVars.stepRate) // BEGIN FOR

                // pixel shift measured by optical flow is transferred to point variables
                // storing starting point of motion (from_dot..) and its end points (to_dot...)

                {
                    to_dot_xy.X = (int)flow_x.Data[i, j, 0]; // access single pixel of flow matrix where x-coords of pixel after flow are stored; only gives the shift
                    to_dot_xy.Y = (int)flow_y.Data[i, j, 0]; // access single pixel of flow matrix where y-coords of pixel after flow are stored; only gives the shift

                    from_dot_xy.X = j;                       // index of loop is position on image (here: x-coord); X is cols
                    from_dot_xy.Y = i;                       // index of loop is position on image (here: y-coord); Y is rows


                    // LEFT SIDE OF WINDOW BASED CENTRE
                    if (j < window_centre[0].X)
                    {
                        //  count the x and y indices and sum them when they exceed the value given by shift_that_counts (here:0)
                        if (Math.Abs(to_dot_xy.X) > optiVars.shiftThatCounts)
                        {
                            count_X_left++;
                        }
                        if (Math.Abs(to_dot_xy.Y) > optiVars.shiftThatCounts)
                        {
                            count_Y_left++;
                        }
                        // sum up vectors
                        vector_left_end_window.Y += to_dot_xy.Y;
                        vector_left_end_window.X += to_dot_xy.X;
                    }
                    else //(j > window_centre[0].X)// WINDOW BASED CENTRE
                    {
                        //  like above; count the x and y indices and sum them
                        if (Math.Abs(to_dot_xy.X) > optiVars.shiftThatCounts)
                        {
                            count_X_right++;
                        }

                        if (Math.Abs(to_dot_xy.Y) > optiVars.shiftThatCounts)
                        {
                            count_Y_right++;
                        }

                        // sum  vectors
                        vector_right_end_window.Y += to_dot_xy.Y;
                        vector_right_end_window.X += to_dot_xy.X;
                    }

                    to_dot_xy.X = from_dot_xy.X + to_dot_xy.X; // new x-coord position of pixel (taking into account distance from the origin)
                    to_dot_xy.Y = from_dot_xy.Y + to_dot_xy.Y; // new y-coord position of pixel

                    // draw line between coords on image and pixel shift stored in flow field after applying  optical-flow
                    if (GetDistance(from_dot_xy.X, from_dot_xy.Y, to_dot_xy.X, to_dot_xy.Y) > optiVars.shiftThatCounts)
                    {
                        CvInvoke.Line(blackFrame, from_dot_xy, to_dot_xy, col, 1);
                    }


                    //CvInvoke.Imshow("Flow field vectors", img_curr); // show image with flow depicted as lines
                } // END of both for loops
            }
            Mat blackDst = new Mat();
            Mat BlackMat = blackFrame.Mat;

            using (GpuMat gMatSrc = new GpuMat())
                using (GpuMat gMatDst = new GpuMat()) {
                    gMatSrc.Upload(BlackMat);
                    Emgu.CV.Cuda.CudaInvoke.Resize(gMatSrc, gMatDst, new Size(0, 0), frameReduction, frameReduction, Inter.Area);
                    gMatDst.Download(blackDst);
                }

            GC.Collect();

            return(blackDst.ToImage <Bgr, Byte>());
        }
Example #23
        // calculates the optical flow according to the Farneback algorithm
        public Bitmap Dense_Optical_Flow(Bitmap bmp, OpticalFlowVariable optiVariables, Camera cam)
        {
            frameReduction = optiVariables.frameReduction < 1 ? 1 : optiVariables.frameReduction;
            // the current frame becomes the previous frame (prev_frame stores the frame from the previous call)
            prev_frame = matframe;

            Image <Bgr, Byte> imageCV = new Image <Bgr, byte>(bmp); //Image Class from Emgu.CV

            matframe = imageCV.Mat;                                 //This is your Image converted to Mat

            if (prev_frame == null)
            {
                return(bmp);
            }

            // increment the frame number
            frame_nr += 1;


            // initialize this Image matrix before resizing (see below), so it remains at the original size
            img_average_vectors = new Image <Bgr, byte>(matframe.Width, matframe.Height);

            orig_height = matframe.Height;

            Size n_size = new Size(matframe.Width / frameReduction,
                                   matframe.Height / frameReduction);

            // Resize frame and previous frame (smaller to reduce processing load)
            //Source

            Mat matFramDst = new Mat();

            using (GpuMat gMatSrc = new GpuMat())
                using (GpuMat gMatDst = new GpuMat()) {
                    gMatSrc.Upload(matframe);
                    Emgu.CV.Cuda.CudaInvoke.Resize(gMatSrc, gMatDst, new Size(0, 0), (double)1 / frameReduction, (double)1 / frameReduction);
                    gMatDst.Download(matFramDst);
                }

            matframe = matFramDst;

            if (prev_frame.Height != matframe.Height)
            {
                return(bmp);
            }



            // images that are compared during the flow operations (see below)
            // these need to be greyscale images
            Image <Gray, Byte> prev_grey_img, curr_grey_img;

            prev_grey_img = new Image <Gray, byte>(prev_frame.Width, prev_frame.Height);
            curr_grey_img = new Image <Gray, byte>(matframe.Width, matframe.Height);

            // Image arrays to store information of flow vectors (one image array for each direction, which is x and y)
            Image <Gray, float> flow_x;
            Image <Gray, float> flow_y;

            flow_x = new Image <Gray, float>(matframe.Width, matframe.Height);
            flow_y = new Image <Gray, float>(matframe.Width, matframe.Height);

            // assign information stored in frame and previous frame in greyscale images (works without convert function)
            CvInvoke.CvtColor(matframe, curr_grey_img, ColorConversion.Bgr2Gray);
            CvInvoke.CvtColor(prev_frame, prev_grey_img, ColorConversion.Bgr2Gray);


            // Apply Farneback dense optical flow
            // parameters are the two greyscale images (these are compared)
            // and two image arrays storing the flow information
            // the results of the procedure are stored
            // the rest of the parameters are:
            // pyrScale: specifies the image scale to build pyramids: 0.5 means each next layer is half the size of the former
            // levels: number of pyramid levels: 1 means no extra layers
            // winSize: the average window size; larger values = more robust to noise but more blur
            // iterations: number of iterations at each pyramid level
            // polyN: size of pixel neighbourhood: higher = more precision but more blur
            // polySigma
            // flags


            CvInvoke.CalcOpticalFlowFarneback(prev_grey_img, curr_grey_img, flow_x, flow_y, 0.5, 3, 10, 3, 6, 1.3, 0);


            // call function that shows results of Farneback algorithm
            Image <Bgr, Byte> farnebackImg = Draw_Farneback_flow_map(matframe.ToImage <Bgr, Byte>(), flow_x, flow_y, optiVariables);// given in global variables section

            // Release memory
            prev_grey_img.Dispose();
            curr_grey_img.Dispose();
            flow_x.Dispose();
            flow_y.Dispose();

            //return farnebackImg.ToBitmap();

            Image <Bgra, Byte> alphaImgShape = new Image <Bgra, byte>(imageCV.Size.Width, imageCV.Size.Height, new Bgra(0, 0, 0, .5));

            CvInvoke.AddWeighted(alphaImgShape, .5, BlackTransparent(farnebackImg), .5, 0, alphaImgShape);

            Mat alphaimg = new Mat();

            CvInvoke.CvtColor(imageCV, alphaimg, ColorConversion.Bgr2Bgra);

            if (CudaInvoke.HasCuda)
            {
                using (GpuMat gMatSrc = new GpuMat())
                    using (GpuMat gMatSrc2 = new GpuMat())
                        using (GpuMat gMatDst = new GpuMat()) {
                            gMatSrc.Upload(alphaimg);
                            gMatSrc2.Upload(alphaImgShape);
                            CudaInvoke.AlphaComp(gMatSrc, gMatSrc2, gMatDst, AlphaCompTypes.Plus);
                            gMatDst.Download(alphaimg);
                        }
                return(alphaimg.Bitmap);
            }
            else
            {
                return(Overlay(imageCV, alphaImgShape).ToBitmap());
            }
        }
Example #24
        public static void Detect(
            Mat image, String faceFileName, String eyeFileName,
            List <Rectangle> faces, List <Rectangle> eyes,
            bool tryUseCuda, bool tryUseOpenCL,
            out long detectionTime)
        {
            Stopwatch watch;

#if !(IOS || NETFX_CORE)
            if (tryUseCuda && CudaInvoke.HasCuda)
            {
                using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                    using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
                    {
                        face.ScaleFactor   = 1.1;
                        face.MinNeighbors  = 10;
                        face.MinObjectSize = Size.Empty;
                        eye.ScaleFactor    = 1.1;
                        eye.MinNeighbors   = 10;
                        eye.MinObjectSize  = Size.Empty;
                        watch = Stopwatch.StartNew();
                        using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image))
                            using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                                using (GpuMat region = new GpuMat())
                                {
                                    face.DetectMultiScale(gpuGray, region);
                                    Rectangle[] faceRegion = face.Convert(region);
                                    faces.AddRange(faceRegion);
                                    foreach (Rectangle f in faceRegion)
                                    {
                                        using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                                        {
                                            //For some reason a clone is required.
                                            //Might be a bug of CudaCascadeClassifier in opencv
                                            using (CudaImage <Gray, Byte> clone = faceImg.Clone(null))
                                                using (GpuMat eyeRegionMat = new GpuMat())
                                                {
                                                    eye.DetectMultiScale(clone, eyeRegionMat);
                                                    Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
                                                    foreach (Rectangle e in eyeRegion)
                                                    {
                                                        Rectangle eyeRect = e;
                                                        eyeRect.Offset(f.X, f.Y);
                                                        eyes.Add(eyeRect);
                                                    }
                                                }
                                        }
                                    }
                                }
                        watch.Stop();
                    }
            }
            else
#endif
            {
                //Many OpenCL functions require OpenCL-compatible GPU devices.
                //As of OpenCV 3.0-alpha, OpenCV will crash if OpenCL is enabled and only an OpenCL-compatible CPU device is present.
                //So we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only has CPU OpenCL devices).
                CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;


                //Read the HaarCascade objects
                using (CascadeClassifier face = new CascadeClassifier(faceFileName))
                    using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
                    {
                        watch = Stopwatch.StartNew();
                        using (UMat ugray = new UMat())
                        {
                            CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                            //normalizes brightness and increases contrast of the image
                            CvInvoke.EqualizeHist(ugray, ugray);

                            //Detect the faces  from the gray scale image and store the locations as rectangle
                            //The first dimensional is the channel
                            //The second dimension is the index of the rectangle in the specific channel
                            Rectangle[] facesDetected = face.DetectMultiScale(
                                ugray,
                                1.1,
                                10,
                                new Size(20, 20));

                            faces.AddRange(facesDetected);

                            foreach (Rectangle f in facesDetected)
                            {
                                //Get the region of interest on the faces
                                using (UMat faceRegion = new UMat(ugray, f))
                                {
                                    Rectangle[] eyesDetected = eye.DetectMultiScale(
                                        faceRegion,
                                        1.1,
                                        10,
                                        new Size(20, 20));

                                    foreach (Rectangle e in eyesDetected)
                                    {
                                        Rectangle eyeRect = e;
                                        eyeRect.Offset(f.X, f.Y);
                                        eyes.Add(eyeRect);
                                    }
                                }
                            }
                        }
                        watch.Stop();
                    }
            }
            detectionTime = watch.ElapsedMilliseconds;
        }
Example #25
        /// <summary>
        /// Gets final array of keypoints.
        /// Performs nonmax suppression if needed.
        /// </summary>
        /// <param name="keypoints"></param>
        /// <returns>Final count of keypoints</returns>
        public int GetKeyPoints(GpuMat keypoints)
        {
            if (disposed)
                throw new ObjectDisposedException(GetType().Name);
            if (keypoints == null)
                throw new ArgumentNullException("keypoints");

            int result = NativeMethods.gpu_FAST_GPU_getKeyPoints(ptr, keypoints.CvPtr);

            GC.KeepAlive(keypoints);
            return result;
        }
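For context, a hedged call sketch: it assumes `detector` is an already-constructed FAST_GPU instance on which detection has already been run (see CalcKeyPointsLocation in a later example), since GetKeyPoints only retrieves results.

// Sketch only: `detector` is assumed to be a constructed FAST_GPU instance
// whose keypoints have already been computed.
using (var keypoints = new GpuMat())
{
    // Retrieves the final keypoints, applying nonmax suppression if enabled.
    int count = detector.GetKeyPoints(keypoints);
    Console.WriteLine("Final keypoint count: " + count);
}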
Example #26
        private Image <Bgr, byte> Match(Image <Bgr, byte> image1, Image <Bgr, byte> image2, int flag)
        {
            HomographyMatrix homography      = null;
            SURFDetector     surfDetectorCPU = new SURFDetector(500, false);

            int    k = 2;           //number of matches that we want to find between image1 and image2
            double uniquenessThreshold = 0.8;

            Matrix <int>  indices;
            Matrix <byte> mask;

            VectorOfKeyPoint KeyPointsImage1;
            VectorOfKeyPoint KeyPointsImage2;

            Image <Gray, Byte> Image1G = image1.Convert <Gray, Byte>();
            Image <Gray, Byte> Image2G = image2.Convert <Gray, Byte>();

            if (GpuInvoke.HasCuda)      //Using CUDA, the GPUs can be used for general purpose processing (i.e., not exclusively graphics) to speed up performance
            {
                Console.WriteLine("Here");
                GpuSURFDetector surfDetectorGPU = new GpuSURFDetector(surfDetectorCPU.SURFParams, 0.01f);

                // extract features from Image1
                using (GpuImage <Gray, Byte> gpuImage1 = new GpuImage <Gray, byte>(Image1G))                                                     //convert CPU input image to GPUImage(greyscale)
                    using (GpuMat <float> gpuKeyPointsImage1 = surfDetectorGPU.DetectKeyPointsRaw(gpuImage1, null))                              //find key points for image
                        using (GpuMat <float> gpuDescriptorsImage1 = surfDetectorGPU.ComputeDescriptorsRaw(gpuImage1, null, gpuKeyPointsImage1)) //calculate descriptor for each key point
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))                     //create a new matcher object
                            {
                                KeyPointsImage1 = new VectorOfKeyPoint();
                                surfDetectorGPU.DownloadKeypoints(gpuKeyPointsImage1, KeyPointsImage1);                                 //copy the Matrix from GPU to CPU

                                // extract features from Image2
                                using (GpuImage <Gray, Byte> gpuImage2 = new GpuImage <Gray, byte>(Image2G))
                                    using (GpuMat <float> gpuKeyPointsImage2 = surfDetectorGPU.DetectKeyPointsRaw(gpuImage2, null))
                                        using (GpuMat <float> gpuDescriptorsImage2 = surfDetectorGPU.ComputeDescriptorsRaw(gpuImage2, null, gpuKeyPointsImage2))

                                            //for each descriptor of each image2 , we find k best matching points and their distances from image1 descriptors

                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuDescriptorsImage2.Size.Height, k, 1, true))      //stores indices of k best matches
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuDescriptorsImage2.Size.Height, k, 1, true)) //stores distance of k best matches

                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))               //stores result of comparison
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuDescriptorsImage2, gpuDescriptorsImage1, gpuMatchIndices, gpuMatchDist, k, null, stream); //matching descriptors of image2 to image1 and storing the k best indices and corresponding distances

                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream); //by setting a stream, the operation is performed asynchronously
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);         //a match is kept only when col0 <= 0.8 * col1
                                                                }

                                                            KeyPointsImage2 = new VectorOfKeyPoint();
                                                            surfDetectorGPU.DownloadKeypoints(gpuKeyPointsImage2, KeyPointsImage2);

                                                            //wait for the stream to complete its tasks
                                                            //We can perform some other CPU intensive work here while we are waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(KeyPointsImage1, KeyPointsImage2, indices, mask, 1.5, 20); //the mask holds the comparison result (col0 <= 0.8 * col1)
                                                                //we can create a homography matrix only if we have at least 4 matching points
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(KeyPointsImage1, KeyPointsImage2, indices, mask, 2);
                                                                }
                                                            }
                                                        }
                            }
            }
            else
            {
                Console.WriteLine("No CUDA");
                //extract features from image1
                KeyPointsImage1 = new VectorOfKeyPoint();
                Matrix <float> DescriptorsImage1 = surfDetectorCPU.DetectAndCompute(Image1G, null, KeyPointsImage1);

                //extract features from image2
                KeyPointsImage2 = new VectorOfKeyPoint();
                Matrix <float>            DescriptorsImage2 = surfDetectorCPU.DetectAndCompute(Image2G, null, KeyPointsImage2);
                BruteForceMatcher <float> matcher           = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(DescriptorsImage1);

                indices = new Matrix <int>(DescriptorsImage2.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(DescriptorsImage2.Rows, k))
                {
                    matcher.KnnMatch(DescriptorsImage2, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(KeyPointsImage1, KeyPointsImage2, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(KeyPointsImage1, KeyPointsImage2, indices, mask, 2);
                    }
                }
            }
            Image <Bgr, Byte> mImage = image1.Convert <Bgr, Byte>();
            Image <Bgr, Byte> oImage = image2.Convert <Bgr, Byte>();
            Image <Bgr, Byte> result = new Image <Bgr, byte>(mImage.Width + oImage.Width, mImage.Height);

            //Image<Bgr, Byte> temp = Features2DToolbox.DrawMatches(image1, KeyPointsImage1, image2, KeyPointsImage2, indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = image1.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };

                homography.ProjectPoints(pts);

                HomographyMatrix origin = new HomographyMatrix();                //copy the left image into the mosaic using an identity homography (no shift of the origin)
                origin.SetIdentity();
                origin.Data[0, 2] = 0;
                origin.Data[1, 2] = 0;
                Image <Bgr, Byte> mosaic = new Image <Bgr, byte>(mImage.Width + oImage.Width, mImage.Height * 2);

                Image <Bgr, byte> warp_image = mosaic.Clone();
                mosaic = mImage.WarpPerspective(origin, mosaic.Width, mosaic.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_DEFAULT, new Bgr(0, 0, 0));

                warp_image = oImage.WarpPerspective(homography, warp_image.Width, warp_image.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Bgr(200, 0, 0));
                Image <Gray, byte> warp_image_mask = oImage.Convert <Gray, byte>();
                warp_image_mask.SetValue(new Gray(255));
                Image <Gray, byte> warp_mosaic_mask = mosaic.Convert <Gray, byte>();
                warp_mosaic_mask.SetZero();
                warp_mosaic_mask = warp_image_mask.WarpPerspective(homography, warp_mosaic_mask.Width, warp_mosaic_mask.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Gray(0));

                warp_image.Copy(mosaic, warp_mosaic_mask);
                if (flag == 1)
                {
                    Console.WriteLine("Using Image Blending");
                    return(blend(mosaic, warp_image, warp_mosaic_mask, 2));
                }
                else
                {
                    Console.WriteLine("No Image Blending");
                    return(mosaic);
                }
            }
            return(null);
        }
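A hedged caller sketch for Match above; `image1`/`image2` are assumed to be loaded Image<Bgr, byte> instances, and `blend` is a helper defined elsewhere in the same class:

// Sketch only: image1/image2 are assumed to be loaded Image<Bgr, byte> instances.
Image<Bgr, byte> mosaic = Match(image1, image2, 1); // flag 1 requests blending
if (mosaic == null)
    Console.WriteLine("No homography could be estimated; the images do not overlap enough");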
Example #27
 /// <summary>
 /// Performs object detection on the image, without a multi-scale window.
 /// </summary>
 /// <param name="img">Source image</param>
 /// <param name="hitThreshold">Threshold for the distance between features and the SVM classifying plane</param>
 /// <returns>Detected object locations</returns>
 public virtual Point[] Detect(GpuMat img, double hitThreshold)
 {
     return Detect(img, hitThreshold, new Size(0, 0), new Size(0, 0));
 }
Example #28
 /// <summary>
 /// Creates a proxy class of the specified GpuMat
 /// </summary>
 /// <param name="mat"></param>
 /// <returns></returns>
 public static InputArray Create(GpuMat mat)
 {
     return(new InputArray(mat));
 }
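A short usage sketch for the proxy factory above:

// Wrap a GpuMat so it can be passed to APIs that accept an InputArray.
using (var gpuMat = new GpuMat())
{
    InputArray ia = InputArray.Create(gpuMat);
    // `ia` now refers to the GpuMat's data without copying it.
}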
Example #29
        /// <summary>
        /// Memory set asynchronously
        /// </summary>
        /// <param name="src"></param>
        /// <param name="val"></param>
        /// <param name="mask"></param>
        public void EnqueueMemSet(GpuMat src, Scalar val, GpuMat mask)
        {
            ThrowIfDisposed();
            if (src == null)
                throw new ArgumentNullException("src");
            src.ThrowIfDisposed();

            NativeMethods.cuda_Stream_enqueueMemSet_WithMask(ptr, src.CvPtr, val, Cv2.ToPtr(mask));
        }
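A hedged sketch of queuing an asynchronous memset; the GpuMat constructor arguments and the final synchronization call are assumptions about the surrounding GPU Stream API, not verified signatures:

// Sketch only: constructor and synchronization names are assumptions.
using (var stream = new Stream())
using (var buffer = new GpuMat(480, 640, MatType.CV_8UC1))
{
    stream.EnqueueMemSet(buffer, Scalar.All(0), null); // queue a zero-fill; null mask = whole matrix
    // ...queue further asynchronous work here...
    stream.WaitForCompletion(); // assumed blocking call until the queue drains
}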
Example #30
        /// <summary>
        /// Find matching features between two images using SURF
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="modelKeyPoints">The detected key points of the model image</param>
        /// <param name="observedKeyPoints">The detected key points of the observed image</param>
        /// <param name="matches">The k-nearest-neighbour matches</param>
        /// <param name="mask">The mask of matches that survived filtering</param>
        /// <param name="homography">The homography matrix, or null if none could be estimated</param>
        /// <param name="score">The number of matches that survived the uniqueness vote</param>
        public static void FindMatch(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out int score)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 300;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            if (CudaInvoke.HasCuda)
            {
                CudaSURF surfCuda = new CudaSURF((float)hessianThresh);
                using (GpuMat gpuModelImage = new GpuMat(modelImage))
                    //extract features from the object image
                    using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
                            {
                                surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

                                // extract features from the observed image
                                using (GpuMat gpuObservedImage = new GpuMat(observedImage))
                                    using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                        //using (GpuMat tmp = new GpuMat())
                                        //using (Stream stream = new Stream())
                                        {
                                            matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                                            surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                            mask.SetTo(new MCvScalar(255));
                                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                            score = 0;
                                            for (int i = 0; i < matches.Size; i++)
                                            {
                                                if ((byte)mask.GetData().GetValue(i, 0) == 0)
                                                {
                                                    continue;
                                                }
                                                foreach (var e in matches[i].ToArray())
                                                {
                                                    ++score;
                                                }
                                            }

                                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                                            if (nonZeroCount >= 4)
                                            {
                                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                           matches, mask, 1.5, 20);
                                                if (nonZeroCount >= 4)
                                                {
                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                          observedKeyPoints, matches, mask, 2);
                                                }
                                            }
                                        }
                            }
            }
            else
            {
                using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                    {
                        SURF surfCPU = new SURF(hessianThresh);
                        //extract features from the object image
                        UMat modelDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        // extract features from the observed image
                        UMat observedDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                        BFMatcher matcher = new BFMatcher(DistanceType.L2);
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        score = 0;
                        for (int i = 0; i < matches.Size; i++)
                        {
                            //if (mask.GetData(true)[0] == 0) continue;
                            foreach (var e in matches[i].ToArray())
                            {
                                ++score;
                            }
                        }

                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                      observedKeyPoints, matches, mask, 2);
                            }
                        }
                    }
            }
        }
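A caller sketch for this FindMatch overload; the file names are placeholders:

// Sketch only: file names are placeholders.
using (Mat model = new Mat("model.png"))
using (Mat observed = new Mat("scene.png"))
using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
{
    VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
    Mat mask, homography;
    int score;
    FindMatch(model, observed, out modelKeyPoints, out observedKeyPoints,
              matches, out mask, out homography, out score);
    Console.WriteLine("Score: {0}, homography estimated: {1}", score, homography != null);
}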
Example #31
        /// <summary>
        /// Computes a background image, which is the mean of all background Gaussians
        /// </summary>
        /// <param name="backgroundImage">The output background image</param>
        /// <param name="stream">Stream for the asynchronous version; defaults to the null stream</param>
        public void GetBackgroundImage(
            GpuMat backgroundImage, Stream stream = null)
        {
            if (disposed)
                throw new ObjectDisposedException(GetType().Name);
            if (backgroundImage == null)
                throw new ArgumentNullException("backgroundImage");

            stream = stream ?? Stream.Null;

            NativeMethods.gpu_MOG_GPU_getBackgroundImage(
                ptr, backgroundImage.CvPtr, stream.CvPtr);

            GC.KeepAlive(backgroundImage);
            GC.KeepAlive(stream);
        }
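A hedged usage sketch; it assumes `mog` is a constructed MOG_GPU background subtractor that has already been fed several frames:

// Sketch only: `mog` is assumed to be a constructed MOG_GPU instance.
using (var background = new GpuMat())
{
    mog.GetBackgroundImage(background);           // synchronous: runs on Stream.Null
    // mog.GetBackgroundImage(background, stream); // or queue it on a stream
}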
Example #32
        public static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(500, false);
            Stopwatch    watch;

            homography = null;
#if !IOS
            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            //We can perform some other CPU intensive work here while we are waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                            }

                                                            watch.Stop();
                                                        }
                            }
            }
            else
#endif
            {
                //extract features from the object image
                modelKeyPoints = new VectorOfKeyPoint();
                Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = new VectorOfKeyPoint();
                Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #33
        public static bool FindModelImageInObservedImage(Image <Gray, byte> modelImage, Image <Gray, byte> observedImage)
        {
            var surfCpu = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGpu = new GpuSURFDetector(surfCpu.SURFParams, 0.01f);
                using (GpuImage <Gray, byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGpu.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGpu.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGpu.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

                                // extract features from the observed image
                                using (GpuImage <Gray, byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGpu.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGpu.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (var stream = new Emgu.CV.GPU.Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGpu.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            //We can perform some other CPU intensive work here while we are waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                                if ((double)nonZeroCount / mask.Height > 0.02)
                                                                {
                                                                    return(true);
                                                                }
                                                            }
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCpu.DetectKeyPointsRaw(modelImage, null);
                Matrix <float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                // extract features from the observed image
                observedKeyPoints = surfCpu.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>            observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                if ((double)nonZeroCount / mask.Height > 0.02)
                {
                    return(true);
                }
            }

            //Draw the matched keypoints
            //var result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, indices, new Bgr(0, 0, 255), new Bgr(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            //result.Save( @"C:\Users\D.Markachev\Desktop\bleh-keypoints.jpg" );

            return(false);
        }
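A caller sketch; the file names are placeholders:

// Sketch only: file names are placeholders.
using (var model = new Image<Gray, byte>("logo.png"))
using (var scene = new Image<Gray, byte>("photo.png"))
{
    bool found = FindModelImageInObservedImage(model, scene);
    Console.WriteLine(found ? "Model located in scene" : "No reliable match");
}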
Example #34
        public static void Detect(
            Mat image, String faceFileName, String eyeFileName,
            List <Rectangle> faces, List <Rectangle> eyes,
            bool tryUseCuda,
            out long detectionTime)
        {
            Stopwatch watch;

#if !(__IOS__ || NETFX_CORE)
            if (tryUseCuda && CudaInvoke.HasCuda)
            {
                using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                    using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
                    {
                        face.ScaleFactor   = 1.1;
                        face.MinNeighbors  = 10;
                        face.MinObjectSize = Size.Empty;
                        eye.ScaleFactor    = 1.1;
                        eye.MinNeighbors   = 10;
                        eye.MinObjectSize  = Size.Empty;
                        watch = Stopwatch.StartNew();
                        using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image))
                            using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                                using (GpuMat region = new GpuMat())
                                {
                                    face.DetectMultiScale(gpuGray, region);
                                    Rectangle[] faceRegion = face.Convert(region);
                                    faces.AddRange(faceRegion);
                                    foreach (Rectangle f in faceRegion)
                                    {
                                        using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                                        {
                                            //For some reason a clone is required.
                                            //Might be a bug in CudaCascadeClassifier in OpenCV
                                            using (CudaImage <Gray, Byte> clone = faceImg.Clone(null))
                                                using (GpuMat eyeRegionMat = new GpuMat())
                                                {
                                                    eye.DetectMultiScale(clone, eyeRegionMat);
                                                    Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
                                                    foreach (Rectangle e in eyeRegion)
                                                    {
                                                        Rectangle eyeRect = e;
                                                        eyeRect.Offset(f.X, f.Y);
                                                        eyes.Add(eyeRect);
                                                    }
                                                }
                                        }
                                    }
                                }
                        watch.Stop();
                    }
            }
            else
#endif
            {
                //Read the HaarCascade objects
                using (CascadeClassifier face = new CascadeClassifier(faceFileName))
                    using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
                    {
                        watch = Stopwatch.StartNew();
                        using (UMat ugray = new UMat())
                        {
                            CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                            //ugray.Save("first.bmp");

                            //normalizes brightness and increases contrast of the image
                            //CvInvoke.EqualizeHist(ugray, ugray);
                            //ugray.Save("second.bmp");
                            //Detect the faces from the gray scale image and store the locations as rectangles
                            //The first dimension is the channel
                            //The second dimension is the index of the rectangle in the specific channel
                            Rectangle[] facesDetected = face.DetectMultiScale(
                                ugray,
                                1.1,
                                10,
                                new Size(24, 24));

                            faces.AddRange(facesDetected);

                            foreach (Rectangle f in facesDetected)
                            {
                                //Get the region of interest on the faces
                                using (UMat faceRegion = new UMat(ugray, f))
                                {
                                    Rectangle[] eyesDetected = eye.DetectMultiScale(
                                        faceRegion,
                                        1.1,
                                        10,
                                        new Size(24, 24));

                                    foreach (Rectangle e in eyesDetected)
                                    {
                                        Rectangle eyeRect = e;
                                        eyeRect.Offset(f.X, f.Y);
                                        eyes.Add(eyeRect);
                                    }
                                }
                            }
                        }
                        watch.Stop();
                    }
            }
            detectionTime = watch.ElapsedMilliseconds;
        }
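A caller sketch for Detect above; the cascade file names are the stock OpenCV ones and are assumed to be available locally:

// Sketch only: the cascade files are assumed to sit next to the executable.
using (Mat image = new Mat("lena.png"))
{
    List<Rectangle> faces = new List<Rectangle>();
    List<Rectangle> eyes = new List<Rectangle>();
    long detectionTime;
    Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
           faces, eyes, true, out detectionTime);
    Console.WriteLine("{0} face(s), {1} eye(s) in {2} ms", faces.Count, eyes.Count, detectionTime);
}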
Example #35
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints,
                                     out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, double hessianThresh)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            //double hessianThresh = 300; threshold: the larger this value, the fewer keypoints survive

            Stopwatch sw;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

#if !__IOS__
            //check whether a CUDA-capable NVIDIA GPU is present; if so, run the computation on the GPU
            if (CudaInvoke.HasCuda)
            {
                //SURF algorithm
                //create a CudaSURF detector
                CudaSURF surfCuda = new CudaSURF((float)hessianThresh);
                //on the GPU, GpuMat takes the place of cv::Mat
                using (GpuMat gpuModelImage = new GpuMat(modelImage))

                    //extract key points from the image
                    using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
                        //compute a descriptor for each key point
                        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            //create the matcher
                            using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
                            {
                                surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                sw = Stopwatch.StartNew();

                                using (GpuMat gpuObservedImage = new GpuMat(observedImage))
                                    using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))

                                        //using (GpuMat tmp = new GpuMat())
                                        //using (Stream stream = new Stream())
                                        {
                                            matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                                            surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);
                                            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                            mask.SetTo(new MCvScalar(255));

                                            //filter the matches: discard candidates that are not sufficiently unique
                                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                                            //count the non-zero elements in the mask
                                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                                            if (nonZeroCount >= 4)
                                            {
                                                //prune matches whose scale and rotation disagree with the majority
                                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                                if (nonZeroCount >= 4)
                                                {
                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                                }
                                            }
                                        }
                                sw.Stop();
                            }
            }
            else
#endif
            {
                using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read)) {
                        //create the SURF detector
                        SURF surfCPU = new SURF(hessianThresh);

                        //extract descriptors from the model image
                        UMat modelDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                        sw = Stopwatch.StartNew();

                        //extract descriptors from the observed image
                        UMat observedDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                        //brute-force matching
                        BFMatcher matcher = new BFMatcher(DistanceType.L2);
                        matcher.Add(modelDescriptors);
                        //matches (VectorOfVectorOfDMatch) receives the k best matches per observed descriptor
                        matcher.KnnMatch(observedDescriptors, matches, k, null);

                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        //filter the matches: discard candidates that are not sufficiently unique
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                        //count the non-zero elements in the mask
                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            //discard keypoints whose scale and rotation do not agree with the majority
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                //use RANSAC to estimate the homography matrix; null is returned if it cannot be recovered
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                            }
                        }
                        sw.Stop();
                    }
            }
            matchTime = sw.ElapsedMilliseconds;
        }
Example #36
        public void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 300;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            #if !__IOS__
            if (CudaInvoke.HasCuda)
            {
                USINGGPU = "true";
                CudaSURF surfCuda = new CudaSURF((float)hessianThresh);
                using (GpuMat gpuModelImage = new GpuMat(modelImage))
                    //extract features from the object image
                    using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
                            {
                                surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuMat gpuObservedImage = new GpuMat(observedImage))
                                    using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                        //using (GpuMat tmp = new GpuMat())
                                        //using (Stream stream = new Stream())
                                        {
                                            matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                                            surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                            mask.SetTo(new MCvScalar(255));
                                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                                            if (nonZeroCount >= 4)
                                            {
                                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                           matches, mask, 1.5, 20);
                                                if (nonZeroCount >= 4)
                                                {
                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                          observedKeyPoints, matches, mask, 2);
                                                }
                                            }
                                        }
                                watch.Stop();
                            }
            }
            else
            #endif
            {
                //using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
                //using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
                //{
                SURF surfCPU = new SURF(hessianThresh);
                //extract features from the object image
                UMat modelDescriptors = new UMat();
                surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                UMat observedDescriptors = new UMat();
                surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);
                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(modelDescriptors);

                matcher.KnnMatch(observedDescriptors, matches, k, null);
                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                               matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                              observedKeyPoints, matches, mask, 2);
                    }
                }

                watch.Stop();
                //}
            }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #37
 /// <summary>
 /// Obtain a GpuMat from the keypoints array
 /// </summary>
 /// <param name="src">The keypoints array</param>
 /// <param name="dst">A GpuMat that represent the keypoints</param>
 public void UploadKeypoints(VectorOfKeyPoint src, GpuMat dst)
 {
     XFeatures2DInvoke.cudaSURFUploadKeypoints(_ptr, src, dst);
 }
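A hedged sketch of the upload path: detect keypoints on the CPU, upload them, then compute descriptors on the GPU. `surfCuda` (CudaSURF), `image` (Mat), and `cpuKeyPoints` (VectorOfKeyPoint) are assumed to exist:

// Sketch only: surfCuda, image, and cpuKeyPoints are assumed to exist already.
using (GpuMat gpuImage = new GpuMat(image))
using (GpuMat gpuKeyPoints = new GpuMat())
{
    surfCuda.UploadKeypoints(cpuKeyPoints, gpuKeyPoints); // CPU -> GPU
    using (GpuMat descriptors = surfCuda.ComputeDescriptorsRaw(gpuImage, null, gpuKeyPoints))
    {
        // descriptors now correspond to the uploaded keypoints
    }
}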
Example #38
 /// <summary>
 /// Creates/Sets a matrix header for the specified row/column span.
 /// </summary>
 /// <param name="start"></param>
 /// <param name="end"></param>
 /// <param name="value"></param>
 public virtual void Set(int start, int end, GpuMat value)
 {
     this[start, end] = value;
 }
Example #39
        /// <summary>
        /// Finds the keypoints using FAST detector.
        /// </summary>
        /// <param name="image">Image where keypoints (corners) are detected. 
        /// Only 8-bit grayscale images are supported.</param>
        /// <param name="mask">Optional input mask that marks the regions where we should detect features.</param>
        /// <param name="keypoints">The output vector of keypoints.</param>
        public void Run(GpuMat image, GpuMat mask, out KeyPoint[] keypoints)
        {
            if (disposed)
                throw new ObjectDisposedException(GetType().Name);
            if (image == null)
                throw new ArgumentNullException("image");
            if (mask == null)
                throw new ArgumentNullException("mask");

            using (var keypointsVec = new VectorOfKeyPoint())
            {
                NativeMethods.gpu_FAST_GPU_operator2(ptr, image.CvPtr, mask.CvPtr, keypointsVec.CvPtr);
                keypoints = keypointsVec.ToArray();
            }

            GC.KeepAlive(image);
            GC.KeepAlive(mask);
        }
Example #40
 /// <summary>
 /// Creates/Sets a matrix header for the specified row/column span.
 /// </summary>
 /// <param name="range">The row/column range</param>
 /// <param name="value">The GpuMat to assign to the span</param>
 public virtual void Set(Range range, GpuMat value)
 {
     this[range.Start, range.End] = value;
 }
Example #41
        /// <summary>
        /// Find keypoints and compute their response if nonmaxSuppression is true.
        /// </summary>
        /// <param name="image">Image where keypoints (corners) are detected. Only 8-bit grayscale images are supported.</param>
        /// <param name="mask">Optional input mask that marks the regions where we should detect features.</param>
        /// <returns>count of detected keypoints</returns>
        public int CalcKeyPointsLocation(GpuMat image, GpuMat mask)
        {
            if (disposed)
                throw new ObjectDisposedException(GetType().Name);
            if (image == null)
                throw new ArgumentNullException("image");
            if (mask == null)
                throw new ArgumentNullException("mask");

            int result = NativeMethods.gpu_FAST_GPU_calcKeyPointsLocation(ptr, image.CvPtr, mask.CvPtr);

            GC.KeepAlive(image);
            GC.KeepAlive(mask);
            return result;
        }
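Putting the two FAST phases together, a hedged sketch; `detector`, `image`, and `mask` are assumed to exist:

// Sketch only: `detector` is a constructed FAST_GPU instance, `image` an
// 8-bit grayscale GpuMat, and `mask` a matching mask GpuMat.
int located = detector.CalcKeyPointsLocation(image, mask);
using (var keypoints = new GpuMat())
{
    int kept = detector.GetKeyPoints(keypoints); // applies nonmax suppression if enabled
    Console.WriteLine("{0} candidates located, {1} kept", located, kept);
}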
Example #42
 /// <summary>
 /// Constructor
 /// </summary>
 /// <param name="parent">The GpuMat this indexer operates on</param>
 protected internal GpuMatRowColIndexer(GpuMat parent)
 {
     this.parent = parent;
 }
Example #43
        /*
        /// <summary>
        /// 
        /// </summary>
        /// <returns></returns>
        public bool CheckDetectorSize()
        {
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            return GpuInvoke.HOGDescriptor_checkDetectorSize(ptr) != 0;
        }

        /// <summary>
        /// 
        /// </summary>
        /// <returns></returns>
        public double GetWinSigma()
        {
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            return GpuInvoke.HOGDescriptor_getWinSigma(ptr);
        }
        */

        #region Detect
        /// <summary>
        /// Performs object detection on the image, without a multi-scale window, using the default hit threshold.
        /// </summary>
        /// <param name="img">Source image</param>
        /// <returns>Detected object locations</returns>
        public virtual Point[] Detect(GpuMat img)
        {
            return Detect(img, 0, new Size(0, 0), new Size(0, 0));
        }
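A hedged usage sketch for the two Detect overloads shown in this listing; `hog` is assumed to be a GPU HOGDescriptor with an SVM detector already set, and `gpuImg` an uploaded GpuMat:

// Sketch only: `hog` and `gpuImg` are assumed to exist already.
Point[] hits = hog.Detect(gpuImg);            // default hit threshold
Point[] strictHits = hog.Detect(gpuImg, 1.0); // stricter hit threshold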
Example #44
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image<Bgr, Byte> Draw(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime)
        {
            Stopwatch watch;
            HomographyMatrix homography = null;

            SURFDetector surfCPU = new SURFDetector (500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix<int> indices;

            Matrix<byte> mask;
            int k = 2;
            double uniquenessThreshold = 0.8;
            if (GpuInvoke.HasCuda) {
                GpuSURFDetector surfGPU = new GpuSURFDetector (surfCPU.SURFParams, 0.01f);
                using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte> (modelImage))
                    //extract features from the object image
                using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw (gpuModelImage, null))
                using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw (gpuModelImage, null, gpuModelKeyPoints))
                using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float> (DistanceType.L2)) {
                    modelKeyPoints = new VectorOfKeyPoint ();
                    surfGPU.DownloadKeypoints (gpuModelKeyPoints, modelKeyPoints);
                    watch = Stopwatch.StartNew ();

                    // extract features from the observed image
                    using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte> (observedImage))
                    using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw (gpuObservedImage, null))
                    using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw (gpuObservedImage, null, gpuObservedKeyPoints))
                    using (GpuMat<int> gpuMatchIndices = new GpuMat<int> (gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<float> gpuMatchDist = new GpuMat<float> (gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<Byte> gpuMask = new GpuMat<byte> (gpuMatchIndices.Size.Height, 1, 1))
                    using (Stream stream = new Stream ()) {
                        matcher.KnnMatchSingle (gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                        indices = new Matrix<int> (gpuMatchIndices.Size);
                        mask = new Matrix<byte> (gpuMask.Size);

                        //GPU implementation of VoteForUniqueness
                        using (GpuMat<float> col0 = gpuMatchDist.Col (0))
                        using (GpuMat<float> col1 = gpuMatchDist.Col (1)) {
                            GpuInvoke.Multiply (col1, new MCvScalar (uniquenessThreshold), col1, stream);
                            GpuInvoke.Compare (col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                        }

                        observedKeyPoints = new VectorOfKeyPoint ();
                        surfGPU.DownloadKeypoints (gpuObservedKeyPoints, observedKeyPoints);

                        //wait for the stream to complete its tasks
                        //We can perform other CPU-intensive work here while waiting for the stream to complete.
                        stream.WaitForCompletion ();

                        gpuMask.Download (mask);
                        gpuMatchIndices.Download (indices);

                        if (GpuInvoke.CountNonZero (gpuMask) >= 4) {
                            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation (modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures (modelKeyPoints, observedKeyPoints, indices, mask, 2);
                        }

                        watch.Stop ();
                    }
                }
            } else {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw (modelImage, null);
                Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw (modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew ();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw (observedImage, null);
                Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw (observedImage, null, observedKeyPoints);
                BruteForceMatcher<float> matcher = new BruteForceMatcher<float> (DistanceType.L2);
                matcher.Add (modelDescriptors);

                indices = new Matrix<int> (observedDescriptors.Rows, k);
                using (Matrix<float> dist = new Matrix<float> (observedDescriptors.Rows, k)) {
                    matcher.KnnMatch (observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte> (dist.Rows, 1);
                    mask.SetValue (255);
                    Features2DToolbox.VoteForUniqueness (dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero (mask);
                if (nonZeroCount >= 4) {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation (modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures (modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }

                watch.Stop ();
            }

            //Draw the matched keypoints
            Image<Bgr, Byte> result = Features2DToolbox.DrawMatches (modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                          indices, new Bgr (255, 255, 255), new Bgr (255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null) {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[] pts = new PointF[] {
                    new PointF (rect.Left, rect.Bottom),
                    new PointF (rect.Right, rect.Bottom),
                    new PointF (rect.Right, rect.Top),
                    new PointF (rect.Left, rect.Top)
                };
                homography.ProjectPoints (pts);

                result.DrawPolyline (Array.ConvertAll<PointF, Point> (pts, Point.Round), true, new Bgr (Color.Red), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return result;
        }
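A minimal driver for Draw(), assuming the box.png / box_in_scene.png pair from the stock EmguCV samples is on disk.

        // Hypothetical call site: match the model against the scene and display the result.
        long matchTime;
        using (Image<Gray, Byte> model = new Image<Gray, Byte>("box.png"))
        using (Image<Gray, Byte> observed = new Image<Gray, Byte>("box_in_scene.png"))
        using (Image<Bgr, Byte> result = Draw(model, observed, out matchTime))
        {
            ImageViewer.Show(result, String.Format("Matched in {0} ms", matchTime));
        }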
Example #45
 /// <summary>
 /// Performs object detection with the given hit threshold and window stride.
 /// </summary>
 /// <param name="img">Source image to scan for objects.</param>
 /// <param name="hitThreshold">Threshold for the distance between features and the SVM classifying plane.</param>
 /// <param name="winStride">Window stride; must be a multiple of the block stride.</param>
 /// <returns>Positions of the detected objects.</returns>
 public virtual Point[] Detect(GpuMat img, double hitThreshold, Size winStride)
 {
     return Detect(img, hitThreshold, winStride, new Size(0, 0));
 }
        public List<Face> FindFaces(Image<Bgr, byte> image, string faceFileName, string eyeFileName, double scale, int neighbors, int minSize)
        {
            List<Face> faces = new List<Face>();
            List<Rectangle> facesRect = new List<Rectangle>();
            List<Rectangle> eyesRect = new List<Rectangle>();
            try
            {
                //Console.WriteLine(" FaceDetectGPU FindFaces faceFileName=" + faceFileName + " cuda = " + CudaInvoke.HasCuda);
                using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                {
                    using (CudaImage<Bgr, Byte> cudaImage = new CudaImage<Bgr, byte>(image))
                    using (CudaImage<Gray, Byte> cudaGray = cudaImage.Convert<Gray, Byte>())
                    using (GpuMat region = new GpuMat())
                    {
                        face.DetectMultiScale(cudaGray, region);
                        Rectangle[] faceRegion = face.Convert(region);
                        facesRect.AddRange(faceRegion);
                        foreach (Rectangle f in faceRegion)
                        {
                            using (CudaImage<Gray, Byte> faceImg = cudaGray.GetSubRect(f))
                            {
                                using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
                                {
                                    Face facemodel = new Face();
                                    eyesRect = new List<Rectangle>(FindEyes(eyeFileName, clone));
                                    if (eyesRect != null)
                                    {
                                        facemodel.EyesRects = eyesRect;
                                        facemodel.EyesCount = eyesRect.Count;
                                    }
                                    else
                                    {
                                        continue;
                                    }
                                    facemodel.FaceImage = clone.Bitmap;
                                    facemodel.Height = facemodel.FaceImage.Height;
                                    facemodel.Width = facemodel.FaceImage.Width;
                                    facemodel.FaceRect = f;
                                    facemodel.FramePosX = f.X;
                                    facemodel.FramePosY = f.Y;
                                    facemodel.ImageFrameSize = image.Size;

                                    Gray avgf = new Gray();
                                    MCvScalar avstd = new MCvScalar();
                                    clone.ToImage().AvgSdv(out avgf, out avstd);
                                    facemodel.StdDev = avstd.V0;
                                    faces.Add(facemodel);
                                    if (facemodel.FaceScore > 39)
                                        Console.WriteLine("FaceDetect USING gpuCUDA Add faceModel " + facemodel.FaceScore);

                                    break;
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception cudaerrJones)
            {
                Console.WriteLine("cudaerrJones = " + cudaerrJones);
            }

            return faces;
        }
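A hedged call-site sketch for FindFaces; the containing class name and image file are illustrative, and the cascade XML files are the stock OpenCV ones.

        // Hypothetical usage: run the CUDA face/eye detector over a single frame.
        var detector = new FaceDetectGpu();  // hypothetical containing class
        using (Image<Bgr, byte> frame = new Image<Bgr, byte>("people.jpg"))
        {
            List<Face> found = detector.FindFaces(frame,
                "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                scale: 1.1, neighbors: 10, minSize: 20);
            Console.WriteLine("Detected {0} face(s)", found.Count);
        }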
Example #47
        /// <summary>
        /// Copy asynchronously
        /// </summary>
        /// <param name="src">Source matrix.</param>
        /// <param name="dst">Destination matrix.</param>
        public void EnqueueCopy(GpuMat src, GpuMat dst)
        {
            ThrowIfDisposed();
            if (src == null)
                throw new ArgumentNullException("src");
            if (dst == null)
                throw new ArgumentNullException("dst");
            src.ThrowIfDisposed();
            dst.ThrowIfDisposed();

            NativeMethods.cuda_Stream_enqueueCopy(ptr, src.CvPtr, dst.CvPtr);
        }
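A sketch of overlapping the asynchronous device-to-device copy with CPU work, assuming the same Stream wrapper as above exposes a blocking WaitForCompletion().

        // Hypothetical: queue the copy, do CPU-side work, then synchronize on the stream.
        using (var src = new GpuMat(1080, 1920, MatType.CV_8UC3))
        using (var dst = new GpuMat(1080, 1920, MatType.CV_8UC3))
        using (var stream = new Stream())
        {
            stream.EnqueueCopy(src, dst);
            DoCpuSideWork();             // hypothetical: runs while the copy is in flight
            stream.WaitForCompletion();  // assumed blocking wait, as used in the examples above
        }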
Example #48
 /// <summary>
 /// Obtain the keypoints array from GpuMat
 /// </summary>
 /// <param name="src">The keypoints obtained from DetectKeyPointsRaw</param>
 /// <param name="dst">The vector of keypoints</param>
 public void DownloadKeypoints(GpuMat src, VectorOfKeyPoint dst)
 {
     XFeatures2DInvoke.cudaSURFDownloadKeypoints(_ptr, src, dst);
 }
Example #49
        /// <summary>
        /// Converts the matrix type asynchronously, e.g. from float to uchar, depending on the destination type.
        /// </summary>
        /// <param name="src">Source matrix.</param>
        /// <param name="dst">Destination matrix.</param>
        /// <param name="dtype">Desired depth of the destination matrix.</param>
        /// <param name="a">Scale factor applied to the source values.</param>
        /// <param name="b">Delta added to the scaled values.</param>
        public void EnqueueConvert(GpuMat src, GpuMat dst, int dtype, double a = 1, double b = 0)
        {
            ThrowIfDisposed();
            if (src == null)
                throw new ArgumentNullException("src");
            if (dst == null)
                throw new ArgumentNullException("dst");
            src.ThrowIfDisposed();
            dst.ThrowIfDisposed();

            NativeMethods.cuda_Stream_enqueueConvert(ptr, src.CvPtr, dst.CvPtr, dtype, a, b);
        }
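Continuing the stream sketch above, an asynchronous depth conversion that rescales 8-bit pixels into [0, 1] floats; src8u and dst32f are hypothetical GpuMats, and passing MatType.CV_32F for the int dtype relies on OpenCvSharp's implicit MatType-to-int conversion.

        // Hypothetical: convert CV_8U to CV_32F with a = 1/255 (scale) and b = 0 (offset).
        stream.EnqueueConvert(src8u, dst32f, MatType.CV_32F, 1.0 / 255.0, 0.0);
        stream.WaitForCompletion();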
 /// <summary>
 /// Creates a proxy class of the specified matrix
 /// </summary>
 /// <param name="mat">The GpuMat to wrap.</param>
 /// <returns>An OutputArray proxy for the given matrix.</returns>
 public static OutputArray Create(GpuMat mat)
 {
     return(new OutputArray(mat));
 }
Example #51
        /// <summary>
        /// the update operator [MOG_GPU::operator()]
        /// </summary>
        /// <param name="frame">Next video frame.</param>
        /// <param name="fgmask">Output foreground mask as an 8-bit binary image.</param>
        /// <param name="learningRate">Learning rate of the background model.</param>
        /// <param name="stream">CUDA stream for asynchronous execution (null for the default stream).</param>
        public void Update(
            GpuMat frame, GpuMat fgmask, float learningRate = 0.0f, Stream stream = null)
        {
            if (disposed)
                throw new ObjectDisposedException(GetType().Name);
            if (frame == null)
                throw new ArgumentNullException("frame");
            if (fgmask == null)
                throw new ArgumentNullException("fgmask");

            stream = stream ?? Stream.Null;

            NativeMethods.gpu_MOG_GPU_operator(
                ptr, frame.CvPtr, fgmask.CvPtr, learningRate, stream.CvPtr);

            GC.KeepAlive(frame);
            GC.KeepAlive(fgmask);
            GC.KeepAlive(stream);
        }
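A per-frame background-subtraction loop built on the Update method above; the MOG_GPU default constructor and GpuMat.Upload are assumptions from the same wrapper family.

        // Hypothetical: feed frames through the GPU MOG model, keeping the mask on-device.
        using (var mog = new MOG_GPU())      // assumed default constructor
        using (var frameGpu = new GpuMat())
        using (var fgmask = new GpuMat())
        {
            foreach (Mat frame in frames)    // frames: any IEnumerable<Mat> of same-size images
            {
                frameGpu.Upload(frame);      // assumed host-to-device upload
                mog.Update(frameGpu, fgmask, 0.01f);  // small learning rate: adapt slowly
            }
        }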
Example #52
        public void TestBruteForceHammingDistance()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, byte>       box   = new Image <Gray, byte>("box.png");
                FastDetector             fast  = new FastDetector(100, true);
                BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(box, modelKeypoints);
                Mat modelDescriptors = new Mat();
                brief.Compute(box, modelKeypoints, modelDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");

                #region extract features from the observed image
                stopwatch.Reset(); stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(observedImage, observedKeypoints);
                Mat observedDescriptors = new Mat();
                brief.Compute(observedImage, observedKeypoints, observedDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Mat homography = null;
                using (GpuMat <Byte> gpuModelDescriptors = new GpuMat <byte>(modelDescriptors)) //initialization of the GPU code might take longer
                {
                    stopwatch.Reset(); stopwatch.Start();
                    CudaBFMatcher hammingMatcher = new CudaBFMatcher(DistanceType.Hamming);

                    //BFMatcher hammingMatcher = new BFMatcher(BFMatcher.DistanceType.Hamming, modelDescriptors);
                    int            k        = 2;
                    Matrix <int>   trainIdx = new Matrix <int>(observedKeypoints.Size, k);
                    Matrix <float> distance = new Matrix <float>(trainIdx.Size);

                    using (GpuMat <Byte> gpuObservedDescriptors = new GpuMat <byte>(observedDescriptors))
                        //using (GpuMat<int> gpuTrainIdx = new GpuMat<int>(trainIdx.Rows, trainIdx.Cols, 1, true))
                        //using (GpuMat<float> gpuDistance = new GpuMat<float>(distance.Rows, distance.Cols, 1, true))
                        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        {
                            Stopwatch w2 = Stopwatch.StartNew();
                            //hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                            hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k, null, true);
                            w2.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (excluding data transfer): {0} milli-sec",
                                                          w2.ElapsedMilliseconds));
                            //gpuTrainIdx.Download(trainIdx);
                            //gpuDistance.Download(distance);


                            Mat mask = new Mat(distance.Rows, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints,
                                                                                           matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints,
                                                                                                          observedKeypoints, matches, mask, 2);
                                }
                                nonZeroCount = CvInvoke.CountNonZero(mask);
                            }

                            stopwatch.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (including data transfer): {0} milli-sec",
                                                          stopwatch.ElapsedMilliseconds));
                        }
                }

                if (homography != null)
                {
                    Rectangle rect = box.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    PointF[] points = CvInvoke.PerspectiveTransform(pts, homography);
                    //homography.ProjectPoints(points);

                    //Merge the object image and the observed image into one big image for display
                    Image <Gray, Byte> res = box.ConcateVertical(observedImage);

                    for (int i = 0; i < points.Length; i++)
                    {
                        points[i].Y += box.Height;
                    }
                    res.DrawPolyline(Array.ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                    //ImageViewer.Show(res);
                }
            }
        }
 /// <summary>
 /// Creates/Sets a matrix header for the specified matrix row/column.
 /// </summary>
 /// <param name="pos">The row/column index.</param>
 /// <param name="value">The matrix to assign at the specified index.</param>
 public virtual void Set(int pos, GpuMat value)
 {
     this[pos] = value;
 }
Example #54
 /// <summary>
 /// Creates a continuous matrix in GPU memory.
 /// </summary>
 /// <param name="size">Number of rows and columns in a 2D array.</param>
 /// <param name="type">Array type.</param>
 /// <param name="m">The matrix to allocate.</param>
 public static void CreateContinuous(Size size, MatType type, GpuMat m)
 {
     ThrowIfGpuNotAvailable();
     CreateContinuous(size.Height, size.Width, type, m);
 }
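A small sketch of the Size overload above; continuous allocation keeps the whole image in one linear block, which row-flattening kernels rely on. Assumes the method lives on GpuMat.

        // Hypothetical: allocate a continuous 640x480 single-channel GPU buffer.
        var buffer = new GpuMat();
        GpuMat.CreateContinuous(new Size(640, 480), MatType.CV_8UC1, buffer);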
        /*
          /// <summary>
          /// Add the model descriptors
          /// </summary>
          /// <param name="modelDescriptors">The model descriptors</param>
          public void Add(Matrix<Byte> modelDescriptors)
          {
         if (!(_distanceType == DistanceType.HammingDist))
            throw new ArgumentException("Hamming distance type requires model descriptor to be Matrix<Byte>");
         gpuBruteForceMatcherAdd(_ptr, modelDescriptors);
          }

          /// <summary>
          /// Add the model descriptors
          /// </summary>
          /// <param name="modelDescriptors">The model descriptors</param>
          public void Add(Matrix<float> modelDescriptors)
          {
         if (!(_distanceType == DistanceType.L2 || _distanceType == DistanceType.L1))
            throw new ArgumentException("L1 / L2 distance type requires model descriptor to be Matrix<float>");
         gpuBruteForceMatcherAdd(_ptr, modelDescriptors);
          }*/
        /// <summary>
        /// For the L1 and L2 distance types, find the k nearest neighbours using the brute force matcher.
        /// </summary>
        /// <param name="queryDescriptors">The query descriptors</param>
        /// <param name="modelDescriptors">The model descriptors</param>
        /// <param name="modelIdx">The model index. An n x <paramref name="k"/> matrix where n is the number of query descriptors</param>
        /// <param name="distance">The matrix where the distance values are stored. An n x <paramref name="k"/> matrix where n is the number of query descriptors</param>
        /// <param name="k">The number of nearest neighbours to be searched</param>
        /// <param name="mask">The mask</param>
        public void KnnMatch(GpuMat<float> queryDescriptors, GpuMat<float> modelDescriptors, GpuMat<int> modelIdx, GpuMat<float> distance, int k, GpuMat<Byte> mask)
        {
            gpuBruteForceMatcherKnnMatch(_ptr, queryDescriptors, modelDescriptors, modelIdx, distance, k, mask);
        }
Example #56
        static void Run()
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("box.png");
            Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");
            Stopwatch          watch;
            HomographyMatrix   homography = null;

            SURFDetector surfCPU = new SURFDetector(500, false);

            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <float>   dist;
            Matrix <byte>    mask;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);

                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

                BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);
                dist    = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                    }
                }

                watch.Stop();
            }

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DTracker.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DTracker.KeypointDrawType.NOT_DRAW_SINGLE_POINTS);

            #region draw the projected region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            ImageViewer.Show(result, String.Format("Matched using {0} in {1} milliseconds", GpuInvoke.HasCuda ? "GPU" : "CPU", watch.ElapsedMilliseconds));
        }
        public static Rectangle[] FindPerson(IInputArray image, out long processingTime)
        {
            Stopwatch watch = new Stopwatch();

            Rectangle[] regions = null;


            if (Controller.Instance.Cuda)
            {
                using (GpuMat gpuImage = new GpuMat(image))
                using (InputArray iaImage = gpuImage.GetInputArray())
                {
                    //if the input array is a GpuMat
                    //check if there is a compatible Cuda device to run pedestrian detection
                    if (iaImage.Kind == InputArray.Type.CudaGpuMat)
                    {
                        //this is the Cuda version
                        using (CudaHOG des = new CudaHOG(
                                   new Size(64, 128),
                                   new Size(16, 16),
                                   new Size(8, 8),
                                   new Size(8, 8)))
                        {
                            des.SetSVMDetector(des.GetDefaultPeopleDetector());

                            watch = Stopwatch.StartNew();
                            using (GpuMat cudaBgra = new GpuMat())
                                using (VectorOfRect vr = new VectorOfRect())
                                {
                                    //convert the uploaded GpuMat, not the CPU-side input
                                    CudaInvoke.CvtColor(gpuImage, cudaBgra, ColorConversion.Bgr2Bgra);
                                    des.DetectMultiScale(cudaBgra, vr);
                                    regions = vr.ToArray();
                                }
                            watch.Stop();
                        }
                    }
                }
            }
            else
            {
                using (InputArray iaImage = image.GetInputArray())
                {
                    //this is the CPU/OpenCL version
                    using (HOGDescriptor des = new HOGDescriptor())
                    {
                        des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
                        watch = Stopwatch.StartNew();

                        MCvObjectDetection[] results = des.DetectMultiScale(image);
                        regions = new Rectangle[results.Length];
                        for (int i = 0; i < results.Length; i++)
                        {
                            regions[i] = results[i].Rect;
                        }
                        watch.Stop();
                    }
                }
            }


            processingTime = watch.ElapsedMilliseconds;

            return(regions);
        }
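A minimal call site for FindPerson; the image name is illustrative.

        // Hypothetical usage: detect pedestrians and report the timing.
        long ms;
        Rectangle[] people = FindPerson(new Mat("pedestrian.png"), out ms);
        Console.WriteLine("{0} region(s) found in {1} ms", people.Length, ms);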
Example #58
        public static void Detect(
            IInputArray image, String faceFileName,
            List <Rectangle> faces, List <Rectangle> eyes,
            out long detectionTime)
        {
            Stopwatch watch;

            using (InputArray iaImage = image.GetInputArray())
            {
#if !(__IOS__ || NETFX_CORE)
                if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
                {
                    using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                    {
                        face.ScaleFactor   = 1.1;
                        face.MinNeighbors  = 10;
                        face.MinObjectSize = Size.Empty;

                        watch = Stopwatch.StartNew();
                        using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image))
                            using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                                using (GpuMat region = new GpuMat())
                                {
                                    face.DetectMultiScale(gpuGray, region);
                                    Rectangle[] faceRegion = face.Convert(region);
                                    faces.AddRange(faceRegion);
                                }
                        watch.Stop();
                    }
                }
                else
#endif
                {
                    //Read the HaarCascade objects
                    using (CascadeClassifier face = new CascadeClassifier(faceFileName))
                    {
                        watch = Stopwatch.StartNew();

                        using (UMat ugray = new UMat())
                        {
                            CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                            //normalizes brightness and increases contrast of the image
                            CvInvoke.EqualizeHist(ugray, ugray);

                            //Detect the faces from the gray scale image and store their locations as rectangles
                            Rectangle[] facesDetected = face.DetectMultiScale(
                                ugray,
                                1.1,
                                10,
                                new Size(20, 20));

                            faces.AddRange(facesDetected);
                        }
                        watch.Stop();
                    }
                }
                detectionTime = watch.ElapsedMilliseconds;
            }
        }
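A hedged call-site sketch for Detect(); the cascade file and image are the stock OpenCV samples and are assumed to sit next to the binary.

        // Hypothetical usage: collect face rectangles from a single frame.
        List<Rectangle> faces = new List<Rectangle>();
        List<Rectangle> eyes = new List<Rectangle>();
        long detectionTime;
        using (Mat frame = CvInvoke.Imread("lena.jpg"))
        {
            Detect(frame, "haarcascade_frontalface_default.xml", faces, eyes, out detectionTime);
            Console.WriteLine("{0} face(s) in {1} ms", faces.Count, detectionTime);
        }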