private Rectangle[] FindEyes(string eyeFileName, CudaImage<Gray, Byte> image)
 {
     using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
     using (GpuMat eyeRegionMat = new GpuMat())
     {
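     //Run the eye cascade on the GPU image, then convert the GpuMat result into managed rectangles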
         eye.DetectMultiScale(image, eyeRegionMat);
         Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
         return eyeRegion;
     }
 }
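A minimal call-site sketch for the helper above, assuming a cascade path (eyeCascadePath) and a grayscale face region already uploaded to the GPU (grayFace); both names are hypothetical:

     // Hypothetical call: locate eyes inside an already-cropped grayscale face region
     Rectangle[] eyeRects = FindEyes(eyeCascadePath, grayFace);
     foreach (Rectangle r in eyeRects)
         Trace.WriteLine(String.Format("Eye found at {0}", r));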
Example #2
        public void TestHOG1()
        {
            if (CudaInvoke.HasCuda)
            {
                using (CudaHOGDescriptor hog = new CudaHOGDescriptor())
                    using (Image <Bgr, Byte> image = new Image <Bgr, byte>("pedestrian.png"))
                    {
                        float[] pedestrianDescriptor = CudaHOGDescriptor.GetDefaultPeopleDetector();
                        hog.SetSVMDetector(pedestrianDescriptor);

                        Stopwatch   watch = Stopwatch.StartNew();
                        Rectangle[] rects;
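                        //Upload the frame to the GPU and convert it to BGRA, the channel layout the CUDA HOG detector expects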
                        using (CudaImage <Bgr, Byte> CudaImage = new CudaImage <Bgr, byte>(image))
                            using (CudaImage <Bgra, Byte> gpuBgra = CudaImage.Convert <Bgra, Byte>())
                                rects = hog.DetectMultiScale(gpuBgra);
                        watch.Stop();

                        Assert.AreEqual(1, rects.Length);

                        foreach (Rectangle rect in rects)
                        {
                            image.Draw(rect, new Bgr(Color.Red), 1);
                        }
                        Trace.WriteLine(String.Format("HOG detection time: {0} ms", watch.ElapsedMilliseconds));

                        //ImageViewer.Show(image, String.Format("Detection Time: {0}ms", watch.ElapsedMilliseconds));
                    }
            }
        }
        public Image <Bgra, byte> CalcuateMeanshift(Image <Bgra, byte> imgInput, int spatialWindow = 5, int colorWindow = 5, int MinSegmentSize = 20)
        {
            Image <Bgra, byte> imgOutput = new Image <Bgra, byte>(imgInput.Width, imgInput.Height, new Bgra(0, 0, 0, 0));

            //CudaImage<Bgra, byte> requires BGRA data, so the caller is expected to pass a BGRA image
            using (CudaImage <Bgra, byte> inputCuda = new CudaImage <Bgra, byte>(imgInput))
            {
                //Run the mean-shift segmentation on the GPU and write the segmented result into imgOutput
                CudaInvoke.MeanShiftSegmentation(inputCuda, imgOutput, spatialWindow, colorWindow, MinSegmentSize, new MCvTermCriteria(1, .001));
            }

            return(imgOutput);
        }
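A usage sketch for the method above, assuming a frame loaded with Emgu CV (the file names are hypothetical). The input is converted to BGRA first, since CalcuateMeanshift expects a BGRA image:

            // Load a frame, convert it to BGRA and run the GPU mean-shift segmentation defined above
            using (Image<Bgr, byte> frame = new Image<Bgr, byte>("input.png"))
            using (Image<Bgra, byte> frameBgra = frame.Convert<Bgra, byte>())
            using (Image<Bgra, byte> segmented = CalcuateMeanshift(frameBgra))
            {
                segmented.Save("segmented.png");
            }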
Example #4
        public void TestHOG1()
        {
            if (CudaInvoke.HasCuda)
            {
                using (CudaHOG hog = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8), 9))
                    using (Mat pedestrianDescriptor = hog.GetDefaultPeopleDetector())
                        using (Image <Bgr, Byte> image = new Image <Bgr, byte>("pedestrian.png"))
                        {
                            hog.SetSVMDetector(pedestrianDescriptor);
                            //hog.GroupThreshold = 0;
                            Stopwatch   watch = Stopwatch.StartNew();
                            Rectangle[] rects;
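                            //Detect on the GPU; the detections come back through a VectorOfRect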
                            using (CudaImage <Bgr, Byte> CudaImage = new CudaImage <Bgr, byte>(image))
                                using (CudaImage <Bgra, Byte> gpuBgra = CudaImage.Convert <Bgra, Byte>())
                                    using (VectorOfRect vRect = new VectorOfRect())
                                    {
                                        hog.DetectMultiScale(gpuBgra, vRect);
                                        rects = vRect.ToArray();
                                    }
                            watch.Stop();

                            Assert.AreEqual(1, rects.Length);

                            foreach (Rectangle rect in rects)
                            {
                                image.Draw(rect, new Bgr(Color.Red), 1);
                            }
                            Trace.WriteLine(String.Format("HOG detection time: {0} ms", watch.ElapsedMilliseconds));

                            //ImageViewer.Show(image, String.Format("Detection Time: {0}ms", watch.ElapsedMilliseconds));
                        }
            }
        }
Example #5
        public void TestCudaFASTDetector()
        {
            if (!CudaInvoke.HasCuda)
            {
                return;
            }
            using (Image <Bgr, Byte> img = new Image <Bgr, byte>("box.png"))
                using (CudaImage <Bgr, Byte> CudaImage = new CudaImage <Bgr, byte>(img))
                    using (CudaImage <Gray, Byte> grayCudaImage = CudaImage.Convert <Gray, Byte>())
                        using (CudaFastFeatureDetector featureDetector = new CudaFastFeatureDetector(10, true, FastDetector.DetectorType.Type9_16, 1000))
                            using (VectorOfKeyPoint kpts = new VectorOfKeyPoint())
                                using (GpuMat keyPointsMat = new GpuMat())
                                {
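                                    //Detect keypoints asynchronously on the GPU, then convert the raw GpuMat into a VectorOfKeyPoint on the host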
                                    featureDetector.DetectAsync(grayCudaImage, keyPointsMat);
                                    featureDetector.Convert(keyPointsMat, kpts);
                                    //featureDetector.DetectKeyPointsRaw(grayCudaImage, null, keyPointsMat);

                                    //featureDetector.DownloadKeypoints(keyPointsMat, kpts);

                                    foreach (MKeyPoint kpt in kpts.ToArray())
                                    {
                                        img.Draw(new CircleF(kpt.Point, 3.0f), new Bgr(0, 255, 0), 1);
                                    }

                                    //ImageViewer.Show(img);
                                }
        }
Example #6
        public void TestCudaRemap()
        {
            if (!CudaInvoke.HasCuda)
            {
                return;
            }
            Image <Gray, float> xmap = new Image <Gray, float>(2, 2);

            xmap.Data[0, 0, 0] = 0; xmap.Data[0, 1, 0] = 0;
            xmap.Data[1, 0, 0] = 1; xmap.Data[1, 1, 0] = 1;
            Image <Gray, float> ymap = new Image <Gray, float>(2, 2);

            ymap.Data[0, 0, 0] = 0; ymap.Data[0, 1, 0] = 1;
            ymap.Data[1, 0, 0] = 0; ymap.Data[1, 1, 0] = 1;
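             //xmap/ymap give, for each destination pixel, the source x and y coordinate to sample; these 2x2 maps transpose the image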

            Image <Gray, Byte> image = new Image <Gray, byte>(2, 2);

            image.SetRandNormal(new MCvScalar(), new MCvScalar(255));

            using (CudaImage <Gray, Byte> CudaImage = new CudaImage <Gray, byte>(image))
                using (CudaImage <Gray, float> xCudaImage = new CudaImage <Gray, float>(xmap))
                    using (CudaImage <Gray, float> yCudaImage = new CudaImage <Gray, float>(ymap))
                        using (CudaImage <Gray, Byte> remapedImage = new CudaImage <Gray, byte>(CudaImage.Size))
                        {
                            CudaInvoke.Remap(CudaImage, remapedImage, xCudaImage, yCudaImage, CvEnum.Inter.Cubic, CvEnum.BorderType.Default, new MCvScalar(), null);
                        }
        }
Example #7
        /// <summary>
        /// Apply the cascade to an input frame and return the array of detection objects.
        /// </summary>
        /// <param name="image">A frame on which the detector will be applied.</param>
        /// <param name="rois">A region-of-interest mask generated by genRoi. Only the objects that fall into one of the regions will be returned.</param>
        /// <param name="stream">Use a Stream to call the function asynchronously (non-blocking) or null to call the function synchronously (blocking).</param>
        /// <returns>An array of detection objects</returns>
        public GpuMat Detect(CudaImage <Bgr, Byte> image, GpuMat <int> rois, Emgu.CV.Cuda.Stream stream = null)
        {
            GpuMat result = new GpuMat();

            SoftCascadeInvoke.cudaSoftCascadeDetectorDetect(_ptr, image, rois, result, stream);
            return(result);
        }
Example #8
        /// <summary>
        /// Convert the CudaImage to its equivalent Bitmap representation
        /// </summary>
        public static Bitmap ToBitmap <TColor, TDepth>(this CudaImage <TColor, TDepth> cudaImage) where
        TColor : struct, IColor
            where TDepth : new()
        {
            if (typeof(TColor) == typeof(Bgr) && typeof(TDepth) == typeof(Byte))
            {
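                //Fast path: a Bgr/byte image can be downloaded straight into the Bitmap's 24bpp buffer, avoiding an extra conversion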
                Size   s      = cudaImage.Size;
                Bitmap result = new Bitmap(s.Width, s.Height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                System.Drawing.Imaging.BitmapData data = result.LockBits(new Rectangle(Point.Empty, result.Size),
                                                                         System.Drawing.Imaging.ImageLockMode.WriteOnly, result.PixelFormat);
                using (Image <TColor, TDepth> tmp = new Image <TColor, TDepth>(s.Width, s.Height, data.Stride, data.Scan0)
                       )
                {
                    cudaImage.Download(tmp);
                }

                result.UnlockBits(data);
                return(result);
            }
            else
            {
                using (Image <TColor, TDepth> tmp = cudaImage.ToImage())
                {
                    return(tmp.ToBitmap());
                }
            }
        }
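A short usage sketch for the extension method above, assuming an existing CudaImage<Bgr, Byte> named gpuFrame (hypothetical) and that the extension class is in scope:

            // Download the GPU image and save it via System.Drawing; the file name is hypothetical
            using (Bitmap bmp = gpuFrame.ToBitmap())
            {
                bmp.Save("frame.png", System.Drawing.Imaging.ImageFormat.Png);
            }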
        public static void Detect(
            Mat image, String faceFileName,
            List <Rectangle> faces,
            bool tryUseCuda,
            out long detectionTime)
        {
            Stopwatch watch;

         #if !(__IOS__ || NETFX_CORE)
            if (tryUseCuda && CudaInvoke.HasCuda)
            {
                using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                {
                    face.ScaleFactor   = 1.1;
                    face.MinNeighbors  = 10;
                    face.MinObjectSize = Size.Empty;
                    watch = Stopwatch.StartNew();
                    using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image))
                        using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                            using (GpuMat region = new GpuMat())
                            {
                                face.DetectMultiScale(gpuGray, region);
                                Rectangle[] faceRegion = face.Convert(region);
                                faces.AddRange(faceRegion);
                            }
                    watch.Stop();
                }
            }
            else
         #endif
            {
                //Read the HaarCascade objects
                using (CascadeClassifier face = new CascadeClassifier(faceFileName))
                {
                    watch = Stopwatch.StartNew();
                    using (UMat ugray = new UMat())
                    {
                        CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                        //normalizes brightness and increases contrast of the image
                        CvInvoke.EqualizeHist(ugray, ugray);

                        //Detect the faces from the grayscale image and store the locations as rectangles
                        Rectangle[] facesDetected = face.DetectMultiScale(
                            ugray,
                            1.1,
                            10,
                            new Size(20, 20));

                        faces.AddRange(facesDetected);
                    }
                    watch.Stop();
                }
            }
            detectionTime = watch.ElapsedMilliseconds;
        }
Example #10
        public static void Detect(
            Mat image, String faceFileName, String eyeFileName,
            List <Rectangle> faces, List <Rectangle> eyes,
            bool tryUseCuda)
        {
#if !(__IOS__ || NETFX_CORE)
            if (tryUseCuda && CudaInvoke.HasCuda)
            {
                if (face == null)
                {
                    face = new CudaCascadeClassifier(faceFileName);
                }
                if (eye == null)
                {
                    eye = new CudaCascadeClassifier(eyeFileName);
                }
                //using (face)
                //using (eye)
                {
                    face.ScaleFactor   = 1.1;
                    face.MinNeighbors  = 10;
                    face.MinObjectSize = Size.Empty;
                    eye.ScaleFactor    = 1.1;
                    eye.MinNeighbors   = 10;
                    eye.MinObjectSize  = Size.Empty;
                    using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image))
                        using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                            using (GpuMat region = new GpuMat())
                            {
                                face.DetectMultiScale(gpuGray, region);
                                Rectangle[] faceRegion = face.Convert(region);
                                faces.AddRange(faceRegion);
                                foreach (Rectangle f in faceRegion)
                                {
                                    using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                                    {
                                        //For some reason a clone is required.
                                        //Might be a bug of CudaCascadeClassifier in opencv
                                        using (CudaImage <Gray, Byte> clone = faceImg.Clone(null))
                                            using (GpuMat eyeRegionMat = new GpuMat())
                                            {
                                                eye.DetectMultiScale(clone, eyeRegionMat);
                                                Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
                                                foreach (Rectangle e in eyeRegion)
                                                {
                                                    Rectangle eyeRect = e;
                                                    eyeRect.Offset(f.X, f.Y);
                                                    eyes.Add(eyeRect);
                                                }
                                            }
                                    }
                                }
                            }
                }
            }
#endif
        }
        /// <summary>
        /// Detect a sign using the Haar cascade method
        /// </summary>
        /// <param name="image">Source image</param>
        /// <param name="singFileName">Path to the cascade file</param>
        /// <param name="sings">List of signs found in the image</param>
        /// <param name="detectionTime">Detection time in milliseconds</param>
        public void Detect(IInputArray image, String singFileName, List <Rectangle> sings, out long detectionTime)
        {
            Stopwatch watch;

            using (InputArray iaImage = image.GetInputArray())
            {
                if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
                {
                    using (CudaCascadeClassifier sing = new CudaCascadeClassifier(singFileName))
                    {
                        sing.ScaleFactor   = 1.1;           //Scale factor
                        sing.MinNeighbors  = 10;            //Grouping of pre-detected candidates; the lower the value, the more false positives
                        sing.MinObjectSize = Size.Empty;    //Minimum object size

                        watch = Stopwatch.StartNew();       //Timer
                        //Convert the image to grayscale and prepare the region holding possible sign locations
                        using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image))
                            using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                                using (GpuMat region = new GpuMat())
                                {
                                    sing.DetectMultiScale(gpuGray, region);
                                    Rectangle[] singRegion = sing.Convert(region);
                                    sings.AddRange(singRegion);
                                }
                        watch.Stop();
                    }
                }
                else
                {
                    //Read the HaarCascade
                    using (CascadeClassifier sing = new CascadeClassifier(singFileName))
                    {
                        watch = Stopwatch.StartNew();

                        using (UMat ugray = new UMat())
                        {
                            CvInvoke.CvtColor(image, ugray, ColorConversion.Bgr2Gray);

                            //Normalize brightness and increase contrast
                            CvInvoke.EqualizeHist(ugray, ugray);

                            //Detect the sign in the grayscale image and store its location as a rectangle
                            Rectangle[] singsDetected = sing.DetectMultiScale(
                                ugray,              //Source image
                                1.1,                //Scale factor
                                10,                 //Grouping of pre-detected candidates; the lower the value, the more false positives
                                new Size(20, 20));  //Minimum object size

                            sings.AddRange(singsDetected);
                        }
                        watch.Stop();
                    }
                }
            }
            detectionTime = watch.ElapsedMilliseconds;
        }
Example #12
 public void TestInplaceNot()
 {
     if (CudaInvoke.HasCuda)
     {
         Image <Bgr, Byte>     img    = new Image <Bgr, byte>(300, 400);
         CudaImage <Bgr, Byte> gpuMat = new CudaImage <Bgr, byte>(img);
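          //BitwiseNot can write back into its source, so the image is inverted in place on the GPU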
         CudaInvoke.BitwiseNot(gpuMat, gpuMat, null, null);
         Assert.IsTrue(gpuMat.Equals(new CudaImage <Bgr, Byte>(img.Not())));
     }
 }
    private void ProcessFrame(object sender, EventArgs e)
    {
        var mat = new Mat();

        this.webcam.Read(mat);

        Rectangle[] faceRegion;

        // Upload the frame to the GPU, convert to grayscale and run the cascade,
        // disposing the GPU buffers once the detections are back on the CPU.
        using (CudaImage <Bgr, Byte> gpuImg = new CudaImage <Bgr, byte>())
        using (GpuMat region = new GpuMat())
        {
            gpuImg.Upload(mat);
            using (CudaImage <Gray, Byte> grayImg = gpuImg.Convert <Gray, Byte>())
            {
                haarCascade.DetectMultiScale(grayImg, region);
            }
            faceRegion = haarCascade.Convert(region);
        }

        Rectangle face;

        if (faceRegion.Length > 0 && faceRegion[0].Width > 0)
        {
            if (!IsRegionValid(faceRegion[0]))
            {
                return;
            }

            face = faceRegion[0];
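            //Estimate the user's 3D position from the face rectangle; userFaceSize is presumably the real face width in centimetres (the /100 converts to metres)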
            float meterPerPxl = (userFaceSize / face.Width) / 100f;
            this._userPosition.x = -(face.X + (face.Width / 2) - (camWidth / 2)) * ((userFaceSize / face.Width) / 100);
            this._userPosition.y = -(face.Y + (face.Height / 2) - (camHeight / 2)) * ((userFaceSize / face.Width) / 100);
            this._userPosition.z = -camDistanceRatio * ((userFaceSize / face.Width) / 100);
            currentFace          = face;
            this.newFaceDetected = true;
        }
        else
        {
            currentFace.Width = -1;
        }

        /*if (webcamFeedbackEnabled) {
         *      var img = mat.ToImage<Bgr, byte>();
         *      for (int i = 0; i < faceRegion.Length; i++) {
         *              if (i == 0)
         *                      img.Draw(face, new Bgr(255, 255, 0), 4);
         *              else
         *                      img.Draw(faceRegion[i], new Bgr(0, 255, 255), 4);
         *      }
         *
         *      Dispatcher.InvokeAsync(() => {
         *              Debug.Log(img.Convert<Rgb, byte>().Bytes.Length);
         *              currentFrame.LoadRawTextureData(img.Convert<Rgb, byte>().Bytes);
         *              currentFrame.Apply();
         *              img.Dispose();
         *      });
         * }*/
    }
Example #14
        private static Bitmap ResizeBitmapWithCuda(Image <Bgr, Byte> sourceBM, ref Size newSize)
        {
            // Upload the source image to the GPU
            using (CudaImage <Bgr, Byte> img = new CudaImage <Bgr, Byte>(sourceBM))
            // Resize using linear interpolation; Resize returns a new CudaImage rather than resizing in place
            using (CudaImage <Bgr, Byte> resized = img.Resize(newSize, Inter.Linear))
            {
                // Download the result and return it as a .NET Bitmap object
                return(resized.Bitmap);
            }
        }
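A hypothetical call site for the helper above, assuming frame is an Image<Bgr, Byte> and pictureBox1 is a WinForms PictureBox (both names are assumptions):

            // Downscale on the GPU and hand the resulting Bitmap to the UI
            Size target = new Size(640, 360);
            pictureBox1.Image = ResizeBitmapWithCuda(frame, ref target);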
Example #15
        public static void Detect(IInputArray image, List <Rectangle> faces)
        {
            string faceFileName = @"./Resources/haarcascade_frontalface_default.xml";

            using (InputArray iaImage = image.GetInputArray())
            {
#if !(__IOS__ || NETFX_CORE)
                if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
                {
                    using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                    {
                        face.ScaleFactor   = 1.1;
                        face.MinNeighbors  = 10;
                        face.MinObjectSize = Size.Empty;
                        using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image))
                            using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                                using (GpuMat region = new GpuMat())
                                {
                                    face.DetectMultiScale(gpuGray, region);
                                    Rectangle[] faceRegion = face.Convert(region);
                                    faces.AddRange(faceRegion);
                                }
                    }
                }
                else
#endif
                {
                    //Read the HaarCascade objects
                    using (CascadeClassifier face = new CascadeClassifier(faceFileName))
                    {
                        using (UMat ugray = new UMat())
                        {
                            CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                            //normalizes brightness and increases contrast of the image
                            CvInvoke.EqualizeHist(ugray, ugray);

                            //Detect the faces from the grayscale image and store the locations as rectangles
                            Rectangle[] facesDetected = face.DetectMultiScale(
                                ugray,
                                1.1,
                                10,
                                new Size(20, 20));

                            faces.AddRange(facesDetected);
                        }
                    }
                }
            }
        }
Example #16
        public void TestBilaterialFilter()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Bgr, Byte>      img       = new Image <Bgr, byte>("pedestrian.png");
                Image <Gray, byte>     gray      = img.Convert <Gray, Byte>();
                CudaImage <Gray, Byte> CudaImage = new CudaImage <Gray, byte>(gray);

                CudaImage <Gray, Byte> gpuBilaterial = new CudaImage <Gray, byte>(CudaImage.Size);
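                //Bilateral filter with a 7-pixel kernel and colour/spatial sigmas of 5; the result stays on the GPU in gpuBilaterial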
                CudaInvoke.BilateralFilter(CudaImage, gpuBilaterial, 7, 5, 5, CvEnum.BorderType.Default, null);

                //Emgu.CV.UI.ImageViewer.Show(gray.ConcateHorizontal(gpuBilaterial.ToImage()));
            }
        }
Example #17
        public void TestCudaPyr()
        {
            if (!CudaInvoke.HasCuda)
            {
                return;
            }
            Image <Gray, Byte> img = new Image <Gray, byte>(640, 480);

            img.SetRandUniform(new MCvScalar(), new MCvScalar(255, 255, 255));
            Image <Gray, Byte> down = img.PyrDown();
            Image <Gray, Byte> up   = down.PyrUp();

            CudaImage <Gray, Byte> gImg  = new CudaImage <Gray, byte>(img);
            CudaImage <Gray, Byte> gDown = new CudaImage <Gray, byte>(img.Size.Width >> 1, img.Size.Height >> 1);
            CudaImage <Gray, Byte> gUp   = new CudaImage <Gray, byte>(img.Size);
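            //Build one pyramid level down and back up on the GPU; the result should match the CPU PyrDown/PyrUp to within one grey level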

            CudaInvoke.PyrDown(gImg, gDown, null);
            CudaInvoke.PyrUp(gDown, gUp, null);

            CvInvoke.AbsDiff(down, gDown.ToImage(), down);
            CvInvoke.AbsDiff(up, gUp.ToImage(), up);
            double[] minVals, maxVals;
            Point[]  minLocs, maxLocs;
            down.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
            double maxVal = 0.0;

            for (int i = 0; i < maxVals.Length; i++)
            {
                if (maxVals[i] > maxVal)
                {
                    maxVal = maxVals[i];
                }
            }
            Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
            EmguAssert.IsTrue(maxVal <= 1.0);
            //Assert.LessOrEqual(maxVal, 1.0);

            up.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
            maxVal = 0.0;
            for (int i = 0; i < maxVals.Length; i++)
            {
                if (maxVals[i] > maxVal)
                {
                    maxVal = maxVals[i];
                }
            }
            Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
            EmguAssert.IsTrue(maxVal <= 1.0);
            //Assert.LessOrEqual(maxVal, 1.0);
        }
Example #18
        public void TestCountNonZero()
        {
            if (!CudaInvoke.HasCuda)
            {
                return;
            }

            //Mat m = new Mat(100, 200, Mat.Depth.Cv8U, 1);
            CudaImage <Gray, Byte> m = new CudaImage <Gray, Byte>(100, 200);

            m.SetTo(new MCvScalar(), null, null);
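            //The image was just filled with zeros, so no non-zero pixel should be found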
            EmguAssert.IsTrue(0 == CudaInvoke.CountNonZero(m));
            //Trace.WriteLine(String.Format("non zero count: {0}", ));
        }
        /// <summary>
        /// Find the pedestrian in the image
        /// </summary>
        /// <param name="image">The image</param>
        /// <param name="processingTime">The pedestrian detection time in milliseconds</param>
        /// <returns>The image with pedestrian highlighted.</returns>
        public static Image<Bgr, Byte> Find(Image<Bgr, Byte> image, out long processingTime)
        {
            Stopwatch watch;
            Rectangle[] regions;

            //check if there is a compatible GPU to run pedestrian detection
            if (CudaInvoke.HasCuda)
            {  //this is the GPU version
                using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8)))
                {
                    des.SetSVMDetector(des.GetDefaultPeopleDetector());

                    watch = Stopwatch.StartNew();
                    using (CudaImage<Bgr, Byte> gpuImg = new CudaImage<Bgr, byte>(image))
                    using (CudaImage<Bgra, Byte> gpuBgra = gpuImg.Convert<Bgra, Byte>())
                    using (VectorOfRect vr = new VectorOfRect())
                    {
                        //gpuBgra is already BGRA after Convert<Bgra, Byte>(), so no extra CvtColor is needed
                        des.DetectMultiScale(gpuBgra, vr);
                        regions = vr.ToArray();
                    }
                }
            }
            else
            {  //this is the CPU version
                using (HOGDescriptor des = new HOGDescriptor())
                {
                    des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
                    //load the image into a UMat so it will automatically use OpenCL if available
                    UMat umat = image.ToUMat();

                    watch = Stopwatch.StartNew();
                    //regions = des.DetectMultiScale(image);
                    MCvObjectDetection[] results = des.DetectMultiScale(umat);
                    regions = new Rectangle[results.Length];
                    for (int i = 0; i < results.Length; i++)
                        regions[i] = results[i].Rect;
                }
            }
            watch.Stop();

            processingTime = watch.ElapsedMilliseconds;

            foreach (Rectangle pedestrian in regions)
            {
                image.Draw(pedestrian, new Bgr(Color.Red), 1);
            }
            return image;
        }
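A usage sketch for Find, assuming the pedestrian.png file used by the other examples:

            // Run the detector on a file and report the timing; Find draws the rectangles on the input image
            long ms;
            using (Image<Bgr, Byte> frame = new Image<Bgr, Byte>("pedestrian.png"))
            {
                Image<Bgr, Byte> annotated = Find(frame, out ms);
                Trace.WriteLine(String.Format("Pedestrian detection took {0} ms", ms));
            }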
Example #20
        public void TestClone()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, Byte> img = new Image <Gray, byte>(300, 400);
                img.SetRandUniform(new MCvScalar(0.0), new MCvScalar(255.0));

                using (CudaImage <Gray, Byte> gImg1 = new CudaImage <Gray, byte>(img))
                    using (CudaImage <Gray, Byte> gImg2 = gImg1.Clone(null))
                        using (Image <Gray, Byte> img2 = gImg2.ToImage())
                        {
                            Assert.IsTrue(img.Equals(img2));
                        }
            }
        }
Example #21
 public void TestCanny()
 {
     if (CudaInvoke.HasCuda)
     {
         using (Image <Bgr, Byte> image = new Image <Bgr, byte>("pedestrian.png"))
             using (CudaImage <Bgr, Byte> CudaImage = new CudaImage <Bgr, byte>(image))
                 using (CudaImage <Gray, Byte> gray = CudaImage.Convert <Gray, Byte>())
                     using (CudaImage <Gray, Byte> canny = new CudaImage <Gray, byte>(gray.Size))
                         using (CudaCannyEdgeDetector detector = new CudaCannyEdgeDetector(20, 100, 3, false))
                         {
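                              //Canny on the GPU with low/high thresholds 20/100 and a 3x3 Sobel aperture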
                             detector.Detect(gray, canny);
                             //GpuInvoke.Canny(gray, canny, 20, 100, 3, false);
                             //ImageViewer.Show(canny);
                         }
     }
 }
Example #22
    public long DetectObjects(Mat image, List <Rectangle> objects)
    {
        Stopwatch watch = Stopwatch.StartNew();

        using (CudaImage <Gray, Byte> gpuImage = new CudaImage <Gray, byte> (image)) {
            using (GpuMat region = new GpuMat()) {
                _classifier.DetectMultiScale(gpuImage, region);
                Rectangle[] faceRegion = _classifier.Convert(region);
                objects.AddRange(faceRegion);
            }
        }

        watch.Stop();
        return(watch.ElapsedMilliseconds);
    }
        IImage CudaDetect(IImage original, List <Rectangle> faces, List <Rectangle> eyes)
        {
            using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
                {
                    face.ScaleFactor   = 1.1;
                    face.MinNeighbors  = 10;
                    face.MinObjectSize = Size.Empty;
                    eye.ScaleFactor    = 1.1;
                    eye.MinNeighbors   = 10;
                    eye.MinObjectSize  = Size.Empty;
                    using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(original))
                        using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                            using (GpuMat region = new GpuMat())
                            {
                                face.DetectMultiScale(gpuGray, region);
                                Rectangle[] faceRegion = face.Convert(region);
                                faces.AddRange(faceRegion);
                                foreach (Rectangle f in faceRegion)
                                {
                                    using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                                    {
                                        //For some reason a clone is required.
                                        //Might be a bug of CudaCascadeClassifier in opencv
                                        using (CudaImage <Gray, Byte> clone = faceImg.Clone(null))
                                            using (GpuMat eyeRegionMat = new GpuMat())
                                            {
                                                eye.DetectMultiScale(clone, eyeRegionMat);
                                                Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
                                                foreach (Rectangle e in eyeRegion)
                                                {
                                                    Rectangle eyeRect = e;
                                                    eyeRect.Offset(f.X, f.Y);
                                                    eyes.Add(eyeRect);
                                                }
                                            }
                                    }
                                }
                            }
                }
            IImage copy = CopyAndDraw(original, faces.ToArray());

            copy = CopyAndDraw(copy, eyes.ToArray());
            return(copy);
            //return eyes;
        }
Example #24
        public void TestClahe()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, Byte>     image      = EmguAssert.LoadImage <Gray, Byte>("pedestrian.png");
                CudaImage <Gray, Byte> cudaImage  = new CudaImage <Gray, byte>(image);
                CudaImage <Gray, Byte> cudaResult = new CudaImage <Gray, byte>(cudaImage.Size);

                using (CudaClahe clahe = new CudaClahe(40.0, new Size(8, 8)))
                {
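                    //CLAHE with a clip limit of 40 and an 8x8 tile grid; the result is downloaded back to the CPU for inspection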
                    Image <Gray, Byte> result = new Image <Gray, byte>(cudaResult.Size);
                    clahe.Apply(cudaImage, cudaResult, null);
                    cudaResult.Download(result);
                    //Emgu.CV.UI.ImageViewer.Show(image.ConcateHorizontal(result));
                }
            }
        }
Example #25
 public void TestCudaFlip()
 {
     if (CudaInvoke.HasCuda)
     {
         using (Image <Bgr, Byte> img1 = new Image <Bgr, byte>(1200, 640))
         {
             img1.SetRandUniform(new MCvScalar(0, 0, 0), new MCvScalar(255, 255, 255));
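              //Flip both axes on the CPU and on the GPU and verify the two results match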
             using (Image <Bgr, Byte> img1Flip = img1.Flip(CvEnum.FlipType.Horizontal | CvEnum.FlipType.Vertical))
                 using (CudaImage <Bgr, Byte> cudaImage = new CudaImage <Bgr, byte>(img1))
                     using (CudaImage <Bgr, Byte> cudaFlip = new CudaImage <Bgr, byte>(img1.Size))
                     {
                         CudaInvoke.Flip(cudaImage, cudaFlip, CvEnum.FlipType.Horizontal | CvEnum.FlipType.Vertical, null);
                         cudaFlip.Download(img1);
                         Assert.IsTrue(img1.Equals(img1Flip));
                     }
         }
     }
 }
Example #26
        public void TestColorConvert()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Bgr, Byte> img = new Image <Bgr, byte>(300, 400);
                img.SetRandUniform(new MCvScalar(0.0, 0.0, 0.0), new MCvScalar(255.0, 255.0, 255.0));
                Image <Gray, Byte> imgGray = img.Convert <Gray, Byte>();
                Image <Hsv, Byte>  imgHsv  = img.Convert <Hsv, Byte>();

                CudaImage <Bgr, Byte>  gpuImg     = new CudaImage <Bgr, Byte>(img);
                CudaImage <Gray, Byte> gpuImgGray = gpuImg.Convert <Gray, Byte>();
                CudaImage <Hsv, Byte>  gpuImgHsv  = gpuImg.Convert <Hsv, Byte>();

                Assert.IsTrue(gpuImgGray.Equals(new CudaImage <Gray, Byte>(imgGray)));
                Assert.IsTrue(gpuImgHsv.ToImage().Equals(imgHsv));
                Assert.IsTrue(gpuImgHsv.Equals(new CudaImage <Hsv, Byte>(imgHsv)));
            }
        }
Example #27
        public List <Rectangle> FilterImage(Image <Bgr, byte> currentFrame)
        {
            using (GpuMat faceRegionMat = new GpuMat())
            using (CudaImage <Gray, byte> cudaIMG = new CudaImage <Gray, byte>(currentFrame.Convert <Gray, byte>()))
            {
                List <Rectangle> rectangles = new List <Rectangle>();
                foreach (CudaCascadeClassifier F in filters)
                {
                    //The image is already grayscale, so it can be passed to each cascade directly
                    F.DetectMultiScale(cudaIMG, faceRegionMat);
                    Rectangle[] detectedSubjects = F.Convert(faceRegionMat);
                    foreach (Rectangle R in detectedSubjects)
                    {
                        rectangles.Add(R);
                    }
                }

                return(rectangles);
            }
        }
Example #28
        public Rectangle[] FindFaces(Mat frame, ref int type)
        {
            if (CudaInvoke.HasCuda && Global.useCuda)
            {
                using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(frame))
                    using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>())
                        using (GpuMat region = new GpuMat())
                        {
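                            //Try the frontal-face cascade on the GPU first; fall back to the profile cascade and finally to the alternative CPU cascade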
                            cuda_ccFace.DetectMultiScale(gpuGray, region);
                            Rectangle[] faces = cuda_ccFace.Convert(region);

                            if (faces.Length == 0)
                            {
                                cuda_ccSideFace.DetectMultiScale(gpuGray, region);
                                faces = cuda_ccSideFace.Convert(region);
                                if (faces.Length == 0)
                                {
                                    Image <Gray, byte> grayImage = gpuGray.ToImage();
                                    faces = ccAltFace.DetectMultiScale(grayImage, 1.02, 5, cuda_ccFace.MinObjectSize);
                                    if (faces.Length != 0)
                                    {
                                        type = 3;
                                    }
                                }
                                else
                                {
                                    type = 2;
                                }
                            }
                            else
                            {
                                type = 1;
                            }

                            return(faces);
                        }
            }
            else
            {
                return(FindFaces_WithoutGPU(frame, ref type));
            }
            //return null;
        }
Example #29
        public void TestResizeGray()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, Byte> img = new Image <Gray, byte>(300, 400);
                img.SetRandUniform(new MCvScalar(0.0), new MCvScalar(255.0));

                //Image<Gray, Byte> img = new Image<Gray, byte>("airplane.jpg");

                Image <Gray, Byte>     small       = img.Resize(100, 200, Emgu.CV.CvEnum.Inter.Linear);
                CudaImage <Gray, Byte> gpuImg      = new CudaImage <Gray, byte>(img);
                CudaImage <Gray, byte> smallGpuImg = new CudaImage <Gray, byte>(small.Size);
                CudaInvoke.Resize(gpuImg, smallGpuImg, small.Size);
                Image <Gray, Byte> diff = smallGpuImg.ToImage().AbsDiff(small);
                //ImageViewer.Show(smallGpuImg.ToImage());
                //ImageViewer.Show(small);
                //Assert.IsTrue(smallGpuImg.ToImage().Equals(small));
            }
        }
Example #30
        public void TestCudaWarpPerspective()
        {
            if (!CudaInvoke.HasCuda)
            {
                return;
            }
            Matrix <float> transformation = new Matrix <float>(3, 3);

            transformation.SetIdentity();
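            //With an identity transform the warp should reproduce the input image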

            Image <Gray, byte> image = new Image <Gray, byte>(480, 320);

            image.SetRandNormal(new MCvScalar(), new MCvScalar(255));

            using (GpuMat cudaImage = new GpuMat(image))
                using (CudaImage <Gray, Byte> resultCudaImage = new CudaImage <Gray, byte>())
                {
                    CudaInvoke.WarpPerspective(cudaImage, resultCudaImage, transformation, cudaImage.Size, CvEnum.Inter.Cubic, CvEnum.BorderType.Default, new MCvScalar(), null);
                }
        }
Example #31
        public void TestCudaBroxOpticalFlow()
        {
            if (!CudaInvoke.HasCuda)
            {
                return;
            }
            Image <Gray, Byte> prevImg, currImg;

            AutoTestVarious.OpticalFlowImage(out prevImg, out currImg);
            Mat flow = new Mat();
            CudaBroxOpticalFlow opticalflow = new CudaBroxOpticalFlow();
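            //Brox optical flow works on single-channel 32-bit float images, hence the Convert<Gray, float> below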

            using (CudaImage <Gray, float> prevGpu = new CudaImage <Gray, float>(prevImg.Convert <Gray, float>()))
                using (CudaImage <Gray, float> currGpu = new CudaImage <Gray, float>(currImg.Convert <Gray, float>()))
                    using (GpuMat flowGpu = new GpuMat())
                    {
                        opticalflow.Calc(prevGpu, currGpu, flowGpu);

                        flowGpu.Download(flow);
                    }
        }
Example #32
        public void TestCudaPyrLKOpticalFlow()
        {
            if (!CudaInvoke.HasCuda)
            {
                return;
            }
            Image <Gray, Byte> prevImg, currImg;

            AutoTestVarious.OpticalFlowImage(out prevImg, out currImg);
            Mat flow = new Mat();
            CudaDensePyrLKOpticalFlow opticalflow = new CudaDensePyrLKOpticalFlow(new Size(21, 21), 3, 30, false);
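            //Dense pyramidal Lucas-Kanade flow: 21x21 window, 3 pyramid levels, 30 iterations per level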

            using (CudaImage <Gray, Byte> prevGpu = new CudaImage <Gray, byte>(prevImg))
                using (CudaImage <Gray, byte> currGpu = new CudaImage <Gray, byte>(currImg))
                    using (GpuMat flowGpu = new GpuMat())
                    {
                        opticalflow.Calc(prevGpu, currGpu, flowGpu);

                        flowGpu.Download(flow);
                    }
        }
Example #35
      public static void Detect(
         IInputArray image, String faceFileName, String eyeFileName,
         List<Rectangle> faces, List<Rectangle> eyes,
         out long detectionTime)
      {
         Stopwatch watch;

         using (InputArray iaImage = image.GetInputArray())
         {

#if !(__IOS__ || NETFX_CORE)
            if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
            {
               using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
               using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
               {
                  face.ScaleFactor = 1.1;
                  face.MinNeighbors = 10;
                  face.MinObjectSize = Size.Empty;
                  eye.ScaleFactor = 1.1;
                  eye.MinNeighbors = 10;
                  eye.MinObjectSize = Size.Empty;
                  watch = Stopwatch.StartNew();
                  using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
                  using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
                  using (GpuMat region = new GpuMat())
                  {
                     face.DetectMultiScale(gpuGray, region);
                     Rectangle[] faceRegion = face.Convert(region);
                     faces.AddRange(faceRegion);
                     foreach (Rectangle f in faceRegion)
                     {
                        using (CudaImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                        {
                           //For some reason a clone is required.
                           //Might be a bug of CudaCascadeClassifier in opencv
                           using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
                           using (GpuMat eyeRegionMat = new GpuMat())
                           {
                              eye.DetectMultiScale(clone, eyeRegionMat);
                              Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
                              foreach (Rectangle e in eyeRegion)
                              {
                                 Rectangle eyeRect = e;
                                 eyeRect.Offset(f.X, f.Y);
                                 eyes.Add(eyeRect);
                              }
                           }
                        }
                     }
                  }
                  watch.Stop();
               }
            }
            else
#endif
            {
               //Read the HaarCascade objects
               using (CascadeClassifier face = new CascadeClassifier(faceFileName))
               using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
               {
                  watch = Stopwatch.StartNew();

                  using (UMat ugray = new UMat())
                  {
                     CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                     //normalizes brightness and increases contrast of the image
                     CvInvoke.EqualizeHist(ugray, ugray);

                      //Detect the faces from the grayscale image and store the locations as rectangles
                     Rectangle[] facesDetected = face.DetectMultiScale(
                        ugray,
                        1.1,
                        10,
                        new Size(20, 20));

                     faces.AddRange(facesDetected);

                     foreach (Rectangle f in facesDetected)
                     {
                        //Get the region of interest on the faces
                        using (UMat faceRegion = new UMat(ugray, f))
                        {
                           Rectangle[] eyesDetected = eye.DetectMultiScale(
                              faceRegion,
                              1.1,
                              10,
                              new Size(20, 20));

                           foreach (Rectangle e in eyesDetected)
                           {
                              Rectangle eyeRect = e;
                              eyeRect.Offset(f.X, f.Y);
                              eyes.Add(eyeRect);
                           }
                        }
                     }
                  }
                  watch.Stop();
               }
            }
            detectionTime = watch.ElapsedMilliseconds;
         }
      }
Example #37
      public void TestGpuMatAdd()
      {
         if (CudaInvoke.HasCuda)
         {
            int repeat = 1000;
            Image<Gray, Byte> img1 = new Image<Gray, byte>(1200, 640);
            Image<Gray, Byte> img2 = new Image<Gray, byte>(img1.Size);
            img1.SetRandUniform(new MCvScalar(0, 0, 0), new MCvScalar(255, 255, 255));
            img2.SetRandUniform(new MCvScalar(0, 0, 0), new MCvScalar(255, 255, 255));
            Image<Gray, Byte> cpuImgSum = new Image<Gray, byte>(img1.Size);
            Stopwatch watch = Stopwatch.StartNew();
            for (int i = 0; i < repeat; i++)
               CvInvoke.Add(img1, img2, cpuImgSum, null, CvEnum.DepthType.Cv8U);
            watch.Stop();
            Trace.WriteLine(String.Format("CPU processing time: {0}ms", (double)watch.ElapsedMilliseconds / repeat));

            watch.Reset(); watch.Start();
            CudaImage<Gray, Byte> gpuImg1 = new CudaImage<Gray, byte>(img1);
            CudaImage<Gray, Byte> gpuImg2 = new CudaImage<Gray, byte>(img2);
            CudaImage<Gray, Byte> gpuImgSum = new CudaImage<Gray, byte>(gpuImg1.Size);
            Stopwatch watch2 = Stopwatch.StartNew();
            for (int i = 0; i < repeat; i++)
               CudaInvoke.Add(gpuImg1, gpuImg2, gpuImgSum);
            watch2.Stop();
            Image<Gray, Byte> cpuImgSumFromGpu = gpuImgSum.ToImage();
            watch.Stop();
            Trace.WriteLine(String.Format("Core GPU processing time: {0}ms", (double)watch2.ElapsedMilliseconds / repeat));
            //Trace.WriteLine(String.Format("Total GPU processing time: {0}ms", (double)watch.ElapsedMilliseconds/repeat));

            Assert.IsTrue(cpuImgSum.Equals(cpuImgSumFromGpu));
         }
      }
        public List<Face> FindFaces(Image<Bgr, byte> image, string faceFileName, string eyeFileName, double scale, int neighbors, int minSize)
        {
            List<Face> faces = new List<Face>();
            List<Rectangle> facesRect = new List<Rectangle>();
            List<Rectangle> eyesRect = new List<Rectangle>();
            try
            {
                //Console.WriteLine(" FaceDetectGPU FindFaces faceFileName=" + faceFileName + " cuda = " + CudaInvoke.HasCuda);
                using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
                {
                    using (CudaImage<Bgr, Byte> CudaImage = new CudaImage<Bgr, byte>(image))
                    using (CudaImage<Gray, Byte> CudaGray = CudaImage.Convert<Gray, Byte>())
                    using (GpuMat region = new GpuMat())
                    {

                        face.DetectMultiScale(CudaGray, region);
                        Rectangle[] faceRegion = face.Convert(region);
                        facesRect.AddRange(faceRegion);
                        foreach (Rectangle f in faceRegion)
                        {
                            using (CudaImage<Gray, Byte> faceImg = CudaGray.GetSubRect(f))
                            {
                                using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
                                {
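                                    //Work on a clone of the face ROI; CudaCascadeClassifier appears to need a clone (see the note in the earlier eye-detection examples)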
                                    Face facemodel = new Face();
                                    eyesRect = new List<Rectangle>(FindEyes(eyeFileName, clone));
                                    if (eyesRect != null)
                                    {
                                        facemodel.EyesRects = eyesRect;
                                        facemodel.EyesCount = eyesRect.Count;
                                    }
                                    else
                                    {
                                        continue;
                                    }
                                    facemodel.FaceImage = clone.Bitmap;
                                    facemodel.Height = facemodel.FaceImage.Height;
                                    facemodel.Width = facemodel.FaceImage.Width;
                                    facemodel.FaceRect = f;
                                    facemodel.FramePosX = f.X;
                                    facemodel.FramePosY = f.Y;
                                    facemodel.ImageFrameSize = image.Size;

                                    Gray avgf = new Gray();
                                    MCvScalar avstd = new MCvScalar();
                                    clone.ToImage().AvgSdv(out avgf, out avstd);
                                    facemodel.StdDev = avstd.V0;
                                    faces.Add(facemodel);
                                    if (facemodel.FaceScore > 39)
                                        Console.WriteLine("FaceDetect USING gpuCUDA Add faceModel" + facemodel.FaceScore);

                                    break;
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception cudaerrJones)
            {
                Console.WriteLine("cudaerrJones = " + cudaerrJones);
            }

            return faces;
        }
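        // Hedged usage sketch for FindFaces above. The cascade file names are placeholders for the
        // standard OpenCV Haar cascade XML files; adjust the paths to wherever they live on disk.
        public void RunFaceDetectionExample(Image<Bgr, byte> frame)
        {
            List<Face> detected = FindFaces(
                frame,
                "haarcascade_frontalface_default.xml",  // assumed face cascade path
                "haarcascade_eye.xml",                  // assumed eye cascade path
                scale: 1.1, neighbors: 10, minSize: 20);
            Console.WriteLine("Faces found: " + detected.Count);
        }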
Example #44
0
      public void TestHOG1()
      {
         if (CudaInvoke.HasCuda)
         {
            using (CudaHOG hog = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8,8), 9))
            using (Mat pedestrianDescriptor = hog.GetDefaultPeopleDetector())
            using (Image<Bgr, Byte> image = new Image<Bgr, byte>("pedestrian.png"))
            {
               hog.SetSVMDetector(pedestrianDescriptor);
               //hog.GroupThreshold = 0;
               Stopwatch watch = Stopwatch.StartNew();
               Rectangle[] rects;
               using (CudaImage<Bgr, Byte> CudaImage = new CudaImage<Bgr, byte>(image))
               using (CudaImage<Bgra, Byte> gpuBgra = CudaImage.Convert<Bgra, Byte>())
               using (VectorOfRect vRect = new VectorOfRect())
               {
                  hog.DetectMultiScale(gpuBgra, vRect);
                  rects = vRect.ToArray();
               }
               watch.Stop();

               Assert.AreEqual(1, rects.Length);

               foreach (Rectangle rect in rects)
                  image.Draw(rect, new Bgr(Color.Red), 1);
               Trace.WriteLine(String.Format("HOG detection time: {0} ms", watch.ElapsedMilliseconds));

               //ImageViewer.Show(image, String.Format("Detection Time: {0}ms", watch.ElapsedMilliseconds));
            }
         }
      }
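      // Hedged CPU counterpart sketch (not part of the original test): the same pedestrian image run
      // through the non-CUDA HOGDescriptor. DetectMultiScale returning MCvObjectDetection[] is an
      // assumption about the Emgu CV 3.x API.
      public void TestHOG1Cpu()
      {
         using (HOGDescriptor hog = new HOGDescriptor())
         using (Image<Bgr, Byte> image = new Image<Bgr, byte>("pedestrian.png"))
         {
            hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
            MCvObjectDetection[] results = hog.DetectMultiScale(image);
            foreach (MCvObjectDetection detection in results)
               image.Draw(detection.Rect, new Bgr(Color.Red), 1);
            Trace.WriteLine(String.Format("CPU HOG detections: {0}", results.Length));
         }
      }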
Example #45
0
      public void TestClone()
      {
         if (CudaInvoke.HasCuda)
         {
            Image<Gray, Byte> img = new Image<Gray, byte>(300, 400);
            img.SetRandUniform(new MCvScalar(0.0), new MCvScalar(255.0));

            using (CudaImage<Gray, Byte> gImg1 = new CudaImage<Gray, byte>(img))
            using (CudaImage<Gray, Byte> gImg2 = gImg1.Clone(null))
            using (Image<Gray, Byte> img2 = gImg2.ToImage())
            {
               Assert.IsTrue(img.Equals(img2));
            }
         }
      }
Example #46
0
      public void TestResizeGray()
      {
         if (CudaInvoke.HasCuda)
         {
            Image<Gray, Byte> img = new Image<Gray, byte>(300, 400);
            img.SetRandUniform(new MCvScalar(0.0), new MCvScalar(255.0));

            //Image<Gray, Byte> img = new Image<Gray, byte>("airplane.jpg");

            Image<Gray, Byte> small = img.Resize(100, 200, Emgu.CV.CvEnum.Inter.Linear);
            CudaImage<Gray, Byte> gpuImg = new CudaImage<Gray, byte>(img);
            CudaImage<Gray, byte> smallGpuImg = new CudaImage<Gray, byte>(small.Size);
            CudaInvoke.Resize(gpuImg, smallGpuImg, small.Size);
            Image<Gray, Byte> diff = smallGpuImg.ToImage().AbsDiff(small);
            //ImageViewer.Show(smallGpuImg.ToImage());
            //ImageViewer.Show(small);
            //Assert.IsTrue(smallGpuImg.ToImage().Equals(small));
         }
      }
Example #47
0
      public void TestCudaWarpPerspective()
      {
         if (!CudaInvoke.HasCuda)
            return;
         Matrix<float> transformation = new Matrix<float>(3, 3);
         transformation.SetIdentity();

         Image<Gray, byte> image = new Image<Gray, byte>(480, 320);
         image.SetRandNormal(new MCvScalar(), new MCvScalar(255));

         using (GpuMat cudaImage = new GpuMat(image))
         using (CudaImage<Gray, Byte> resultCudaImage = new CudaImage<Gray, byte>())
         {
            CudaInvoke.WarpPerspective(cudaImage, resultCudaImage, transformation, cudaImage.Size, CvEnum.Inter.Cubic, CvEnum.BorderType.Default, new MCvScalar(), null);
         }
      }
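      // Hedged variant sketch (not part of the original test): replace the identity matrix with a
      // small translation and compare the CUDA warp against CvInvoke.WarpPerspective on the CPU.
      // The CPU overload used here (interpolation as the fifth parameter, remaining defaults) is an
      // assumption about the installed Emgu CV version.
      public void TestCudaWarpPerspectiveTranslation()
      {
         if (!CudaInvoke.HasCuda)
            return;
         Matrix<float> transformation = new Matrix<float>(3, 3);
         transformation.SetIdentity();
         transformation[0, 2] = 20.0f; // translate 20 pixels along x

         Image<Gray, byte> image = new Image<Gray, byte>(480, 320);
         image.SetRandNormal(new MCvScalar(), new MCvScalar(255));

         using (Image<Gray, byte> cpuResult = new Image<Gray, byte>(image.Size))
         using (GpuMat cudaImage = new GpuMat(image))
         using (CudaImage<Gray, Byte> cudaResult = new CudaImage<Gray, byte>())
         {
            CvInvoke.WarpPerspective(image, cpuResult, transformation, image.Size, CvEnum.Inter.Cubic);
            CudaInvoke.WarpPerspective(cudaImage, cudaResult, transformation, cudaImage.Size, CvEnum.Inter.Cubic, CvEnum.BorderType.Default, new MCvScalar(), null);
            Trace.WriteLine(String.Format("Non matching pixels: {0}",
               cudaResult.ToImage().AbsDiff(cpuResult).CountNonzero()[0]));
         }
      }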
Example #48
0
 public void TestCanny()
 {
    if (CudaInvoke.HasCuda)
    {
       using (Image<Bgr, Byte> image = new Image<Bgr, byte>("pedestrian.png"))
       using (CudaImage<Bgr, Byte> CudaImage = new CudaImage<Bgr, byte>(image))
       using (CudaImage<Gray, Byte> gray = CudaImage.Convert<Gray, Byte>())
       using (CudaImage<Gray, Byte> canny = new CudaImage<Gray,byte>(gray.Size))
       using (CudaCannyEdgeDetector detector = new CudaCannyEdgeDetector(20, 100, 3, false))
       {
          detector.Detect(gray, canny);
          //GpuInvoke.Canny(gray, canny, 20, 100, 3, false);
          //ImageViewer.Show(canny);
       }
    }
 }
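 // Hedged CPU counterpart sketch (not part of the original test): the same thresholds passed to
 // CvInvoke.Canny so the CUDA and CPU edge maps can be compared side by side.
 public void TestCannyCpu()
 {
    using (Image<Bgr, Byte> image = new Image<Bgr, byte>("pedestrian.png"))
    using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
    using (Image<Gray, Byte> canny = new Image<Gray, byte>(gray.Size))
    {
       CvInvoke.Canny(gray, canny, 20, 100, 3, false);
       //ImageViewer.Show(canny);
    }
 }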
Example #49
0
        public static void Detect(
        Mat image, String faceFileName, String eyeFileName, 
        List<Rectangle> faces, List<Rectangle> eyes, 
        bool tryUseCuda, bool tryUseOpenCL,
        out long detectionTime)
        {
            Stopwatch watch;

             #if !(IOS || NETFX_CORE)
             if (tryUseCuda && CudaInvoke.HasCuda)
             {
            using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
            using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName))
            {
               face.ScaleFactor = 1.1;
               face.MinNeighbors = 10;
               face.MinObjectSize = Size.Empty;
               eye.ScaleFactor = 1.1;
               eye.MinNeighbors = 10;
               eye.MinObjectSize = Size.Empty;
               watch = Stopwatch.StartNew();
               using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
               using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
               using (GpuMat region = new GpuMat())
               {
                  face.DetectMultiScale(gpuGray, region);
                  Rectangle[] faceRegion = face.Convert(region);
                  faces.AddRange(faceRegion);
                  foreach (Rectangle f in faceRegion)
                  {
                     using (CudaImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                     {
                        //For some reason a clone is required;
                        //this might be a bug in OpenCV's CudaCascadeClassifier.
                        using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
                        using (GpuMat eyeRegionMat = new GpuMat())
                        {
                           eye.DetectMultiScale(clone, eyeRegionMat);
                           Rectangle[] eyeRegion = eye.Convert(eyeRegionMat);
                           foreach (Rectangle e in eyeRegion)
                           {
                              Rectangle eyeRect = e;
                              eyeRect.Offset(f.X, f.Y);
                              eyes.Add(eyeRect);
                           }
                        }
                     }
                  }
               }
               watch.Stop();
            }
             }
             else
             #endif
             {
             //Many OpenCL functions require OpenCL-compatible GPU devices.
             //As of OpenCV 3.0-alpha, OpenCV will crash if OpenCL is enabled while only an OpenCL-compatible CPU device is present,
             //so we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only has CPU OpenCL devices).
            CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

            //Read the HaarCascade objects
            using (CascadeClassifier face = new CascadeClassifier(faceFileName))
            //using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
            {
               watch = Stopwatch.StartNew();
               using (UMat ugray = new UMat())
               {
                  CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                  //normalizes brightness and increases contrast of the image
                  CvInvoke.EqualizeHist(ugray, ugray);

                   //Detect the faces from the gray scale image and store the locations as rectangles.
                   //The first dimension is the channel;
                   //the second dimension is the index of the rectangle in the specific channel.
                  Rectangle[] facesDetected = face.DetectMultiScale(
                     ugray,
                     1.1,
                     2);

                  faces.AddRange(facesDetected);

                  //foreach (Rectangle f in facesDetected)
                  //{
                  //   //Get the region of interest on the faces
                  //   using (UMat faceRegion = new UMat(ugray, f))
                  //   {
                  //      Rectangle[] eyesDetected = eye.DetectMultiScale(
                  //         faceRegion,
                  //         1.1,
                  //         10,
                  //         new Size(20, 20));

                  //      foreach (Rectangle e in eyesDetected)
                  //      {
                  //         Rectangle eyeRect = e;
                  //         eyeRect.Offset(f.X, f.Y);
                  //         eyes.Add(eyeRect);
                  //      }
                  //   }
                  //}
               }
               watch.Stop();
            }
             }
             detectionTime = watch.ElapsedMilliseconds;
        }
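        // Hedged usage sketch for Detect above. The cascade file names are placeholders for the
        // standard OpenCV Haar cascades; CvInvoke.Rectangle is used only to visualize the results.
        public static void RunDetectExample(Mat frame)
        {
            List<Rectangle> faces = new List<Rectangle>();
            List<Rectangle> eyes = new List<Rectangle>();
            long detectionTime;
            Detect(frame, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                faces, eyes, true, true, out detectionTime);
            foreach (Rectangle face in faces)
                CvInvoke.Rectangle(frame, face, new Bgr(Color.Red).MCvScalar, 2);
            foreach (Rectangle eye in eyes)
                CvInvoke.Rectangle(frame, eye, new Bgr(Color.Blue).MCvScalar, 2);
            Console.WriteLine(String.Format("Face detection time: {0} ms", detectionTime));
        }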
Example #50
0
 public void TestCudaFlip()
 {
    if (CudaInvoke.HasCuda)
    {
       using (Image<Bgr, Byte> img1 = new Image<Bgr, byte>(1200, 640))
       {
          img1.SetRandUniform(new MCvScalar(0, 0, 0), new MCvScalar(255, 255, 255));
          using (Image<Bgr, Byte> img1Flip = img1.Flip(CvEnum.FlipType.Horizontal | CvEnum.FlipType.Vertical))
          using (CudaImage<Bgr, Byte> cudaImage = new CudaImage<Bgr, byte>(img1))
          using (CudaImage<Bgr, Byte> cudaFlip = new CudaImage<Bgr,byte>(img1.Size))
          {
             CudaInvoke.Flip(cudaImage, cudaFlip, CvEnum.FlipType.Horizontal | CvEnum.FlipType.Vertical, null);
             cudaFlip.Download(img1);
             Assert.IsTrue(img1.Equals(img1Flip));
          }
       }
    }
 }
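 // Hedged asynchronous variant sketch: the last argument of CudaInvoke.Flip above is a CUDA stream
 // (null means synchronous). Queuing the flip on an Emgu.CV.Cuda.Stream and waiting once is an
 // assumption about how the stream API is typically used; it is not part of the original test.
 public void TestCudaFlipAsync()
 {
    if (CudaInvoke.HasCuda)
    {
       using (Image<Bgr, Byte> img1 = new Image<Bgr, byte>(1200, 640))
       using (CudaImage<Bgr, Byte> cudaImage = new CudaImage<Bgr, byte>(img1))
       using (CudaImage<Bgr, Byte> cudaFlip = new CudaImage<Bgr, byte>(img1.Size))
       using (Emgu.CV.Cuda.Stream stream = new Emgu.CV.Cuda.Stream())
       {
          CudaInvoke.Flip(cudaImage, cudaFlip, CvEnum.FlipType.Horizontal | CvEnum.FlipType.Vertical, stream);
          stream.WaitForCompletion(); // block until the queued GPU work has finished
          cudaFlip.Download(img1);
       }
    }
 }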
Example #51
0
      public void TestResizeBgr()
      {
         if (CudaInvoke.HasCuda)
         {
            Image<Bgr, Byte> img = new Image<Bgr, byte>("pedestrian.png");
            //img.SetRandUniform(new MCvScalar(0.0, 0.0, 0.0), new MCvScalar(255.0, 255.0, 255.0));

            Size size = new Size(100, 200);

            CudaImage<Bgr, Byte> cudaImg = new CudaImage<Bgr, byte>(img);
            CudaImage<Bgr, byte> smallCudaImg = new CudaImage<Bgr, byte>(size);

            CudaInvoke.Resize(cudaImg, smallCudaImg, size);
            Image<Bgr, Byte> smallCpuImg = img.Resize(size.Width, size.Height, Emgu.CV.CvEnum.Inter.Linear);


            Image<Bgr, Byte> diff = smallCudaImg.ToImage().AbsDiff(smallCpuImg);
            //TODO: Check why they are not an exact match
            //Assert.IsTrue(diff.CountNonzero()[0] == 0);
            //ImageViewer.Show(smallGpuImg.ToImage());
            //ImageViewer.Show(small);
         }
      }
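      // Hedged follow-up for the TODO above: GPU and CPU linear interpolation can round differently,
      // so checking the per-channel maximum difference against a small tolerance is usually more
      // meaningful than the exact-match assert that is commented out.
      private static void AssertAlmostEqual(Image<Bgr, Byte> expected, Image<Bgr, Byte> actual, double tolerance)
      {
         double[] minVals, maxVals;
         Point[] minLocs, maxLocs;
         expected.AbsDiff(actual).MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
         for (int i = 0; i < maxVals.Length; i++)
            Assert.LessOrEqual(maxVals[i], tolerance);
      }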
Example #52
0
      public void TestBilateralFilter()
      {
         
         if (CudaInvoke.HasCuda)
         {
            Image<Bgr, Byte> img = new Image<Bgr, byte>("pedestrian.png");
            Image<Gray, byte> gray = img.Convert<Gray, Byte>();
            CudaImage<Gray, Byte> CudaImage = new CudaImage<Gray, byte>(gray);
            
             CudaImage<Gray, Byte> gpuBilateral = new CudaImage<Gray, byte>(CudaImage.Size);
             CudaInvoke.BilateralFilter(CudaImage, gpuBilateral, 7, 5, 5, CvEnum.BorderType.Default, null);

             //Emgu.CV.UI.ImageViewer.Show(gray.ConcateHorizontal(gpuBilateral.ToImage()));
         }
      }
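      // Hedged CPU counterpart sketch (not part of the original test): the same kernel diameter and
      // sigmas passed to CvInvoke.BilateralFilter for a side-by-side comparison with the CUDA result.
      public void TestBilateralFilterCpu()
      {
         Image<Bgr, Byte> img = new Image<Bgr, byte>("pedestrian.png");
         Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
         Image<Gray, Byte> cpuBilateral = new Image<Gray, byte>(gray.Size);
         CvInvoke.BilateralFilter(gray, cpuBilateral, 7, 5, 5, CvEnum.BorderType.Default);

         //Emgu.CV.UI.ImageViewer.Show(gray.ConcateHorizontal(cpuBilateral));
      }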
Example #53
0
      public void TestCudaPyr()
      {
         if (!CudaInvoke.HasCuda)
            return;
         Image<Gray, Byte> img = new Image<Gray, byte>(640, 480);
         img.SetRandUniform(new MCvScalar(), new MCvScalar(255, 255, 255));
         Image<Gray, Byte> down = img.PyrDown();
         Image<Gray, Byte> up = down.PyrUp();

         CudaImage<Gray, Byte> gImg = new CudaImage<Gray, byte>(img);
         CudaImage<Gray, Byte> gDown = new CudaImage<Gray, byte>(img.Size.Width >> 1, img.Size.Height >> 1);
         CudaImage<Gray, Byte> gUp = new CudaImage<Gray, byte>(img.Size);
         CudaInvoke.PyrDown(gImg, gDown, null);
         CudaInvoke.PyrUp(gDown, gUp, null);

         CvInvoke.AbsDiff(down, gDown.ToImage(), down);
         CvInvoke.AbsDiff(up, gUp.ToImage(), up);
         double[] minVals, maxVals;
         Point[] minLocs, maxLocs;
         down.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
         double maxVal = 0.0;
         for (int i = 0; i < maxVals.Length; i++)
         {
            if (maxVals[i] > maxVal)
               maxVal = maxVals[i];
         }
         Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
         Assert.LessOrEqual(maxVal, 1.0);

         up.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
         maxVal = 0.0;
         for (int i = 0; i < maxVals.Length; i++)
         {
            if (maxVals[i] > maxVal)
               maxVal = maxVals[i];
         }
         Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
         Assert.LessOrEqual(maxVal, 1.0);
      }
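      // Hedged simplification sketch: assuming System.Linq is acceptable in this test project, each
      // max-of-array loop in TestCudaPyr above reduces to a single expression.
      private static double MaxOf(double[] values)
      {
         return values.Max(); // equivalent to the explicit for-loop used above
      }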