public void TestHOG1() { if (CudaInvoke.HasCuda) { using (CudaHOGDescriptor hog = new CudaHOGDescriptor()) using (Image <Bgr, Byte> image = new Image <Bgr, byte>("pedestrian.png")) { float[] pedestrianDescriptor = CudaHOGDescriptor.GetDefaultPeopleDetector(); hog.SetSVMDetector(pedestrianDescriptor); Stopwatch watch = Stopwatch.StartNew(); Rectangle[] rects; using (CudaImage <Bgr, Byte> CudaImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Bgra, Byte> gpuBgra = CudaImage.Convert <Bgra, Byte>()) rects = hog.DetectMultiScale(gpuBgra); watch.Stop(); Assert.AreEqual(1, rects.Length); foreach (Rectangle rect in rects) { image.Draw(rect, new Bgr(Color.Red), 1); } Trace.WriteLine(String.Format("HOG detection time: {0} ms", watch.ElapsedMilliseconds)); //ImageViewer.Show(image, String.Format("Detection Time: {0}ms", watch.ElapsedMilliseconds)); } } }
public void TestHOG1() { if (CudaInvoke.HasCuda) { using (CudaHOG hog = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8), 9)) using (Mat pedestrianDescriptor = hog.GetDefaultPeopleDetector()) using (Image <Bgr, Byte> image = new Image <Bgr, byte>("pedestrian.png")) { hog.SetSVMDetector(pedestrianDescriptor); //hog.GroupThreshold = 0; Stopwatch watch = Stopwatch.StartNew(); Rectangle[] rects; using (CudaImage <Bgr, Byte> CudaImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Bgra, Byte> gpuBgra = CudaImage.Convert <Bgra, Byte>()) using (VectorOfRect vRect = new VectorOfRect()) { hog.DetectMultiScale(gpuBgra, vRect); rects = vRect.ToArray(); } watch.Stop(); Assert.AreEqual(1, rects.Length); foreach (Rectangle rect in rects) { image.Draw(rect, new Bgr(Color.Red), 1); } Trace.WriteLine(String.Format("HOG detection time: {0} ms", watch.ElapsedMilliseconds)); //ImageViewer.Show(image, String.Format("Detection Time: {0}ms", watch.ElapsedMilliseconds)); } } }
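For reference, the constructor arguments used in the test above follow the standard OpenCV HOG people-detector layout; a commented sketch of the same construction (the mapping to winSize/blockSize/blockStride/cellSize/nbins is stated as an assumption about the CudaHOG constructor order):

using (CudaHOG hog = new CudaHOG(
    new Size(64, 128), //detection window size expected by the default people detector
    new Size(16, 16),  //block size
    new Size(8, 8),    //block stride
    new Size(8, 8),    //cell size
    9))                //number of orientation histogram bins
{
    //configure and run exactly as in the test above
}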
public void TestCudaFASTDetector() { if (!CudaInvoke.HasCuda) { return; } using (Image <Bgr, Byte> img = new Image <Bgr, byte>("box.png")) using (CudaImage <Bgr, Byte> CudaImage = new CudaImage <Bgr, byte>(img)) using (CudaImage <Gray, Byte> grayCudaImage = CudaImage.Convert <Gray, Byte>()) using (CudaFastFeatureDetector featureDetector = new CudaFastFeatureDetector(10, true, FastDetector.DetectorType.Type9_16, 1000)) using (VectorOfKeyPoint kpts = new VectorOfKeyPoint()) using (GpuMat keyPointsMat = new GpuMat()) { featureDetector.DetectAsync(grayCudaImage, keyPointsMat); featureDetector.Convert(keyPointsMat, kpts); //featureDetector.DetectKeyPointsRaw(grayCudaImage, null, keyPointsMat); //featureDetector.DownloadKeypoints(keyPointsMat, kpts); foreach (MKeyPoint kpt in kpts.ToArray()) { img.Draw(new CircleF(kpt.Point, 3.0f), new Bgr(0, 255, 0), 1); } //ImageViewer.Show(img); } }
public static void Detect( Mat image, String faceFileName, List <Rectangle> faces, bool tryUseCuda, out long detectionTime) { Stopwatch watch; #if !(__IOS__ || NETFX_CORE) if (tryUseCuda && CudaInvoke.HasCuda) { using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; watch = Stopwatch.StartNew(); using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); } watch.Stop(); } } else #endif { //Read the HaarCascade objects using (CascadeClassifier face = new CascadeClassifier(faceFileName)) { watch = Stopwatch.StartNew(); using (UMat ugray = new UMat()) { CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray); //normalizes brightness and increases contrast of the image CvInvoke.EqualizeHist(ugray, ugray); //Detect the faces from the gray scale image and store the locations as rectangle //The first dimensional is the channel //The second dimension is the index of the rectangle in the specific channel Rectangle[] facesDetected = face.DetectMultiScale( ugray, 1.1, 10, new Size(20, 20)); faces.AddRange(facesDetected); } watch.Stop(); } } detectionTime = watch.ElapsedMilliseconds; }
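A minimal caller sketch for the Detect helper above, assuming it is invoked from the same class. The image and cascade file names are placeholders, and the drawing/reporting steps mirror the commented-out usage that appears further down in this collection.

using System;
using System.Collections.Generic;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.Structure;

static void RunFaceDetection()
{
    //Hypothetical input image and cascade file; substitute your own assets.
    using (Image<Bgr, Byte> img = new Image<Bgr, byte>("test.jpg"))
    {
        List<Rectangle> faces = new List<Rectangle>();
        long detectionTime;
        Detect(img.Mat, "haarcascade_frontalface_default.xml", faces, true, out detectionTime);

        foreach (Rectangle face in faces)
            img.Draw(face, new Bgr(Color.Red), 2);

        Console.WriteLine("Detected {0} face(s) in {1} ms", faces.Count, detectionTime);
    }
}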
public void TestColorConvert() { if (CudaInvoke.HasCuda) { Image <Bgr, Byte> img = new Image <Bgr, byte>(300, 400); img.SetRandUniform(new MCvScalar(0.0, 0.0, 0.0), new MCvScalar(255.0, 255.0, 255.0)); Image <Gray, Byte> imgGray = img.Convert <Gray, Byte>(); Image <Hsv, Byte> imgHsv = img.Convert <Hsv, Byte>(); CudaImage <Bgr, Byte> gpuImg = new CudaImage <Bgr, Byte>(img); CudaImage <Gray, Byte> gpuImgGray = gpuImg.Convert <Gray, Byte>(); CudaImage <Hsv, Byte> gpuImgHsv = gpuImg.Convert <Hsv, Byte>(); Assert.IsTrue(gpuImgGray.Equals(new CudaImage <Gray, Byte>(imgGray))); Assert.IsTrue(gpuImgHsv.ToImage().Equals(imgHsv)); Assert.IsTrue(gpuImgHsv.Equals(new CudaImage <Hsv, Byte>(imgHsv))); } }
public static void Detect( Mat image, String faceFileName, String eyeFileName, List <Rectangle> faces, List <Rectangle> eyes, bool tryUseCuda) { #if !(__IOS__ || NETFX_CORE) if (tryUseCuda && CudaInvoke.HasCuda) { if (face == null) { face = new CudaCascadeClassifier(faceFileName); } if (eye == null) { eye = new CudaCascadeClassifier(eyeFileName); } //using (face) //using (eye) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; eye.ScaleFactor = 1.1; eye.MinNeighbors = 10; eye.MinObjectSize = Size.Empty; using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); foreach (Rectangle f in faceRegion) { using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (CudaImage <Gray, Byte> clone = faceImg.Clone(null)) using (GpuMat eyeRegionMat = new GpuMat()) { eye.DetectMultiScale(clone, eyeRegionMat); Rectangle[] eyeRegion = eye.Convert(eyeRegionMat); foreach (Rectangle e in eyeRegion) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } } } } #endif }
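Unlike the other variants in this collection, this Detect reuses its CUDA classifiers across calls through the face and eye fields instead of constructing them per frame. A minimal sketch of the assumed declarations (whether they are static or instance fields is not visible in the snippet, so this is an assumption):

//Cached CUDA cascade classifiers, created lazily on the first call to Detect
//and reused on subsequent frames to avoid reloading the cascade files.
private static CudaCascadeClassifier face = null;
private static CudaCascadeClassifier eye = null;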
/// <summary>
/// Detect a sign using the Haar cascade method
/// </summary>
/// <param name="image">Source image</param>
/// <param name="singFileName">Path to the cascade file</param>
/// <param name="sings">List of signs found in the image</param>
/// <param name="detectionTime">Detection time</param>
public void Detect(IInputArray image, String singFileName, List<Rectangle> sings, out long detectionTime)
{
    Stopwatch watch;
    using (InputArray iaImage = image.GetInputArray())
    {
        if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
        {
            using (CudaCascadeClassifier sing = new CudaCascadeClassifier(singFileName))
            {
                sing.ScaleFactor = 1.1;          //Scale factor
                sing.MinNeighbors = 10;          //Grouping of pre-detected events; the lower the value, the more false alarms
                sing.MinObjectSize = Size.Empty; //Minimum size
                watch = Stopwatch.StartNew();    //Timer
                //Convert the image to grayscale and prepare the region with possible sign occurrences
                using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
                using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
                using (GpuMat region = new GpuMat())
                {
                    sing.DetectMultiScale(gpuGray, region);
                    Rectangle[] singRegion = sing.Convert(region);
                    sings.AddRange(singRegion);
                }
                watch.Stop();
            }
        }
        else
        {
            //Read the HaarCascade
            using (CascadeClassifier sing = new CascadeClassifier(singFileName))
            {
                watch = Stopwatch.StartNew();
                using (UMat ugray = new UMat())
                {
                    CvInvoke.CvtColor(image, ugray, ColorConversion.Bgr2Gray);
                    //Normalize brightness and increase contrast
                    CvInvoke.EqualizeHist(ugray, ugray);
                    //Detect the sign in the grayscale image and store the location as a rectangle
                    Rectangle[] singsDetected = sing.DetectMultiScale(
                        ugray,             //Source image
                        1.1,               //Image scale factor
                        10,                //Grouping of pre-detected events; the lower the value, the more false alarms
                        new Size(20, 20)); //Minimum size
                    sings.AddRange(singsDetected);
                }
                watch.Stop();
            }
        }
    }
    detectionTime = watch.ElapsedMilliseconds;
}
private void ProcessFrame(object sender, EventArgs e)
{
    //Dispose the per-frame buffers so repeated calls do not leak native memory.
    using (Mat mat = new Mat())
    using (CudaImage<Bgr, Byte> gpuImg = new CudaImage<Bgr, byte>())
    using (GpuMat region = new GpuMat())
    {
        this.webcam.Read(mat);
        gpuImg.Upload(mat);
        using (CudaImage<Gray, Byte> grayImg = gpuImg.Convert<Gray, Byte>())
        {
            haarCascade.DetectMultiScale(grayImg, region);
        }
        Rectangle[] faceRegion = haarCascade.Convert(region);
        Rectangle face;
        if (faceRegion.Length > 0 && faceRegion[0].Width > 0)
        {
            if (!IsRegionValid(faceRegion[0]))
            {
                return;
            }
            face = faceRegion[0];
            float meterPerPxl = (userFaceSize / face.Width) / 100f;
            this._userPosition.x = -(face.X + (face.Width / 2) - (camWidth / 2)) * ((userFaceSize / face.Width) / 100);
            this._userPosition.y = -(face.Y + (face.Height / 2) - (camHeight / 2)) * ((userFaceSize / face.Width) / 100);
            this._userPosition.z = -camDistanceRatio * ((userFaceSize / face.Width) / 100);
            currentFace = face;
            this.newFaceDetected = true;
        }
        else
        {
            currentFace.Width = -1;
        }
        /*if (webcamFeedbackEnabled) {
         * var img = mat.ToImage<Bgr, byte>();
         * for (int i = 0; i < faceRegion.Length; i++) {
         *     if (i == 0)
         *         img.Draw(face, new Bgr(255, 255, 0), 4);
         *     else
         *         img.Draw(faceRegion[i], new Bgr(0, 255, 255), 4);
         * }
         *
         * Dispatcher.InvokeAsync(() => {
         *     Debug.Log(img.Convert<Rgb, byte>().Bytes.Length);
         *     currentFrame.LoadRawTextureData(img.Convert<Rgb, byte>().Bytes);
         *     currentFrame.Apply();
         *     img.Dispose();
         * });
         * }*/
    }
}
public static void Detect(IInputArray image, List <Rectangle> faces) { string faceFileName = @"./Resources/haarcascade_frontalface_default.xml"; using (InputArray iaImage = image.GetInputArray()) { #if !(__IOS__ || NETFX_CORE) if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda) { using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); } } } else #endif { //Read the HaarCascade objects using (CascadeClassifier face = new CascadeClassifier(faceFileName)) { using (UMat ugray = new UMat()) { CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray); //normalizes brightness and increases contrast of the image CvInvoke.EqualizeHist(ugray, ugray); //Detect the faces from the gray scale image and store the locations as rectangle //The first dimensional is the channel //The second dimension is the index of the rectangle in the specific channel Rectangle[] facesDetected = face.DetectMultiScale( ugray, 1.1, 10, new Size(20, 20)); faces.AddRange(facesDetected); } } } } }
/// <summary>
/// Find the pedestrian in the image
/// </summary>
/// <param name="image">The image</param>
/// <param name="processingTime">The pedestrian detection time in milliseconds</param>
/// <returns>The image with pedestrian highlighted.</returns>
public static Image<Bgr, Byte> Find(Image<Bgr, Byte> image, out long processingTime)
{
    Stopwatch watch;
    Rectangle[] regions;
    //check if there is a compatible GPU to run pedestrian detection
    if (CudaInvoke.HasCuda)
    {
        //this is the GPU version
        using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8)))
        {
            des.SetSVMDetector(des.GetDefaultPeopleDetector());
            watch = Stopwatch.StartNew();
            using (CudaImage<Bgr, Byte> gpuImg = new CudaImage<Bgr, byte>(image))
            //the CUDA HOG detector expects a BGRA image; Convert already performs the BGR to BGRA conversion
            using (CudaImage<Bgra, Byte> gpuBgra = gpuImg.Convert<Bgra, Byte>())
            using (VectorOfRect vr = new VectorOfRect())
            {
                des.DetectMultiScale(gpuBgra, vr);
                regions = vr.ToArray();
            }
        }
    }
    else
    {
        //this is the CPU version
        using (HOGDescriptor des = new HOGDescriptor())
        {
            des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
            //load the image into a UMat so it will automatically use OpenCL if it is available
            using (UMat umat = image.ToUMat())
            {
                watch = Stopwatch.StartNew();
                //regions = des.DetectMultiScale(image);
                MCvObjectDetection[] results = des.DetectMultiScale(umat);
                regions = new Rectangle[results.Length];
                for (int i = 0; i < results.Length; i++)
                    regions[i] = results[i].Rect;
            }
        }
    }
    watch.Stop();
    processingTime = watch.ElapsedMilliseconds;

    foreach (Rectangle pedestrian in regions)
    {
        image.Draw(pedestrian, new Bgr(Color.Red), 1);
    }
    return image;
}
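A minimal usage sketch for Find, assuming a test image named "pedestrian.png" in the working directory (the file names are illustrative):

using System;
using Emgu.CV;
using Emgu.CV.Structure;

static void RunPedestrianDetection()
{
    //Hypothetical input image; any BGR image will do.
    using (Image<Bgr, Byte> image = new Image<Bgr, byte>("pedestrian.png"))
    {
        long processingTime;
        //Find draws the detections onto the input image and returns the same instance.
        Image<Bgr, Byte> result = Find(image, out processingTime);
        Console.WriteLine("Pedestrian detection took {0} ms", processingTime);
        result.Save("pedestrian_result.png");
    }
}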
public void TestCanny() { if (CudaInvoke.HasCuda) { using (Image <Bgr, Byte> image = new Image <Bgr, byte>("pedestrian.png")) using (CudaImage <Bgr, Byte> CudaImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Gray, Byte> gray = CudaImage.Convert <Gray, Byte>()) using (CudaImage <Gray, Byte> canny = new CudaImage <Gray, byte>(gray.Size)) using (CudaCannyEdgeDetector detector = new CudaCannyEdgeDetector(20, 100, 3, false)) { detector.Detect(gray, canny); //GpuInvoke.Canny(gray, canny, 20, 100, 3, false); //ImageViewer.Show(canny); } } }
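To inspect or save the GPU result on the CPU side, the CudaImage can be downloaded with ToImage(); a minimal fragment assuming the canny image produced by the detector above is still in scope (the output file name is illustrative):

//Download the GPU edge map into a CPU image before saving or displaying it.
using (Image<Gray, Byte> cpuCanny = canny.ToImage())
{
    cpuCanny.Save("canny_edges.png");
}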
IImage CudaDetect(IImage original, List <Rectangle> faces, List <Rectangle> eyes) { using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName)) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; eye.ScaleFactor = 1.1; eye.MinNeighbors = 10; eye.MinObjectSize = Size.Empty; using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(original)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); foreach (Rectangle f in faceRegion) { using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (CudaImage <Gray, Byte> clone = faceImg.Clone(null)) using (GpuMat eyeRegionMat = new GpuMat()) { eye.DetectMultiScale(clone, eyeRegionMat); Rectangle[] eyeRegion = eye.Convert(eyeRegionMat); foreach (Rectangle e in eyeRegion) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } } } IImage copy = CopyAndDraw(original, faces.ToArray()); copy = CopyAndDraw(copy, eyes.ToArray()); return(copy); //return eyes; }
public List<Rectangle> FilterImage(Image<Bgr, byte> currentFrame)
{
    using (GpuMat faceRegionMat = new GpuMat())
    using (Image<Gray, byte> gray = currentFrame.Convert<Gray, byte>())
    using (CudaImage<Gray, byte> cudaIMG = new CudaImage<Gray, byte>(gray))
    {
        List<Rectangle> rectangles = new List<Rectangle>();
        foreach (CudaCascadeClassifier F in filters)
        {
            //The image is already grayscale, so pass it to each classifier directly
            //instead of converting (and leaking) a new CudaImage on every iteration.
            F.DetectMultiScale(cudaIMG, faceRegionMat);
            Rectangle[] detectedSubjects = F.Convert(faceRegionMat);
            foreach (Rectangle R in detectedSubjects)
            {
                rectangles.Add(R);
            }
        }
        return rectangles;
    }
}
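FilterImage iterates a pre-built filters collection that is not shown in the snippet. A hypothetical initialization consistent with that usage (the collection type and cascade file names are assumptions; in practice the classifiers would only be constructed when CudaInvoke.HasCuda is true):

//Hypothetical classifier collection consumed by FilterImage above.
private readonly List<CudaCascadeClassifier> filters = new List<CudaCascadeClassifier>
{
    new CudaCascadeClassifier("haarcascade_frontalface_default.xml"),
    new CudaCascadeClassifier("haarcascade_eye.xml")
};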
public Rectangle[] FindFaces(Mat frame, ref int type) { if (CudaInvoke.HasCuda && Global.useCuda) { using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(frame)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>()) using (GpuMat region = new GpuMat()) { cuda_ccFace.DetectMultiScale(gpuGray, region); Rectangle[] faces = cuda_ccFace.Convert(region); if (faces.Length == 0) { cuda_ccSideFace.DetectMultiScale(gpuGray, region); faces = cuda_ccSideFace.Convert(region); if (faces.Length == 0) { Image <Gray, byte> grayImage = gpuGray.ToImage(); faces = ccAltFace.DetectMultiScale(grayImage, 1.02, 5, cuda_ccFace.MinObjectSize); if (faces.Length != 0) { type = 3; } } else { type = 2; } } else { type = 1; } return(faces); } } else { return(FindFaces_WithoutGPU(frame, ref type)); } //return null; }
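A minimal caller fragment for FindFaces, assuming a Mat frame captured elsewhere. The type codes follow the assignments inside the method above: 1 for the frontal cascade on the GPU, 2 for the profile cascade on the GPU, 3 for the alternative frontal cascade on the CPU.

//frame: a Mat captured elsewhere (assumed to be in scope).
int detectorType = 0;
Rectangle[] faces = FindFaces(frame, ref detectorType);
//detectorType keeps its initial value when none of the cascades found a face.
Console.WriteLine("Found {0} face(s) using detector type {1}", faces.Length, detectorType);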
public void Detect(
   IInputArray image, List<Rectangle> faces, List<Rectangle> eyes)
{
   using (InputArray iaImage = image.GetInputArray())
   {
#if !(__IOS__ || NETFX_CORE)
      if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
      {
         // Processing with CUDA
         using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
         using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
         using (GpuMat region = new GpuMat())
         {
            faceCuda.DetectMultiScale(gpuGray, region);
            Rectangle[] faceRegion = faceCuda.Convert(region);
            faces.AddRange(faceRegion);
            /*foreach (Rectangle f in faceRegion)
             * {
             *    using (CudaImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
             *    {
             *       //For some reason a clone is required.
             *       //Might be a bug of CudaCascadeClassifier in opencv
             *       using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
             *       using (GpuMat eyeRegionMat = new GpuMat())
             *       {
             *          eyeCuda.DetectMultiScale(clone, eyeRegionMat);
             *          Rectangle[] eyeRegion = eyeCuda.Convert(eyeRegionMat);
             *          foreach (Rectangle e in eyeRegion)
             *          {
             *             Rectangle eyeRect = e;
             *             eyeRect.Offset(f.X, f.Y);
             *             eyes.Add(eyeRect);
             *          }
             *       }
             *    }
             * }*/
         }
      }
      else
#endif
      {
         // Processing without CUDA
         using (UMat ugray = new UMat())
         {
            CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

            //normalizes brightness and increases contrast of the image
            CvInvoke.EqualizeHist(ugray, ugray);

            //Detect the faces from the gray scale image and store the locations as rectangles
            //The first dimension is the channel
            //The second dimension is the index of the rectangle in the specific channel
            Rectangle[] facesDetected = faceCpu.DetectMultiScale(
               ugray,
               1.1,
               10,
               new Size(20, 20));

            faces.AddRange(facesDetected);
            foreach (Rectangle f in facesDetected)
            {
               //Get the region of interest on the faces
               using (UMat faceRegion = new UMat(ugray, f))
               {
                  Rectangle[] eyesDetected = eyeCpu.DetectMultiScale(
                     faceRegion,
                     1.1,
                     10,
                     new Size(20, 20));
                  foreach (Rectangle e in eyesDetected)
                  {
                     Rectangle eyeRect = e;
                     eyeRect.Offset(f.X, f.Y);
                     eyes.Add(eyeRect);
                  }
               }
            }
         }
      }
   }
}
public static void DetectFace( Mat image, bool detectEyes, List <Rectangle> faces, List <Rectangle> eyes, out long detectionTime) { Stopwatch watch; String faceFileName = Application.dataPath + "\\Emgu\\haarcascades\\haarcascade_frontalface_default.xml"; String eyeFileName = Application.dataPath + "\\Emgu\\haarcascade_eye.xml"; if (!detectEyes) { eyeFileName = ""; } #if !(IOS || NETFX_CORE) if (CudaInvoke.HasCuda) { CudaCascadeClassifier face = null; if (faceFileName != "") { face = new CudaCascadeClassifier(faceFileName); } CudaCascadeClassifier eye = null; if (eyeFileName != "") { eye = new CudaCascadeClassifier(eyeFileName); } { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; if (eye != null) { eye.ScaleFactor = 1.1; eye.MinNeighbors = 10; eye.MinObjectSize = Size.Empty; } watch = Stopwatch.StartNew(); using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte> (image)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte> ()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); if (eye != null) { foreach (Rectangle f in faceRegion) { using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (CudaImage <Gray, Byte> clone = faceImg.Clone(null)) using (GpuMat eyeRegionMat = new GpuMat()) { eye.DetectMultiScale(clone, eyeRegionMat); Rectangle[] eyeRegion = eye.Convert(eyeRegionMat); foreach (Rectangle e in eyeRegion) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } } } watch.Stop(); detectionTime = watch.ElapsedMilliseconds; } } else #endif { detectionTime = 0; } }
//The cuda cascade classifier doesn't seem to be able to load "haarcascade_frontalface_default.xml" file in this release //disabling CUDA module for now //bool tryUseCuda = false; //FaceDetection.DetectFaceAndEyes( // image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", // faces, eyes, // tryUseCuda, // out detectionTime); //foreach (Rectangle face in faces) // CvInvoke.Rectangle(image, face, new Bgr(Color.Red).MCvScalar, 2); //foreach (Rectangle eye in eyes) // CvInvoke.Rectangle(image, eye, new Bgr(Color.Blue).MCvScalar, 2); ////display the image //ImageViewer.Show(image, String.Format( // "Completed face and eye detection using {0} in {1} milliseconds", // (tryUseCuda && CudaInvoke.HasCuda) ? "GPU" // : CvInvoke.UseOpenCL ? "OpenCL" // : "CPU", // detectionTime)); public static void DetectFaceAndEyes( Mat image, String faceFileName, String eyeFileName, List <Rectangle> faces, List <Rectangle> eyes, bool tryUseCuda, out long detectionTime) { Stopwatch watch; #if !(__IOS__ || NETFX_CORE) if (tryUseCuda && CudaInvoke.HasCuda) { using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName)) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; eye.ScaleFactor = 1.1; eye.MinNeighbors = 10; eye.MinObjectSize = Size.Empty; watch = Stopwatch.StartNew(); using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); foreach (Rectangle f in faceRegion) { using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (CudaImage <Gray, Byte> clone = faceImg.Clone(null)) using (GpuMat eyeRegionMat = new GpuMat()) { eye.DetectMultiScale(clone, eyeRegionMat); Rectangle[] eyeRegion = eye.Convert(eyeRegionMat); foreach (Rectangle e in eyeRegion) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } } watch.Stop(); } } else #endif { //Read the HaarCascade objects using (CascadeClassifier face = new CascadeClassifier(faceFileName)) using (CascadeClassifier eye = new CascadeClassifier(eyeFileName)) { watch = Stopwatch.StartNew(); using (UMat ugray = new UMat()) { CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray); //normalizes brightness and increases contrast of the image CvInvoke.EqualizeHist(ugray, ugray); //Detect the faces from the gray scale image and store the locations as rectangle //The first dimensional is the channel //The second dimension is the index of the rectangle in the specific channel Rectangle[] facesDetected = face.DetectMultiScale( ugray, 1.1, 10, new Size(20, 20)); faces.AddRange(facesDetected); foreach (Rectangle f in facesDetected) { //Get the region of interest on the faces using (UMat faceRegion = new UMat(ugray, f)) { Rectangle[] eyesDetected = eye.DetectMultiScale( faceRegion, 1.1, 10, new Size(20, 20)); foreach (Rectangle e in eyesDetected) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } watch.Stop(); } } detectionTime = watch.ElapsedMilliseconds; }
public List<Face> FindFaces(Image<Bgr, byte> image, string faceFileName, string eyeFileName, double scale, int neighbors, int minSize) { List<Face> faces = new List<Face>(); List<Rectangle> facesRect = new List<Rectangle>(); List<Rectangle> eyesRect = new List<Rectangle>(); try { //Console.WriteLine(" FaceDetectGPU FindFaces faceFileName=" + faceFileName + " cuda = " + CudaInvoke.HasCuda); using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) { using (CudaImage<Bgr, Byte> CudaImage = new CudaImage<Bgr, byte>(image)) using (CudaImage<Gray, Byte> CudaGray = CudaImage.Convert<Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(CudaGray, region); Rectangle[] faceRegion = face.Convert(region); facesRect.AddRange(faceRegion); foreach (Rectangle f in faceRegion) { using (CudaImage<Gray, Byte> faceImg = CudaGray.GetSubRect(f)) { using (CudaImage<Gray, Byte> clone = faceImg.Clone(null)) { Face facemodel = new Face(); eyesRect = new List<Rectangle>(FindEyes(eyeFileName, clone)); if (eyesRect != null) { facemodel.EyesRects = eyesRect; facemodel.EyesCount = eyesRect.Count; } else { continue; } facemodel.FaceImage = clone.Bitmap; facemodel.Height = facemodel.FaceImage.Height; facemodel.Width = facemodel.FaceImage.Width; facemodel.FaceRect = f; facemodel.FramePosX = f.X; facemodel.FramePosY = f.Y; facemodel.ImageFrameSize = image.Size; Gray avgf = new Gray(); MCvScalar avstd = new MCvScalar(); clone.ToImage().AvgSdv(out avgf, out avstd); facemodel.StdDev = avstd.V0; faces.Add(facemodel); if (facemodel.FaceScore > 39) Console.WriteLine("FaceDetect USING gpuCUDA Add faceModel" + facemodel.FaceScore); break; } } } } } } catch (Exception cudaerrJones) { Console.WriteLine("cudaerrJones = " + cudaerrJones); } return faces; }
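FindFaces populates a Face model whose shape is only implied by the property accesses above. A hypothetical minimal declaration consistent with those accesses (all property types are assumptions inferred from usage):

using System.Collections.Generic;
using System.Drawing;

//Hypothetical Face model inferred from the properties used in FindFaces.
public class Face
{
    public List<Rectangle> EyesRects { get; set; }
    public int EyesCount { get; set; }
    public Bitmap FaceImage { get; set; }
    public int Height { get; set; }
    public int Width { get; set; }
    public Rectangle FaceRect { get; set; }
    public int FramePosX { get; set; }
    public int FramePosY { get; set; }
    public Size ImageFrameSize { get; set; }
    public double StdDev { get; set; }
    public double FaceScore { get; set; }
}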
public static void Detect( Mat image, String faceFileName, String eyeFileName, List<Rectangle> faces, List<Rectangle> eyes, bool tryUseCuda, bool tryUseOpenCL, out long detectionTime) { Stopwatch watch; #if !(IOS || NETFX_CORE) if (tryUseCuda && CudaInvoke.HasCuda) { using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName)) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; eye.ScaleFactor = 1.1; eye.MinNeighbors = 10; eye.MinObjectSize = Size.Empty; watch = Stopwatch.StartNew(); using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image)) using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); foreach (Rectangle f in faceRegion) { using (CudaImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (CudaImage<Gray, Byte> clone = faceImg.Clone(null)) using (GpuMat eyeRegionMat = new GpuMat()) { eye.DetectMultiScale(clone, eyeRegionMat); Rectangle[] eyeRegion = eye.Convert(eyeRegionMat); foreach (Rectangle e in eyeRegion) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } } watch.Stop(); } } else #endif { //Many opencl functions require opencl compatible gpu devices. //As of opencv 3.0-alpha, opencv will crash if opencl is enable and only opencv compatible cpu device is presented //So we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only have cpu opencl devices). CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice; //Read the HaarCascade objects using (CascadeClassifier face = new CascadeClassifier(faceFileName)) //using (CascadeClassifier eye = new CascadeClassifier(eyeFileName)) { watch = Stopwatch.StartNew(); using (UMat ugray = new UMat()) { CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray); //normalizes brightness and increases contrast of the image CvInvoke.EqualizeHist(ugray, ugray); //Detect the faces from the gray scale image and store the locations as rectangle //The first dimensional is the channel //The second dimension is the index of the rectangle in the specific channel Rectangle[] facesDetected = face.DetectMultiScale( ugray, 1.1, 2); faces.AddRange(facesDetected); //foreach (Rectangle f in facesDetected) //{ // //Get the region of interest on the faces // using (UMat faceRegion = new UMat(ugray, f)) // { // Rectangle[] eyesDetected = eye.DetectMultiScale( // faceRegion, // 1.1, // 10, // new Size(20, 20)); // foreach (Rectangle e in eyesDetected) // { // Rectangle eyeRect = e; // eyeRect.Offset(f.X, f.Y); // eyes.Add(eyeRect); // } // } //} } watch.Stop(); } } detectionTime = watch.ElapsedMilliseconds; }
public static void Detect( Mat image, String faceFileName, String eyeFileName, List <Rectangle> faces, List <Rectangle> eyes, bool tryUseCuda, bool tryUseOpenCL, out long detectionTime) { Stopwatch watch; #if !(IOS || NETFX_CORE) if (tryUseCuda && CudaInvoke.HasCuda) { using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName)) { watch = Stopwatch.StartNew(); using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>()) { Rectangle[] faceRegion = face.DetectMultiScale(gpuGray, 1.1, 10, Size.Empty); faces.AddRange(faceRegion); foreach (Rectangle f in faceRegion) { using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (CudaImage <Gray, Byte> clone = faceImg.Clone(null)) { Rectangle[] eyeRegion = eye.DetectMultiScale(clone, 1.1, 10, Size.Empty); foreach (Rectangle e in eyeRegion) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } } watch.Stop(); } } else #endif { //Many opencl functions require opencl compatible gpu devices. //As of opencv 3.0-alpha, opencv will crash if opencl is enable and only opencv compatible cpu device is presented //So we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only have cpu opencl devices). CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice; //Read the HaarCascade objects using (CascadeClassifier face = new CascadeClassifier(faceFileName)) using (CascadeClassifier eye = new CascadeClassifier(eyeFileName)) { watch = Stopwatch.StartNew(); using (UMat ugray = new UMat()) { CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray); //normalizes brightness and increases contrast of the image CvInvoke.EqualizeHist(ugray, ugray); //Detect the faces from the gray scale image and store the locations as rectangle //The first dimensional is the channel //The second dimension is the index of the rectangle in the specific channel Rectangle[] facesDetected = face.DetectMultiScale( ugray, 1.1, 10, new Size(20, 20)); faces.AddRange(facesDetected); foreach (Rectangle f in facesDetected) { //Get the region of interest on the faces using (UMat faceRegion = new UMat(ugray, f)) { Rectangle[] eyesDetected = eye.DetectMultiScale( faceRegion, 1.1, 10, new Size(20, 20)); foreach (Rectangle e in eyesDetected) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } watch.Stop(); } } detectionTime = watch.ElapsedMilliseconds; }
public static void Detect( IInputArray image, String faceFileName, String eyeFileName, List<Rectangle> faces, List<Rectangle> eyes, out long detectionTime) { Stopwatch watch; using (InputArray iaImage = image.GetInputArray()) { #if !(__IOS__ || NETFX_CORE) if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda) { using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) using (CudaCascadeClassifier eye = new CudaCascadeClassifier(eyeFileName)) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; eye.ScaleFactor = 1.1; eye.MinNeighbors = 10; eye.MinObjectSize = Size.Empty; watch = Stopwatch.StartNew(); using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image)) using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); foreach (Rectangle f in faceRegion) { using (CudaImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (CudaImage<Gray, Byte> clone = faceImg.Clone(null)) using (GpuMat eyeRegionMat = new GpuMat()) { eye.DetectMultiScale(clone, eyeRegionMat); Rectangle[] eyeRegion = eye.Convert(eyeRegionMat); foreach (Rectangle e in eyeRegion) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } } watch.Stop(); } } else #endif { //Read the HaarCascade objects using (CascadeClassifier face = new CascadeClassifier(faceFileName)) using (CascadeClassifier eye = new CascadeClassifier(eyeFileName)) { watch = Stopwatch.StartNew(); using (UMat ugray = new UMat()) { CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray); //normalizes brightness and increases contrast of the image CvInvoke.EqualizeHist(ugray, ugray); //Detect the faces from the gray scale image and store the locations as rectangle //The first dimensional is the channel //The second dimension is the index of the rectangle in the specific channel Rectangle[] facesDetected = face.DetectMultiScale( ugray, 1.1, 10, new Size(20, 20)); faces.AddRange(facesDetected); foreach (Rectangle f in facesDetected) { //Get the region of interest on the faces using (UMat faceRegion = new UMat(ugray, f)) { Rectangle[] eyesDetected = eye.DetectMultiScale( faceRegion, 1.1, 10, new Size(20, 20)); foreach (Rectangle e in eyesDetected) { Rectangle eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } watch.Stop(); } } detectionTime = watch.ElapsedMilliseconds; } }
public static void Detect( IInputArray image, string faceFileName, string eyeFileName, List <Rectangle> faces, List <Rectangle> eyes, out long detectionTime) { Stopwatch watch; using (var iaImage = image.GetInputArray()) { #if !(__IOS__ || NETFX_CORE) if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda) { using (var face = new CudaCascadeClassifier(faceFileName)) using (var eye = new CudaCascadeClassifier(eyeFileName)) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; eye.ScaleFactor = 1.1; eye.MinNeighbors = 10; eye.MinObjectSize = Size.Empty; watch = Stopwatch.StartNew(); using (var gpuImage = new CudaImage <Bgr, byte>(image)) using (var gpuGray = gpuImage.Convert <Gray, byte>()) using (var region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); var faceRegion = face.Convert(region); faces.AddRange(faceRegion); foreach (var f in faceRegion) { using (var faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (var clone = faceImg.Clone(null)) using (var eyeRegionMat = new GpuMat()) { eye.DetectMultiScale(clone, eyeRegionMat); var eyeRegion = eye.Convert(eyeRegionMat); foreach (var e in eyeRegion) { var eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } } watch.Stop(); } } else #endif using (var face = new CascadeClassifier(faceFileName)) using (var eye = new CascadeClassifier(eyeFileName)) { watch = Stopwatch.StartNew(); using (var ugray = new UMat()) { CvInvoke.CvtColor(image, ugray, ColorConversion.Bgr2Gray); //normalizes brightness and increases contrast of the image CvInvoke.EqualizeHist(ugray, ugray); //Detect the faces from the gray scale image and store the locations as rectangle //The first dimensional is the channel //The second dimension is the index of the rectangle in the specific channel var facesDetected = face.DetectMultiScale( ugray, 1.1, 10, new Size(20, 20)); faces.AddRange(facesDetected); foreach (var f in facesDetected) { //Get the region of interest on the faces using (var faceRegion = new UMat(ugray, f)) { var eyesDetected = eye.DetectMultiScale( faceRegion, 1.1, 10, new Size(20, 20)); foreach (var e in eyesDetected) { var eyeRect = e; eyeRect.Offset(f.X, f.Y); eyes.Add(eyeRect); } } } } watch.Stop(); } detectionTime = watch.ElapsedMilliseconds; } }
public static void Detect( Mat image, String faceFileName, String eyeleftFileName, string eyerightFileName, List <Rectangle> faces, List <Rectangle> eyesleft, List <Rectangle> eyesright, bool tryUseCuda, bool tryUseOpenCL, out long detectionTime) { Stopwatch watch; #if !(IOS || NETFX_CORE) if (tryUseCuda && CudaInvoke.HasCuda) { using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName)) using (CudaCascadeClassifier eyeleft = new CudaCascadeClassifier(eyeleftFileName)) using (CudaCascadeClassifier eyeright = new CudaCascadeClassifier(eyerightFileName)) { face.ScaleFactor = 1.1; face.MinNeighbors = 10; face.MinObjectSize = Size.Empty; eyeleft.ScaleFactor = 1.1; eyeleft.MinNeighbors = 10; eyeleft.MinObjectSize = Size.Empty; eyeright.ScaleFactor = 1.1; eyeright.MinNeighbors = 10; eyeright.MinObjectSize = Size.Empty; watch = Stopwatch.StartNew(); using (CudaImage <Bgr, Byte> gpuImage = new CudaImage <Bgr, byte>(image)) using (CudaImage <Gray, Byte> gpuGray = gpuImage.Convert <Gray, Byte>()) using (GpuMat region = new GpuMat()) { face.DetectMultiScale(gpuGray, region); Rectangle[] faceRegion = face.Convert(region); faces.AddRange(faceRegion); foreach (Rectangle f in faceRegion) { using (CudaImage <Gray, Byte> faceImg = gpuGray.GetSubRect(f)) { //For some reason a clone is required. //Might be a bug of CudaCascadeClassifier in opencv using (CudaImage <Gray, Byte> clone = faceImg.Clone(null)) using (GpuMat eyeRegionMat = new GpuMat()) { eyeleft.DetectMultiScale(clone, eyeRegionMat); Rectangle[] eyeRegion = eyeleft.Convert(eyeRegionMat); foreach (Rectangle eleft in eyeRegion) { Rectangle eyeRectleft = eleft; eyeRectleft.Offset(f.X, f.Y); eyesleft.Add(eyeRectleft); } } using (CudaImage <Gray, Byte> clone = faceImg.Clone(null)) using (GpuMat eyeRegionMat = new GpuMat()) { eyeright.DetectMultiScale(clone, eyeRegionMat); Rectangle[] eyeRegion = eyeright.Convert(eyeRegionMat); foreach (Rectangle eright in eyeRegion) { Rectangle eyeRectright = eright; eyeRectright.Offset(f.X, f.Y); eyesright.Add(eyeRectright); } } } } } watch.Stop(); } } else #endif { //Many opencl functions require opencl compatible gpu devices. //As of opencv 3.0-alpha, opencv will crash if opencl is enable and only opencv compatible cpu device is presented //So we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only have cpu opencl devices). 
CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

//Read the HaarCascade objects
using (CascadeClassifier face = new CascadeClassifier(faceFileName))
using (CascadeClassifier eyeleft = new CascadeClassifier(eyeleftFileName))
using (CascadeClassifier eyeright = new CascadeClassifier(eyerightFileName))
{
   watch = Stopwatch.StartNew();
   using (UMat ugray = new UMat())
   {
      CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

      //Equalize the brightness of the image
      CvInvoke.EqualizeHist(ugray, ugray);

      //Detect the faces from the grayscale image and store the locations as rectangles
      //The first dimension is the channel
      //The second dimension is the index of the rectangle in the specific channel
      Rectangle[] facesDetected = face.DetectMultiScale(
         ugray,
         1.1,
         10,
         new Size(20, 20));

      faces.AddRange(facesDetected);
      foreach (Rectangle f in facesDetected)
      {
         //Use the detected face region
         using (UMat faceRegion = new UMat(ugray, f))
         {
            //Find the rectangles of the left eye
            Rectangle[] eyesleftDetected = eyeleft.DetectMultiScale(
               faceRegion,
               1.1,
               10,
               new Size(20, 20));
            foreach (Rectangle eleft in eyesleftDetected)
            {
               Rectangle eyeRectleft = eleft;
               eyeRectleft.Offset(f.X, f.Y);
               eyesleft.Add(eyeRectleft);
            }

            //Find the rectangles of the right eye
            Rectangle[] eyesrightDetected = eyeright.DetectMultiScale(
               faceRegion,
               1.1,
               10,
               new Size(20, 20));
            foreach (Rectangle eright in eyesrightDetected)
            {
               Rectangle eyeRectright = eright;
               eyeRectright.Offset(f.X, f.Y);
               eyesright.Add(eyeRectright);
            }
         }
      }
   }
   watch.Stop();
}
}
detectionTime = watch.ElapsedMilliseconds; //total elapsed detection time
}