/// <summary>
/// Runs the CUDA cascade classifier over <paramref name="region"/> and converts
/// the GPU-side detection buffer into managed rectangles.
/// </summary>
/// <param name="classifier">Initialized CUDA Haar cascade classifier.</param>
/// <param name="region">GPU image to scan for objects.</param>
/// <returns>Bounding boxes of the detected objects, in image coordinates.</returns>
private static Rectangle[] GetRectangles(CudaCascadeClassifier classifier, GpuMat region)
{
    // Fix: the GPU result buffer was previously never disposed, leaking native
    // GPU memory on every call. Dispose it deterministically with 'using'.
    using (GpuMat facesBufGpu = new GpuMat())
    {
        classifier.DetectMultiScale(region, facesBufGpu);
        return classifier.Convert(facesBufGpu);
    }
}
/// <summary>
/// One-shot setup of the face-tracking pipeline: verifies CUDA support, resets
/// tracking state on the UI thread, loads and configures the Haar cascade,
/// opens the webcam via DirectShow, and wires the frame-grabbed handler.
/// Progress is reported through <paramref name="loadingText"/>.
/// </summary>
/// <param name="loadingText">Loading-screen behaviour whose message is updated at each step.</param>
/// <exception cref="Exception">Thrown when no CUDA device is detected.</exception>
public void InitializeCameraAndClassifier(LoadingTextBehaviour loadingText)
{
    // The CUDA cascade classifier is mandatory here; bail out early without it.
    if (!CudaInvoke.HasCuda)
    {
        throw new Exception("Error! Cuda not detected!");
    }
    loadingText.loadingMsg = "Creating buffers";
    // Unity objects (Texture2D, transforms) must be touched on the main thread,
    // hence the Dispatcher.Invoke.
    Dispatcher.Invoke(() =>
    {
        // Width/height of -1 marks "no face detected yet".
        this.currentFace = new Rectangle(0, 0, -1, -1);
        this.currentFrame = new Texture2D(camWidth, camHeight, TextureFormat.RGB24, false);
        // World position scaled into camera-space units.
        this.userPosition = new Vector3(
            inGameCamera.transform.position.x / inGameCamera.scaleFactor,
            inGameCamera.transform.position.y / inGameCamera.scaleFactor,
            inGameCamera.transform.position.z / inGameCamera.scaleFactor
        );
    });
    Debug.Log("*************Loading Haar cascade");
    loadingText.loadingMsg = "Loading Haar cascade";
    // NOTE(review): absolute path — the cascade must exist at C:\ on the target
    // machine; consider shipping it with the app. TODO confirm deployment layout.
    this.haarCascade = new CudaCascadeClassifier(@"C:\haarcascade_frontalface_default.xml");
    Debug.Log("*************Setting Haar cascade properties");
    loadingText.loadingMsg = "Initializing Haar cascade";
    // Very fine scale step + high neighbor count: slow but low false-positive
    // tracking, restricted to the single largest face.
    this.haarCascade.ScaleFactor = 1.005;
    this.haarCascade.MinNeighbors = 20;
    this.haarCascade.MinObjectSize = Size.Empty;
    this.haarCascade.MaxNumObjects = 1;
    this.haarCascade.FindLargestObject = true;
    Debug.Log("*************Loading webcam");
    loadingText.loadingMsg = "Loading Webcam";
    try
    {
        // DirectShow backend; camIndex selects the physical device.
        this.webcam = new VideoCapture(inGameCamera.camIndex, VideoCapture.API.DShow);
    }
    catch (System.Exception error)
    {
        // Log for diagnostics, then rethrow so the caller sees the failure.
        Debug.LogError(error);
        throw;
    }
    Debug.Log("*************Setting webcam properties");
    loadingText.loadingMsg = "Initializing Webcam";
    this.webcam.SetCaptureProperty(CapProp.Fps, camFps);
    this.webcam.SetCaptureProperty(CapProp.FrameHeight, camHeight);
    this.webcam.SetCaptureProperty(CapProp.FrameWidth, camWidth);
    // Small buffer keeps grabbed frames close to real time.
    this.webcam.SetCaptureProperty(CapProp.Buffersize, 2);
    // Ratio used to translate on-screen face size into a camera distance estimate.
    this.camDistanceRatio = (camWidth * camFov / inGameCamera.horizontalFov) / Mathf.Tan(Mathf.Deg2Rad * camFov / 2);
    Debug.Log("*************Loading event handler");
    loadingText.loadingMsg = "Loading event handler";
    // ProcessFrame will be invoked for every grabbed frame once capture starts.
    this.webcam.ImageGrabbed += ProcessFrame;
    Debug.Log("*************Starting capture");
}
/// <summary>
/// Detects faces in <paramref name="image"/> with a Haar cascade, preferring the
/// CUDA implementation when requested and available, otherwise falling back to
/// the CPU classifier.
/// </summary>
/// <param name="image">BGR input image.</param>
/// <param name="faceFileName">Path to the Haar cascade XML file.</param>
/// <param name="faces">Receives the bounding boxes of detected faces.</param>
/// <param name="tryUseCuda">When true, attempt GPU detection first.</param>
/// <param name="detectionTime">Elapsed detection time in milliseconds.</param>
public static void Detect(
    Mat image, String faceFileName,
    List<Rectangle> faces,
    bool tryUseCuda,
    out long detectionTime)
{
    Stopwatch timer;

#if !(__IOS__ || NETFX_CORE)
    if (tryUseCuda && CudaInvoke.HasCuda)
    {
        using (CudaCascadeClassifier gpuCascade = new CudaCascadeClassifier(faceFileName))
        {
            gpuCascade.ScaleFactor = 1.1;
            gpuCascade.MinNeighbors = 10;
            gpuCascade.MinObjectSize = Size.Empty;

            timer = Stopwatch.StartNew();
            using (CudaImage<Bgr, Byte> gpuFrame = new CudaImage<Bgr, byte>(image))
            using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
            using (GpuMat detections = new GpuMat())
            {
                // Results land in a GPU buffer and must be converted to
                // managed rectangles afterwards.
                gpuCascade.DetectMultiScale(gpuGray, detections);
                faces.AddRange(gpuCascade.Convert(detections));
            }
            timer.Stop();
        }
    }
    else
#endif
    {
        // CPU fallback: classic CascadeClassifier over an equalized gray image.
        using (CascadeClassifier cpuCascade = new CascadeClassifier(faceFileName))
        {
            timer = Stopwatch.StartNew();
            using (UMat gray = new UMat())
            {
                CvInvoke.CvtColor(image, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                // Histogram equalization normalizes brightness and boosts contrast.
                CvInvoke.EqualizeHist(gray, gray);
                faces.AddRange(cpuCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20)));
            }
            timer.Stop();
        }
    }
    detectionTime = timer.ElapsedMilliseconds;
}
/// <summary>
/// CUDA-only face and eye detection. The classifiers are created lazily and
/// cached in the <c>face</c> / <c>eye</c> fields so they survive across calls
/// (which is why they are deliberately not wrapped in 'using' here).
/// When CUDA is unavailable — or on iOS/NETFX_CORE builds — this is a no-op.
/// </summary>
/// <param name="image">BGR input frame.</param>
/// <param name="faceFileName">Face Haar cascade path (used on first call only).</param>
/// <param name="eyeFileName">Eye Haar cascade path (used on first call only).</param>
/// <param name="faces">Receives face rectangles.</param>
/// <param name="eyes">Receives eye rectangles in full-image coordinates.</param>
/// <param name="tryUseCuda">Must be true for any detection to happen.</param>
public static void Detect(
    Mat image, String faceFileName, String eyeFileName,
    List<Rectangle> faces, List<Rectangle> eyes,
    bool tryUseCuda)
{
#if !(__IOS__ || NETFX_CORE)
    if (!tryUseCuda || !CudaInvoke.HasCuda)
    {
        return;
    }

    if (face == null)
    {
        face = new CudaCascadeClassifier(faceFileName);
    }
    if (eye == null)
    {
        eye = new CudaCascadeClassifier(eyeFileName);
    }

    face.ScaleFactor = 1.1;
    face.MinNeighbors = 10;
    face.MinObjectSize = Size.Empty;
    eye.ScaleFactor = 1.1;
    eye.MinNeighbors = 10;
    eye.MinObjectSize = Size.Empty;

    using (CudaImage<Bgr, Byte> gpuFrame = new CudaImage<Bgr, byte>(image))
    using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
    using (GpuMat faceBuf = new GpuMat())
    {
        face.DetectMultiScale(gpuGray, faceBuf);
        Rectangle[] detectedFaces = face.Convert(faceBuf);
        faces.AddRange(detectedFaces);

        foreach (Rectangle faceRect in detectedFaces)
        {
            using (CudaImage<Gray, Byte> roi = gpuGray.GetSubRect(faceRect))
            // A clone is required here: detecting on the sub-rect view directly
            // misbehaves (possible CudaCascadeClassifier bug in OpenCV).
            using (CudaImage<Gray, Byte> roiCopy = roi.Clone(null))
            using (GpuMat eyeBuf = new GpuMat())
            {
                eye.DetectMultiScale(roiCopy, eyeBuf);
                foreach (Rectangle found in eye.Convert(eyeBuf))
                {
                    Rectangle eyeRect = found;
                    // Translate from face-local back to full-image coordinates.
                    eyeRect.Offset(faceRect.X, faceRect.Y);
                    eyes.Add(eyeRect);
                }
            }
        }
    }
#endif
}
/// <summary>
/// Detects traffic signs with a Haar cascade. When the input already lives on
/// the GPU and CUDA is available the CUDA classifier is used; otherwise the
/// CPU path runs on an equalized grayscale copy.
/// </summary>
/// <param name="image">Source image (BGR).</param>
/// <param name="singFileName">Path to the cascade file.</param>
/// <param name="sings">Receives the rectangles of signs found in the image.</param>
/// <param name="detectionTime">Elapsed detection time in milliseconds.</param>
public void Detect(IInputArray image, String singFileName, List<Rectangle> sings, out long detectionTime)
{
    Stopwatch timer;
    using (InputArray iaImage = image.GetInputArray())
    {
        if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
        {
            using (CudaCascadeClassifier gpuCascade = new CudaCascadeClassifier(singFileName))
            {
                gpuCascade.ScaleFactor = 1.1;         // pyramid scale step
                gpuCascade.MinNeighbors = 10;         // fewer neighbors => more false alarms
                gpuCascade.MinObjectSize = Size.Empty; // no minimum size
                timer = Stopwatch.StartNew();
                // Convert to grayscale on the GPU, then collect candidate regions.
                using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
                using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
                using (GpuMat region = new GpuMat())
                {
                    gpuCascade.DetectMultiScale(gpuGray, region);
                    sings.AddRange(gpuCascade.Convert(region));
                }
                timer.Stop();
            }
        }
        else
        {
            // CPU fallback with the classic HaarCascade classifier.
            using (CascadeClassifier cpuCascade = new CascadeClassifier(singFileName))
            {
                timer = Stopwatch.StartNew();
                using (UMat gray = new UMat())
                {
                    CvInvoke.CvtColor(image, gray, ColorConversion.Bgr2Gray);
                    // Normalize brightness and increase contrast.
                    CvInvoke.EqualizeHist(gray, gray);
                    // Detect signs on the gray image; store locations as rectangles.
                    sings.AddRange(cpuCascade.DetectMultiScale(
                        gray,              // source image
                        1.1,               // scale step
                        10,                // neighbor grouping; lower => more false alarms
                        new Size(20, 20))); // minimum object size
                }
                timer.Stop();
            }
        }
    }
    detectionTime = timer.ElapsedMilliseconds;
}
/// <summary>
/// Detects frontal faces using the bundled default Haar cascade. GPU input with
/// CUDA support takes the CUDA path; anything else uses the CPU classifier.
/// </summary>
/// <param name="image">Source image (BGR).</param>
/// <param name="faces">Receives the face bounding boxes.</param>
public static void Detect(IInputArray image, List<Rectangle> faces)
{
    string faceFileName = @"./Resources/haarcascade_frontalface_default.xml";
    using (InputArray iaImage = image.GetInputArray())
    {
#if !(__IOS__ || NETFX_CORE)
        if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
        {
            using (CudaCascadeClassifier gpuCascade = new CudaCascadeClassifier(faceFileName))
            {
                gpuCascade.ScaleFactor = 1.1;
                gpuCascade.MinNeighbors = 10;
                gpuCascade.MinObjectSize = Size.Empty;
                using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
                using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
                using (GpuMat region = new GpuMat())
                {
                    gpuCascade.DetectMultiScale(gpuGray, region);
                    faces.AddRange(gpuCascade.Convert(region));
                }
            }
        }
        else
#endif
        {
            using (CascadeClassifier cpuCascade = new CascadeClassifier(faceFileName))
            using (UMat gray = new UMat())
            {
                CvInvoke.CvtColor(image, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                // Equalization normalizes brightness and increases contrast.
                CvInvoke.EqualizeHist(gray, gray);
                faces.AddRange(cpuCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20)));
            }
        }
    }
}
/// <summary>
/// Creates a CUDA-backed cascade detector. If the machine lacks CUDA support
/// an error is logged and the classifier is left uninitialized.
/// </summary>
/// <param name="detectorPath">Path to the Haar cascade XML file.</param>
public DetectObjectCuda(string detectorPath)
{
    if (!CudaInvoke.HasCuda)
    {
        UnityEngine.Debug.LogError("Your system doesn't have Cuda support!");
        return;
    }

    _detectorPath = detectorPath;
    _classifier = new CudaCascadeClassifier(_detectorPath)
    {
        ScaleFactor = 1.1,
        MinNeighbors = 10,
        MinObjectSize = Size.Empty
    };
}
/// <summary>
/// Builds the CUDA cascade classifiers for face and eye detection and reports
/// CUDA availability to the console.
/// </summary>
/// <param name="faceFileName">Path to the face Haar cascade.</param>
/// <param name="eyeFileName">Path to the eye Haar cascade.</param>
public CudaFaceEyeDetector(string faceFileName, string eyeFileName)
{
    System.Console.WriteLine($"Have I Cuda? {CudaInvoke.HasCuda}");

    faceClassifier = new CudaCascadeClassifier(faceFileName);
    faceClassifier.ScaleFactor = 1.1;
    faceClassifier.MinNeighbors = 10;
    faceClassifier.MinObjectSize = Size.Empty;

    eyeClassifier = new CudaCascadeClassifier(eyeFileName);
    eyeClassifier.ScaleFactor = 1.1;
    eyeClassifier.MinNeighbors = 10;
    // Eyes smaller than 20x20 pixels are ignored.
    eyeClassifier.MinObjectSize = new Size(20, 20);
}
/// <summary>
/// CUDA face + eye detection over <paramref name="original"/>. Detected
/// rectangles are appended to the supplied lists and drawn onto a copy of the
/// input, which is returned.
/// </summary>
/// <param name="original">Input frame (BGR).</param>
/// <param name="faces">Receives face rectangles.</param>
/// <param name="eyes">Receives eye rectangles in full-image coordinates.</param>
/// <returns>A copy of the input with faces and eyes drawn on it.</returns>
IImage CudaDetect(IImage original, List<Rectangle> faces, List<Rectangle> eyes)
{
    using (CudaCascadeClassifier faceCascade = new CudaCascadeClassifier(faceFileName))
    using (CudaCascadeClassifier eyeCascade = new CudaCascadeClassifier(eyeFileName))
    {
        faceCascade.ScaleFactor = 1.1;
        faceCascade.MinNeighbors = 10;
        faceCascade.MinObjectSize = Size.Empty;
        eyeCascade.ScaleFactor = 1.1;
        eyeCascade.MinNeighbors = 10;
        eyeCascade.MinObjectSize = Size.Empty;

        using (CudaImage<Bgr, Byte> gpuFrame = new CudaImage<Bgr, byte>(original))
        using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
        using (GpuMat faceBuf = new GpuMat())
        {
            faceCascade.DetectMultiScale(gpuGray, faceBuf);
            Rectangle[] faceRects = faceCascade.Convert(faceBuf);
            faces.AddRange(faceRects);

            foreach (Rectangle faceRect in faceRects)
            {
                using (CudaImage<Gray, Byte> roi = gpuGray.GetSubRect(faceRect))
                // Clone the sub-rect before detection; running on the view
                // directly misbehaves (possible CudaCascadeClassifier bug in OpenCV).
                using (CudaImage<Gray, Byte> roiCopy = roi.Clone(null))
                using (GpuMat eyeBuf = new GpuMat())
                {
                    eyeCascade.DetectMultiScale(roiCopy, eyeBuf);
                    foreach (Rectangle found in eyeCascade.Convert(eyeBuf))
                    {
                        Rectangle eyeRect = found;
                        // Translate back to full-image coordinates.
                        eyeRect.Offset(faceRect.X, faceRect.Y);
                        eyes.Add(eyeRect);
                    }
                }
            }
        }
    }

    // Annotate a copy of the frame: faces first, then eyes.
    IImage annotated = CopyAndDraw(original, faces.ToArray());
    annotated = CopyAndDraw(annotated, eyes.ToArray());
    return annotated;
}
/// <summary>
/// Runs a CUDA Haar cascade (path taken from the "haarPath" app setting) over a
/// grayscale frame and returns the detected regions.
/// NOTE(review): the frame is uploaded as-is with no color conversion (see the
/// conversions the original author experimented with and commented out) —
/// confirm the cascade accepts single-channel input.
/// </summary>
/// <param name="grayframe">Grayscale input frame.</param>
/// <returns>Rectangles of the detected objects.</returns>
public Rectangle[] Detect(Image<Gray, byte> grayframe)
{
    using (CudaCascadeClassifier detector = new CudaCascadeClassifier(ConfigurationManager.AppSettings["haarPath"]))
    using (GpuMat cudaBgra = new GpuMat())
    using (VectorOfRect detections = new VectorOfRect())
    {
        cudaBgra.Upload(grayframe);
        detector.DetectMultiScale(cudaBgra, detections);
        return detections.ToArray();
    }
}
/// <summary>
/// Creates the CPU and CUDA cascade classifiers used for face identification.
/// Construction failures (e.g. missing cascade files or no CUDA device) are
/// logged and swallowed so the application can continue without detection.
/// </summary>
/// <param name="minFaceSize">Divisor applied to the 640x480 frame to derive the
/// minimum detectable face size for the CUDA classifiers.</param>
public FaceIdentification(int minFaceSize)
{
    // Recognizer tuning notes kept from earlier experiments:
    //   eigen ~2100, lbph ~100, fisher ~250-500
    //recognizer = new EigenFaceRecognizer(80,double.PositiveInfinity);
    //recognizer = new LBPHFaceRecognizer(1, 8, 8, 8);
    //recognizer = new FisherFaceRecognizer(0, 3500);

    this.minFaceSize = minFaceSize;
    try
    {
        ccFace = new CascadeClassifier(faceHaar);
        ccSideFace = new CascadeClassifier(sideFaceHaar);
        ccAltFace = new CascadeClassifier(faceAltHaar);

        cuda_ccFace = new CudaCascadeClassifier(faceHaar);
        cuda_ccSideFace = new CudaCascadeClassifier(sideFaceHaar);
        //cuda_ccAltFace = new CudaCascadeClassifier(faceAltHaar);

        cuda_ccFace.MinNeighbors = 5;
        cuda_ccFace.ScaleFactor = 1.02;
        cuda_ccSideFace.MinNeighbors = 5;
        cuda_ccSideFace.ScaleFactor = 1.02;
        //cuda_ccAltFace.MinNeighbors = 5;
        //cuda_ccAltFace.ScaleFactor = 1.02;

        cuda_ccFace.MinObjectSize = new Size(640 / minFaceSize, 480 / minFaceSize);
        cuda_ccSideFace.MinObjectSize = cuda_ccFace.MinObjectSize;
        //cuda_ccAltFace.MinObjectSize = cuda_ccFace.MinObjectSize;
    }
    catch (Exception ex)
    {
        // Fix: the exception was silently discarded (only a commented-out
        // MessageBox remained). Record it so initialization failures are
        // diagnosable without changing the best-effort behavior.
        System.Diagnostics.Debug.WriteLine(ex);
    }
}
/// <summary>
/// Instantiates the face and eye classifiers. CUDA versions are created only
/// when a CUDA device is present (and the platform supports it); the CPU
/// versions are always available as a fallback.
/// </summary>
/// <param name="faceFileName">Path to the face Haar cascade.</param>
/// <param name="eyeFileName">Path to the eye Haar cascade.</param>
public Snap(string faceFileName, string eyeFileName)
{
#if !(__IOS__ || NETFX_CORE)
    if (CudaInvoke.HasCuda)
    {
        faceCuda = new CudaCascadeClassifier(faceFileName)
        {
            ScaleFactor = 1.1,
            MinNeighbors = 10,
            MinObjectSize = Size.Empty
        };
        eyeCuda = new CudaCascadeClassifier(eyeFileName)
        {
            ScaleFactor = 1.1,
            MinNeighbors = 10,
            MinObjectSize = Size.Empty
        };
    }
#endif
    faceCpu = new CascadeClassifier(faceFileName);
    eyeCpu = new CascadeClassifier(eyeFileName);
}
/// <summary>
/// Detects faces and eyes, choosing between CUDA, (optionally) OpenCL-enabled
/// CPU, and plain CPU execution.
/// </summary>
/// <param name="image">BGR input frame.</param>
/// <param name="faceFileName">Face Haar cascade path.</param>
/// <param name="eyeFileName">Eye Haar cascade path.</param>
/// <param name="faces">Receives face rectangles.</param>
/// <param name="eyes">Receives eye rectangles in full-image coordinates.</param>
/// <param name="tryUseCuda">Attempt the CUDA path when a device is available.</param>
/// <param name="tryUseOpenCL">Allow OpenCL on the CPU path when a compatible GPU device exists.</param>
/// <param name="detectionTime">Elapsed detection time in milliseconds.</param>
public static void Detect(
    Mat image, String faceFileName, String eyeFileName,
    List<Rectangle> faces, List<Rectangle> eyes,
    bool tryUseCuda, bool tryUseOpenCL,
    out long detectionTime)
{
    Stopwatch timer;

#if !(IOS || NETFX_CORE)
    if (tryUseCuda && CudaInvoke.HasCuda)
    {
        using (CudaCascadeClassifier faceCascade = new CudaCascadeClassifier(faceFileName))
        using (CudaCascadeClassifier eyeCascade = new CudaCascadeClassifier(eyeFileName))
        {
            timer = Stopwatch.StartNew();
            using (CudaImage<Bgr, Byte> gpuFrame = new CudaImage<Bgr, byte>(image))
            using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
            {
                Rectangle[] faceRects = faceCascade.DetectMultiScale(gpuGray, 1.1, 10, Size.Empty);
                faces.AddRange(faceRects);
                foreach (Rectangle faceRect in faceRects)
                {
                    using (CudaImage<Gray, Byte> roi = gpuGray.GetSubRect(faceRect))
                    // Clone before detecting; running on the sub-rect view
                    // directly misbehaves (possible CudaCascadeClassifier bug
                    // in OpenCV).
                    using (CudaImage<Gray, Byte> roiCopy = roi.Clone(null))
                    {
                        foreach (Rectangle found in eyeCascade.DetectMultiScale(roiCopy, 1.1, 10, Size.Empty))
                        {
                            Rectangle eyeRect = found;
                            eyeRect.Offset(faceRect.X, faceRect.Y);
                            eyes.Add(eyeRect);
                        }
                    }
                }
            }
            timer.Stop();
        }
    }
    else
#endif
    {
        // Many OpenCL functions require OpenCL-compatible *GPU* devices. As of
        // opencv 3.0-alpha, enabling OpenCL with only a CPU OpenCL device
        // present crashes, so gate on HaveOpenCLCompatibleGpuDevice instead of
        // HaveOpenCL (which is also true on CPU-only OpenCL systems).
        CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

        using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
        using (CascadeClassifier eyeCascade = new CascadeClassifier(eyeFileName))
        {
            timer = Stopwatch.StartNew();
            using (UMat gray = new UMat())
            {
                CvInvoke.CvtColor(image, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                // Normalize brightness and increase contrast.
                CvInvoke.EqualizeHist(gray, gray);

                Rectangle[] faceRects = faceCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20));
                faces.AddRange(faceRects);
                foreach (Rectangle faceRect in faceRects)
                {
                    // Look for eyes only inside each detected face region.
                    using (UMat roi = new UMat(gray, faceRect))
                    {
                        foreach (Rectangle found in eyeCascade.DetectMultiScale(roi, 1.1, 10, new Size(20, 20)))
                        {
                            Rectangle eyeRect = found;
                            eyeRect.Offset(faceRect.X, faceRect.Y);
                            eyes.Add(eyeRect);
                        }
                    }
                }
            }
            timer.Stop();
        }
    }
    detectionTime = timer.ElapsedMilliseconds;
}
/// <summary>
/// Detects faces (and optionally eyes) in <paramref name="image"/> using CUDA
/// Haar cascades shipped with the Unity project. On machines without CUDA the
/// method performs no detection and reports a detection time of 0.
/// </summary>
/// <param name="image">BGR input frame.</param>
/// <param name="detectEyes">When false, eye detection is skipped entirely.</param>
/// <param name="faces">Receives face rectangles.</param>
/// <param name="eyes">Receives eye rectangles in full-image coordinates.</param>
/// <param name="detectionTime">Elapsed detection time in ms (0 when CUDA is absent).</param>
public static void DetectFace(
    Mat image, bool detectEyes,
    List<Rectangle> faces, List<Rectangle> eyes,
    out long detectionTime)
{
    Stopwatch watch;
    String faceFileName = Application.dataPath + "\\Emgu\\haarcascades\\haarcascade_frontalface_default.xml";
    String eyeFileName = Application.dataPath + "\\Emgu\\haarcascade_eye.xml";
    if (!detectEyes)
    {
        // An empty path signals "no eye classifier" below.
        eyeFileName = "";
    }
#if !(IOS || NETFX_CORE)
    if (CudaInvoke.HasCuda)
    {
        // Fix: the classifiers were never disposed, leaking native GPU
        // resources on every call. try/finally guarantees cleanup.
        CudaCascadeClassifier face = null;
        CudaCascadeClassifier eye = null;
        try
        {
            if (faceFileName != "")
            {
                face = new CudaCascadeClassifier(faceFileName);
            }
            if (eyeFileName != "")
            {
                eye = new CudaCascadeClassifier(eyeFileName);
            }

            face.ScaleFactor = 1.1;
            face.MinNeighbors = 10;
            face.MinObjectSize = Size.Empty;
            if (eye != null)
            {
                eye.ScaleFactor = 1.1;
                eye.MinNeighbors = 10;
                eye.MinObjectSize = Size.Empty;
            }

            watch = Stopwatch.StartNew();
            using (CudaImage<Bgr, Byte> gpuImage = new CudaImage<Bgr, byte>(image))
            using (CudaImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
            using (GpuMat region = new GpuMat())
            {
                face.DetectMultiScale(gpuGray, region);
                Rectangle[] faceRegion = face.Convert(region);
                faces.AddRange(faceRegion);
                if (eye != null)
                {
                    foreach (Rectangle f in faceRegion)
                    {
                        using (CudaImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                        // A clone is required; detection on the sub-rect view
                        // misbehaves (possible CudaCascadeClassifier bug in OpenCV).
                        using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
                        using (GpuMat eyeRegionMat = new GpuMat())
                        {
                            eye.DetectMultiScale(clone, eyeRegionMat);
                            foreach (Rectangle e in eye.Convert(eyeRegionMat))
                            {
                                Rectangle eyeRect = e;
                                eyeRect.Offset(f.X, f.Y);
                                eyes.Add(eyeRect);
                            }
                        }
                    }
                }
            }
            watch.Stop();
            detectionTime = watch.ElapsedMilliseconds;
        }
        finally
        {
            if (face != null)
            {
                face.Dispose();
            }
            if (eye != null)
            {
                eye.Dispose();
            }
        }
    }
    else
#endif
    {
        detectionTime = 0;
    }
}
// NOTE: in this release the CUDA cascade classifier could not load
// "haarcascade_frontalface_default.xml", so callers historically passed
// tryUseCuda = false. A commented-out usage example (drawing red face / blue
// eye rectangles and showing the result in an ImageViewer) previously lived
// here; see source history if needed.
/// <summary>
/// Detects faces and their eyes with Haar cascades — on the GPU when
/// <paramref name="tryUseCuda"/> is set and CUDA is available, otherwise on
/// the CPU.
/// </summary>
/// <param name="image">BGR input frame.</param>
/// <param name="faceFileName">Face Haar cascade path.</param>
/// <param name="eyeFileName">Eye Haar cascade path.</param>
/// <param name="faces">Receives face rectangles.</param>
/// <param name="eyes">Receives eye rectangles in full-image coordinates.</param>
/// <param name="tryUseCuda">Attempt GPU detection before falling back to CPU.</param>
/// <param name="detectionTime">Elapsed detection time in milliseconds.</param>
public static void DetectFaceAndEyes(
    Mat image, String faceFileName, String eyeFileName,
    List<Rectangle> faces, List<Rectangle> eyes,
    bool tryUseCuda,
    out long detectionTime)
{
    Stopwatch timer;

#if !(__IOS__ || NETFX_CORE)
    if (tryUseCuda && CudaInvoke.HasCuda)
    {
        using (CudaCascadeClassifier faceCascade = new CudaCascadeClassifier(faceFileName))
        using (CudaCascadeClassifier eyeCascade = new CudaCascadeClassifier(eyeFileName))
        {
            faceCascade.ScaleFactor = 1.1;
            faceCascade.MinNeighbors = 10;
            faceCascade.MinObjectSize = Size.Empty;
            eyeCascade.ScaleFactor = 1.1;
            eyeCascade.MinNeighbors = 10;
            eyeCascade.MinObjectSize = Size.Empty;

            timer = Stopwatch.StartNew();
            using (CudaImage<Bgr, Byte> gpuFrame = new CudaImage<Bgr, byte>(image))
            using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
            using (GpuMat faceBuf = new GpuMat())
            {
                faceCascade.DetectMultiScale(gpuGray, faceBuf);
                Rectangle[] faceRects = faceCascade.Convert(faceBuf);
                faces.AddRange(faceRects);

                foreach (Rectangle faceRect in faceRects)
                {
                    using (CudaImage<Gray, Byte> roi = gpuGray.GetSubRect(faceRect))
                    // Clone before detecting; running on the sub-rect view
                    // directly misbehaves (possible CudaCascadeClassifier bug
                    // in OpenCV).
                    using (CudaImage<Gray, Byte> roiCopy = roi.Clone(null))
                    using (GpuMat eyeBuf = new GpuMat())
                    {
                        eyeCascade.DetectMultiScale(roiCopy, eyeBuf);
                        foreach (Rectangle found in eyeCascade.Convert(eyeBuf))
                        {
                            Rectangle eyeRect = found;
                            eyeRect.Offset(faceRect.X, faceRect.Y);
                            eyes.Add(eyeRect);
                        }
                    }
                }
            }
            timer.Stop();
        }
    }
    else
#endif
    {
        // CPU fallback with the classic HaarCascade classifiers.
        using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
        using (CascadeClassifier eyeCascade = new CascadeClassifier(eyeFileName))
        {
            timer = Stopwatch.StartNew();
            using (UMat gray = new UMat())
            {
                CvInvoke.CvtColor(image, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                // Normalize brightness and increase contrast.
                CvInvoke.EqualizeHist(gray, gray);

                Rectangle[] faceRects = faceCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20));
                faces.AddRange(faceRects);
                foreach (Rectangle faceRect in faceRects)
                {
                    // Look for eyes only inside each detected face region.
                    using (UMat roi = new UMat(gray, faceRect))
                    {
                        foreach (Rectangle found in eyeCascade.DetectMultiScale(roi, 1.1, 10, new Size(20, 20)))
                        {
                            Rectangle eyeRect = found;
                            eyeRect.Offset(faceRect.X, faceRect.Y);
                            eyes.Add(eyeRect);
                        }
                    }
                }
            }
            timer.Stop();
        }
    }
    detectionTime = timer.ElapsedMilliseconds;
}
/// <summary>
/// Detects faces and eyes. GPU input with CUDA support takes the CUDA path;
/// otherwise the CPU classifiers run on an equalized grayscale copy.
/// </summary>
/// <param name="image">Source image (BGR).</param>
/// <param name="faceFileName">Face Haar cascade path.</param>
/// <param name="eyeFileName">Eye Haar cascade path.</param>
/// <param name="faces">Receives face rectangles.</param>
/// <param name="eyes">Receives eye rectangles in full-image coordinates.</param>
/// <param name="detectionTime">Elapsed detection time in milliseconds.</param>
public static void Detect(
    IInputArray image, string faceFileName, string eyeFileName,
    List<Rectangle> faces, List<Rectangle> eyes,
    out long detectionTime)
{
    Stopwatch timer;
    using (InputArray iaImage = image.GetInputArray())
    {
#if !(__IOS__ || NETFX_CORE)
        if (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
        {
            using (CudaCascadeClassifier faceCascade = new CudaCascadeClassifier(faceFileName))
            using (CudaCascadeClassifier eyeCascade = new CudaCascadeClassifier(eyeFileName))
            {
                faceCascade.ScaleFactor = 1.1;
                faceCascade.MinNeighbors = 10;
                faceCascade.MinObjectSize = Size.Empty;
                eyeCascade.ScaleFactor = 1.1;
                eyeCascade.MinNeighbors = 10;
                eyeCascade.MinObjectSize = Size.Empty;

                timer = Stopwatch.StartNew();
                using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(image))
                using (CudaImage<Gray, byte> gpuGray = gpuFrame.Convert<Gray, byte>())
                using (GpuMat faceBuf = new GpuMat())
                {
                    faceCascade.DetectMultiScale(gpuGray, faceBuf);
                    Rectangle[] faceRects = faceCascade.Convert(faceBuf);
                    faces.AddRange(faceRects);

                    foreach (Rectangle faceRect in faceRects)
                    {
                        using (CudaImage<Gray, byte> roi = gpuGray.GetSubRect(faceRect))
                        // Clone before detecting; running on the sub-rect view
                        // directly misbehaves (possible CudaCascadeClassifier
                        // bug in OpenCV).
                        using (CudaImage<Gray, byte> roiCopy = roi.Clone(null))
                        using (GpuMat eyeBuf = new GpuMat())
                        {
                            eyeCascade.DetectMultiScale(roiCopy, eyeBuf);
                            foreach (Rectangle found in eyeCascade.Convert(eyeBuf))
                            {
                                Rectangle eyeRect = found;
                                eyeRect.Offset(faceRect.X, faceRect.Y);
                                eyes.Add(eyeRect);
                            }
                        }
                    }
                }
                timer.Stop();
            }
        }
        else
#endif
        {
            using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
            using (CascadeClassifier eyeCascade = new CascadeClassifier(eyeFileName))
            {
                timer = Stopwatch.StartNew();
                using (UMat gray = new UMat())
                {
                    CvInvoke.CvtColor(image, gray, ColorConversion.Bgr2Gray);
                    // Normalize brightness and increase contrast.
                    CvInvoke.EqualizeHist(gray, gray);

                    Rectangle[] faceRects = faceCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20));
                    faces.AddRange(faceRects);
                    foreach (Rectangle faceRect in faceRects)
                    {
                        // Look for eyes only inside each detected face region.
                        using (UMat roi = new UMat(gray, faceRect))
                        {
                            foreach (Rectangle found in eyeCascade.DetectMultiScale(roi, 1.1, 10, new Size(20, 20)))
                            {
                                Rectangle eyeRect = found;
                                eyeRect.Offset(faceRect.X, faceRect.Y);
                                eyes.Add(eyeRect);
                            }
                        }
                    }
                }
                timer.Stop();
            }
        }
        detectionTime = timer.ElapsedMilliseconds;
    }
}
/// <summary>
/// Detects faces plus left and right eyes using separate cascades for each eye,
/// on the GPU when <paramref name="tryUseCuda"/> is set and CUDA is available,
/// otherwise on the (optionally OpenCL-enabled) CPU.
/// </summary>
/// <param name="image">BGR input frame.</param>
/// <param name="faceFileName">Face Haar cascade path.</param>
/// <param name="eyeleftFileName">Left-eye Haar cascade path.</param>
/// <param name="eyerightFileName">Right-eye Haar cascade path.</param>
/// <param name="faces">Receives face rectangles.</param>
/// <param name="eyesleft">Receives left-eye rectangles in full-image coordinates.</param>
/// <param name="eyesright">Receives right-eye rectangles in full-image coordinates.</param>
/// <param name="tryUseCuda">Attempt the CUDA path when a device is available.</param>
/// <param name="tryUseOpenCL">Allow OpenCL on the CPU path when a compatible GPU device exists.</param>
/// <param name="detectionTime">Total elapsed detection time in milliseconds.</param>
public static void Detect(
    Mat image, String faceFileName, String eyeleftFileName, string eyerightFileName,
    List<Rectangle> faces, List<Rectangle> eyesleft, List<Rectangle> eyesright,
    bool tryUseCuda, bool tryUseOpenCL,
    out long detectionTime)
{
    Stopwatch timer;

#if !(IOS || NETFX_CORE)
    if (tryUseCuda && CudaInvoke.HasCuda)
    {
        using (CudaCascadeClassifier faceCascade = new CudaCascadeClassifier(faceFileName))
        using (CudaCascadeClassifier leftEyeCascade = new CudaCascadeClassifier(eyeleftFileName))
        using (CudaCascadeClassifier rightEyeCascade = new CudaCascadeClassifier(eyerightFileName))
        {
            faceCascade.ScaleFactor = 1.1;
            faceCascade.MinNeighbors = 10;
            faceCascade.MinObjectSize = Size.Empty;
            leftEyeCascade.ScaleFactor = 1.1;
            leftEyeCascade.MinNeighbors = 10;
            leftEyeCascade.MinObjectSize = Size.Empty;
            rightEyeCascade.ScaleFactor = 1.1;
            rightEyeCascade.MinNeighbors = 10;
            rightEyeCascade.MinObjectSize = Size.Empty;

            timer = Stopwatch.StartNew();
            using (CudaImage<Bgr, Byte> gpuFrame = new CudaImage<Bgr, byte>(image))
            using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
            using (GpuMat faceBuf = new GpuMat())
            {
                faceCascade.DetectMultiScale(gpuGray, faceBuf);
                Rectangle[] faceRects = faceCascade.Convert(faceBuf);
                faces.AddRange(faceRects);

                foreach (Rectangle faceRect in faceRects)
                {
                    using (CudaImage<Gray, Byte> roi = gpuGray.GetSubRect(faceRect))
                    {
                        // Left eye. A clone of the sub-rect is required;
                        // detecting on the view directly misbehaves (possible
                        // CudaCascadeClassifier bug in OpenCV).
                        using (CudaImage<Gray, Byte> roiCopy = roi.Clone(null))
                        using (GpuMat eyeBuf = new GpuMat())
                        {
                            leftEyeCascade.DetectMultiScale(roiCopy, eyeBuf);
                            foreach (Rectangle found in leftEyeCascade.Convert(eyeBuf))
                            {
                                Rectangle eyeRect = found;
                                eyeRect.Offset(faceRect.X, faceRect.Y);
                                eyesleft.Add(eyeRect);
                            }
                        }
                        // Right eye, same clone requirement.
                        using (CudaImage<Gray, Byte> roiCopy = roi.Clone(null))
                        using (GpuMat eyeBuf = new GpuMat())
                        {
                            rightEyeCascade.DetectMultiScale(roiCopy, eyeBuf);
                            foreach (Rectangle found in rightEyeCascade.Convert(eyeBuf))
                            {
                                Rectangle eyeRect = found;
                                eyeRect.Offset(faceRect.X, faceRect.Y);
                                eyesright.Add(eyeRect);
                            }
                        }
                    }
                }
            }
            timer.Stop();
        }
    }
    else
#endif
    {
        // Many OpenCL functions require OpenCL-compatible *GPU* devices; as of
        // opencv 3.0-alpha, enabling OpenCL with only CPU OpenCL devices
        // present crashes, hence HaveOpenCLCompatibleGpuDevice.
        CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

        using (CascadeClassifier faceCascade = new CascadeClassifier(faceFileName))
        using (CascadeClassifier leftEyeCascade = new CascadeClassifier(eyeleftFileName))
        using (CascadeClassifier rightEyeCascade = new CascadeClassifier(eyerightFileName))
        {
            timer = Stopwatch.StartNew();
            using (UMat gray = new UMat())
            {
                CvInvoke.CvtColor(image, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                // Normalize the image brightness.
                CvInvoke.EqualizeHist(gray, gray);

                // Detect faces on the gray image; store locations as rectangles.
                Rectangle[] faceRects = faceCascade.DetectMultiScale(gray, 1.1, 10, new Size(20, 20));
                faces.AddRange(faceRects);
                foreach (Rectangle faceRect in faceRects)
                {
                    // Restrict eye searches to the face region.
                    using (UMat roi = new UMat(gray, faceRect))
                    {
                        // Left-eye rectangles.
                        foreach (Rectangle found in leftEyeCascade.DetectMultiScale(roi, 1.1, 10, new Size(20, 20)))
                        {
                            Rectangle eyeRect = found;
                            eyeRect.Offset(faceRect.X, faceRect.Y);
                            eyesleft.Add(eyeRect);
                        }
                        // Right-eye rectangles.
                        foreach (Rectangle found in rightEyeCascade.DetectMultiScale(roi, 1.1, 10, new Size(20, 20)))
                        {
                            Rectangle eyeRect = found;
                            eyeRect.Offset(faceRect.X, faceRect.Y);
                            eyesright.Add(eyeRect);
                        }
                    }
                }
            }
            timer.Stop();
        }
    }
    // Total elapsed time across whichever path ran.
    detectionTime = timer.ElapsedMilliseconds;
}