/// <summary>
/// Detects faces (and their eyes) in the supplied frame using the CUDA cascade classifier.
/// Only the first detected face that has at least one eye is kept (the loop breaks after
/// the first accepted face). Any CUDA/classifier error is logged and the list collected
/// so far is returned.
/// </summary>
/// <param name="image">Source BGR frame to scan.</param>
/// <param name="faceFileName">Path to the face Haar-cascade XML file.</param>
/// <param name="eyeFileName">Path to the eye Haar-cascade XML file (forwarded to FindEyes).</param>
/// <param name="scale">Unused in the CUDA path; DetectMultiScale runs with classifier defaults.</param>
/// <param name="neighbors">Unused in the CUDA path.</param>
/// <param name="minSize">Unused in the CUDA path.</param>
/// <returns>List of populated Face models; empty when nothing qualified or on error.</returns>
public List<Face> FindFaces(Image<Bgr, byte> image, string faceFileName, string eyeFileName, double scale, int neighbors, int minSize)
{
    List<Face> faces = new List<Face>();
    List<Rectangle> facesRect = new List<Rectangle>();
    List<Rectangle> eyesRect = new List<Rectangle>();
    try
    {
        using (CudaCascadeClassifier face = new CudaCascadeClassifier(faceFileName))
        {
            using (CudaImage<Bgr, Byte> CudaImage = new CudaImage<Bgr, byte>(image))
            using (CudaImage<Gray, Byte> CudaGray = CudaImage.Convert<Gray, Byte>())
            using (GpuMat region = new GpuMat())
            {
                face.DetectMultiScale(CudaGray, region);
                Rectangle[] faceRegion = face.Convert(region);
                facesRect.AddRange(faceRegion);
                foreach (Rectangle f in faceRegion)
                {
                    using (CudaImage<Gray, Byte> faceImg = CudaGray.GetSubRect(f))
                    using (CudaImage<Gray, Byte> clone = faceImg.Clone(null))
                    {
                        Face facemodel = new Face();
                        // Capture FindEyes' result BEFORE wrapping it in a List: the old code
                        // did `new List<Rectangle>(FindEyes(...))` and then checked for null,
                        // but the List constructor throws on a null source, so the guard was
                        // dead code. Also require at least one eye, matching the CPU/OpenCL
                        // implementation of this method.
                        var foundEyes = FindEyes(eyeFileName, clone);
                        if (foundEyes == null)
                        {
                            continue;
                        }
                        eyesRect = new List<Rectangle>(foundEyes);
                        if (eyesRect.Count == 0)
                        {
                            continue;
                        }
                        facemodel.EyesRects = eyesRect;
                        facemodel.EyesCount = eyesRect.Count;
                        facemodel.FaceImage = clone.Bitmap;
                        facemodel.Height = facemodel.FaceImage.Height;
                        facemodel.Width = facemodel.FaceImage.Width;
                        facemodel.FaceRect = f;
                        facemodel.FramePosX = f.X;
                        facemodel.FramePosY = f.Y;
                        facemodel.ImageFrameSize = image.Size;
                        // ToImage() allocates a CPU-side image; dispose it once the
                        // standard deviation has been read (the old code leaked it).
                        using (var cpuClone = clone.ToImage())
                        {
                            Gray avgf = new Gray();
                            MCvScalar avstd = new MCvScalar();
                            cpuClone.AvgSdv(out avgf, out avstd);
                            facemodel.StdDev = avstd.V0;
                        }
                        faces.Add(facemodel);
                        if (facemodel.FaceScore > 39)
                            Console.WriteLine("FaceDetect USING gpuCUDA Add faceModel" + facemodel.FaceScore);
                        // Only the first qualifying face per frame is processed.
                        break;
                    }
                }
            }
        }
    }
    catch (Exception cudaerrJones)
    {
        Console.WriteLine("cudaerrJones = " + cudaerrJones);
    }
    return faces;
}
/// <summary>
/// Detects faces (and their eyes) in the supplied frame using a CPU/OpenCL cascade
/// classifier on a histogram-equalized grayscale copy. Only the first detected face with
/// at least one eye is kept (the loop breaks after the first accepted face). Errors are
/// logged and the list collected so far is returned.
/// </summary>
/// <param name="image">Source BGR frame to scan.</param>
/// <param name="faceFileName">Path to the face Haar-cascade XML file (passed to createClassifier).</param>
/// <param name="eyeFileName">Path to the eye Haar-cascade XML file (forwarded to FindEyes).</param>
/// <param name="scale">Scale factor for DetectMultiScale.</param>
/// <param name="neighbors">Minimum neighbor count for DetectMultiScale.</param>
/// <param name="minSize">Minimum face side length in pixels.</param>
/// <returns>List of populated Face models; empty when nothing qualified or on error.</returns>
public List<Face> FindFaces(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image, string faceFileName, string eyeFileName, double scale, int neighbors, int minSize)
{
    List<Face> faces = new List<Face>();
    List<Rectangle> facesRect = new List<Rectangle>();
    List<Rectangle> eyesRect = new List<Rectangle>();
    try
    {
        using (CascadeClassifier face = createClassifier(faceFileName))
        {
            using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
            {
                // Equalize to improve cascade hit rate under uneven lighting.
                gray._EqualizeHist();
                Rectangle[] facesDetected = face.DetectMultiScale(gray, scale, neighbors, new Size(minSize, minSize), Size.Empty);
                foreach (Rectangle f in facesDetected)
                {
                    using (Image<Gray, Byte> faceImg = gray.GetSubRect(f))
                    {
                        using (Image<Gray, Byte> clone = faceImg.Clone())
                        {
                            Face facemodel = new Face();
                            // Capture FindEyes' result BEFORE wrapping it in a List: the old
                            // code did `new List<Rectangle>(FindEyes(...))` and then checked
                            // for null, but the List constructor throws on a null source, so
                            // a null result aborted the whole detection pass via the catch.
                            var foundEyes = FindEyes(eyeFileName, clone);
                            if (foundEyes == null)
                            {
                                continue;
                            }
                            eyesRect = new List<Rectangle>(foundEyes);
                            if (eyesRect.Count == 0)
                            {
                                continue;
                            }
                            facemodel.EyesRects = eyesRect;
                            facemodel.EyesCount = eyesRect.Count;
                            facemodel.FaceImage = clone.Bitmap;
                            facemodel.Height = facemodel.FaceImage.Height;
                            facemodel.Width = facemodel.FaceImage.Width;
                            facemodel.FaceRect = f;
                            facemodel.FramePosX = f.X;
                            facemodel.FramePosY = f.Y;
                            facemodel.ImageFrameSize = image.Size;
                            Gray avgf = new Gray();
                            MCvScalar avstd = new MCvScalar();
                            clone.AvgSdv(out avgf, out avstd);
                            facemodel.StdDev = avstd.V0;
                            faces.Add(facemodel);
                            if (faces.Count % 5 == 0)
                                Console.WriteLine("FaceDetect OpenCL every5 Add faceModel" + facemodel.Width);
                            // Only the first qualifying face per frame is processed.
                            break;
                        }
                    }
                    // Clear any ROI left on the gray image before the next iteration.
                    gray.ROI = Rectangle.Empty;
                }
            }
        }
    }
    catch (Exception errFaceDet)
    {
        Console.WriteLine("ERROR - faceDetect OpenCL =" + errFaceDet);
    }
    return faces;
}
/// <summary>
/// Applies the given face to the UI. Currently an empty stub — the on-screen
/// placement logic has not been implemented yet.
/// </summary>
/// <param name="face">The captured face to display.</param>
private void SetFace(Face face) { // do some logic to set face on screen. }
/// <summary>
/// Event handler fired when a new face has been captured. Draws the face rectangle and
/// summary text onto a resized copy of the full-color capture and shows it in the
/// peoplePicture box. Frames without a usable full-color image (or narrower than 50 px)
/// are ignored.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="face">The captured face carrying the full-color frame image.</param>
private void FaceCapture_FaceCaptured(object sender, Face face)
{
    Console.WriteLine(" FaceCapture_FaceCaptured= new face captured" + face);
    if (face != null && face.FaceImageFullColr != null && face.FaceImageFullColr.Width > 50)
    {
        // Guard: the Faces list could be empty (e.g. cleared) by the time the
        // event is handled; indexing with -1 would throw.
        lastFaceIndex = faceCapture.Faces.Count - 1;
        if (lastFaceIndex < 0)
        {
            return;
        }
        // Two usings: the old code leaked the un-resized intermediate image
        // created by `new Image<Bgr, byte>(...)` before .Resize().
        using (Image<Bgr, byte> fullFrame = new Image<Bgr, byte>(face.FaceImageFullColr))
        using (Image<Bgr, byte> currentFrame = fullFrame.Resize(faceCapture.reductionRatio, Inter.Cubic))
        {
            double rs = (1.0 * peoplePicture.Size.Width / currentFrame.Size.Width);
            Face currentFace = faceCapture.Faces[lastFaceIndex];
            /// depreciated v2 - MCvFont f = new MCvFont(CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
            Rectangle rectFace = currentFace.FaceRect;
            currentFrame.Draw(rectFace, drawBoxColor, 3);
            String displayFaceData = " W=" + currentFace.Width + " Mo=" + currentFace.MotionPixels; //"F#=" + faceCapture.Faces.Count + " Rx,y=" + rectFace.X + "," + rectFace.Y ;
            //displayFaceData = displayFaceData + " P=" + currentFace.FramePosX + " W=" + currentFace.Width + " Mo=" + currentFace.MotionPixels;
            CvInvoke.PutText(
                currentFrame,
                displayFaceData,
                new System.Drawing.Point(1, 60),
                FontFace.HersheyComplex,
                rs * 4,
                new Bgr(0, 0, 255).MCvScalar,
                6);
            // Dispose the final resized frame after converting it to a Bitmap for
            // the PictureBox (the old code leaked this Resize result too).
            using (Image<Bgr, byte> display = currentFrame.Resize(rs, Inter.Cubic))
            {
                peoplePicture.Image = display.ToBitmap();
            }
        }
    }
}
/// <summary>
/// Event handler for a captured face: delegates straight to <see cref="SetFace"/>
/// to update the display.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="face">The face that was captured.</param>
private void FaceCaptured(object sender, Face face) { // deal with face being captured here. (update) SetFace(face); }
/// <summary>
/// Maps a face's horizontal position in the frame to an index into the
/// plantLifeImagesOver overlay collection: the further right the face, the
/// higher the index.
/// </summary>
/// <param name="faceTarget">Face whose FramePosX selects the overlay.</param>
/// <returns>A valid index in [0, plantLifeImagesOver.Count() - 1], or 0 when the collection is empty.</returns>
public int findOverlayIndexByFaceData(Face faceTarget)
{
    int totalNumber = plantLifeImagesOver.Count();
    if (totalNumber == 0)
    {
        return 0;
    }
    // 1299 appears to be the assumed maximum frame X coordinate — TODO confirm
    // against the actual capture width.
    double scaleIndex = (double)faceTarget.FramePosX / 1299.0;
    // Clamp both ends: a negative FramePosX would otherwise yield a negative index.
    if (scaleIndex < 0.0) scaleIndex = 0.0;
    if (scaleIndex > 0.99) scaleIndex = 0.99;
    // Truncate instead of Convert.ToInt32: Convert rounds to nearest, so e.g.
    // 0.99 * 10 = 9.9 rounded up to 10 — one past the last valid index.
    int olay = (int)(scaleIndex * totalNumber);
    if (olay >= totalNumber)
    {
        olay = totalNumber - 1;
    }
    return olay;
}