/// <summary>
/// Detects faces in the still image currently loaded into <c>PictureForm.image</c>,
/// labels each recognized face with the matching person's name, draws a box around
/// every detected face, and displays the annotated image in <c>picBox</c>.
/// </summary>
public void ProcessFrameForPicture()
{
    if (PictureForm.image == null)
    {
        return;
    }

    // CascadeClassifier wraps native resources and is IDisposable; the original
    // instance was created on every call and never disposed (leak).
    using (var cascadeClassifier = new CascadeClassifier(
        System.IO.Path.Combine(Application.StartupPath, "haarcascade_frontalface_default.xml")))
    {
        // NOTE: do NOT wrap imageFrame in `using` — it aliases PictureForm.image
        // and is handed to picBox below. The original disposed it at the end of
        // its `using` block, leaving picBox holding a disposed image.
        var imageFrame = PictureForm.image;

        using (var grayFrame = imageFrame.Convert<Gray, Byte>())
        {
            // The actual face detection happens here.
            var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

            // Build the recognizer-index -> name table once, not once per face
            // as the original did.
            List<String> names = (from p in FirstPageView.persons select p.Name).ToList();

            foreach (var face in faces)
            {
                var faceImage = ImageHandler.ResizeImage(imageFrame.Copy(face));
                var grayFace = faceImage.Convert<Gray, Byte>();
                var result = Recognizer.RecognizeFace(grayFace);

                // Recognizer labels are 1-based; also guard the upper bound so a
                // stale training set cannot throw ArgumentOutOfRangeException.
                if (result > 0 && result <= names.Count)
                {
                    CvInvoke.PutText(
                        imageFrame,
                        names[result - 1],
                        new Point(face.Location.X + 10, face.Location.Y - 10),
                        Emgu.CV.CvEnum.FontFace.HersheyComplex,
                        1.0,
                        new Bgr(0, 255, 0).MCvScalar);
                }

                // The detected face is highlighted here using a box drawn around it.
                imageFrame.Draw(face, new Bgr(Color.BurlyWood), 3);
            }
        }

        picBox.Image = imageFrame;
    }
}
/// <summary>
/// Plays the loaded video frame by frame: seeks to each frame, runs face
/// detection/recognition on it, overlays recognized names, shows the frame in
/// <c>pictureBox1</c>, and keeps <c>trackBar1</c> in sync. Paced with
/// <c>Task.Delay</c> so the UI thread stays responsive. <c>async void</c> is
/// retained deliberately: this is a fire-and-forget UI entry point.
/// </summary>
private async void PlayVideo()
{
    if (videocapture == null)
    {
        return;
    }

    // CascadeClassifier is IDisposable; the original leaked it on every call.
    using (var cascadeClassifier = new CascadeClassifier(
        System.IO.Path.Combine(Application.StartupPath, "haarcascade_frontalface_default.xml")))
    {
        try
        {
            while (IsPlaying && CurrentFrameNo < TotalFrames)
            {
                videocapture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames, CurrentFrameNo);
                videocapture.Read(CurrentFrame);

                // Convert Mat -> Image<Bgr, byte> so Emgu's drawing helpers apply.
                Image<Bgr, byte> imageFrame = CurrentFrame.ToImage<Bgr, Byte>();
                if (imageFrame != null)
                {
                    var grayFrame = imageFrame.Convert<Gray, Byte>();
                    // The actual face detection happens here.
                    var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

                    // Build the recognizer-index -> name table once per frame,
                    // not once per detected face as the original did.
                    List<String> names = (from p in FirstPageView.persons select p.Name).ToList();

                    foreach (var face in faces)
                    {
                        var faceImage = ImageHandler.ResizeImage(imageFrame.Copy(face));
                        var grayFace = faceImage.Convert<Gray, Byte>();
                        var result = Recognizer.RecognizeFace(grayFace);

                        // The original's `result != 0` accepted negative labels,
                        // making `ElementAt(result - 1)` throw. Guard both bounds,
                        // consistent with ProcessFrameForPicture.
                        if (result > 0 && result <= names.Count)
                        {
                            CvInvoke.PutText(
                                imageFrame,
                                names[result - 1],
                                new Point(face.Location.X + 10, face.Location.Y - 10),
                                Emgu.CV.CvEnum.FontFace.HersheyComplex,
                                1.0,
                                new Bgr(0, 255, 0).MCvScalar);
                        }
                    }

                    // Convert Emgu.CV image back to Mat (moved inside the null
                    // guard — the original dereferenced imageFrame outside it).
                    CurrentFrame = imageFrame.Mat;
                }

                pictureBox1.Image = CurrentFrame.Bitmap; // convert Mat to Bitmap
                trackBar1.Value = CurrentFrameNo;
                CurrentFrameNo += 1;

                // Pace playback at the source frame rate without blocking the UI.
                await Task.Delay(1000 / FPS);
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message);
        }
    }

    // Rewind after the video has been played through. The loop increments past
    // the final frame, so the counter exits at TotalFrames; the original
    // `== TotalFrames - 1` comparison never matched and the rewind never ran.
    if (CurrentFrameNo >= TotalFrames)
    {
        CurrentFrameNo = 0;
    }
}