/// <summary>
/// Detects faces in the captured frame and steers the pan/tilt rig toward the
/// first face found, falling back to the centre point when no face is present.
/// </summary>
/// <param name="baseInput">Camera input carrying the captured frame.</param>
/// <returns>The pan/tilt reaction, annotated with any detected faces.</returns>
protected override FaceTrackingPanTiltOutput DoProcess(CameraProcessInput baseInput)
{
    // Eye detection is unnecessary for tracking, so it is switched off.
    var detectorInput = new FaceDetectorInput
    {
        Captured = baseInput.Captured,
        DetectEyes = false
    };

    var detection = _faceDetector.Process(detectorInput);

    // Aim at the first detected face; otherwise hold the centre.
    var aimPoint = CentrePoint;
    if (detection.Faces.Count > 0)
    {
        aimPoint = detection.Faces[0].Region.Center();
    }

    var output = ReactToTarget(aimPoint);
    output.Faces.AddRange(detection.Faces);

    if (detectorInput.SetCapturedImage)
    {
        output.CapturedImage = detectorInput.Captured.ToImage<Bgr, byte>();
    }

    return output;
}
/// <summary>
/// Handles a grabbed camera frame: runs face detection (optionally with eyes),
/// draws detection rectangles when requested, optionally overlays sunglasses on
/// the first face, and publishes the annotated image plus timing status.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event payload (unused).</param>
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var frame = new Mat())
    {
        CameraCapture.Retrieve(frame);

        var detectorInput = new FaceDetectorInput
        {
            Captured = frame,
            DetectEyes = chkDetectEyes.Checked
        };

        var detection = _faceDetector.Process(detectorInput);
        var annotated = detection.CapturedImage;
        IImage display = annotated;

        if (chkRectangles.Checked)
        {
            foreach (var face in detection.Faces)
            {
                // Face outline in red.
                annotated.Draw(face.Region, new Bgr(Color.Red), 2);

                // Each eye is outlined in blue and labelled with its 1-based index.
                var eyeIndex = 0;
                foreach (Rectangle eye in face.Eyes)
                {
                    eyeIndex++;
                    annotated.Draw(eye, new Bgr(Color.Blue), 2);
                    annotated.Draw(eyeIndex.ToString(), eye.Location, FontFace.HersheyComplexSmall, 2, new Bgr(Color.Blue));
                }
            }
        }

        // Sunglasses use the eye positions of the first detected face only.
        if (chkSunnies.Checked && detection.Faces.Count > 0)
        {
            display = WearSunnies(annotated, detection.Faces[0].Eyes);
        }

        imageBox.Image = display;
        NotifyStatus("Face detection took {0}", detection.Elapsed.ToHumanReadable());
    }
}
/// <summary>
/// Handles a grabbed camera frame: runs face detection with the configured
/// classifier parameters, draws face/eye rectangles (optionally with the face
/// rectangle's dimensions), composites sunglasses and/or a hat onto every
/// detected face, and publishes the result plus timing status.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event payload (unused).</param>
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var frame = new Mat())
    {
        CameraCapture.Retrieve(frame);

        var detectorInput = new FaceDetectorInput
        {
            Captured = frame,
            DetectEyes = chkDetectEyes.Checked,
            ClassifierParams = _classiferParams
        };

        var detection = _faceDetector.Process(detectorInput);
        var annotated = detection.CapturedImage;

        if (chkRectangles.Checked)
        {
            foreach (var face in detection.Faces)
            {
                var faceColour = new Bgr(Color.Red);
                annotated.Draw(face.Region, faceColour, 2);

                if (chkShowRectDimensions.Checked)
                {
                    // Label the face rectangle with its pixel dimensions.
                    annotated.Draw(
                        string.Format("{0}x{1}", face.Region.Width, face.Region.Height),
                        face.Region.Location,
                        FontFace.HersheyComplexSmall,
                        2,
                        faceColour);
                }

                // Each eye is outlined in blue and labelled with its 1-based index.
                var eyeIndex = 0;
                foreach (Rectangle eye in face.Eyes)
                {
                    eyeIndex++;
                    annotated.Draw(eye, new Bgr(Color.Blue), 2);
                    annotated.Draw(eyeIndex.ToString(), eye.Location, FontFace.HersheyComplexSmall, 2, new Bgr(Color.Blue));
                }
            }
        }

        // Accessories are composited on a BGRA copy so alpha blending is possible.
        var composited = annotated.Mat.ToImage<Bgra, byte>();
        foreach (var face in detection.Faces)
        {
            if (chkSunnies.Checked)
            {
                composited = WearSunnies2(composited, face);
            }
            if (chkHat.Checked)
            {
                composited = WearHat(composited, face);
            }
        }

        imageBox.Image = composited;
        NotifyStatus("Face detection took {0}", detection.Elapsed.ToHumanReadable());
    }
}
/// <summary>
/// Handles a grabbed camera frame: detects faces using the configured
/// classifier parameters, optionally draws face/eye rectangles and the face
/// rectangle's dimensions, then layers sunglasses and/or a hat over each
/// detected face before displaying the image and reporting elapsed time.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event payload (unused).</param>
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var grabbed = new Mat())
    {
        CameraCapture.Retrieve(grabbed);

        var request = new FaceDetectorInput
        {
            Captured = grabbed,
            DetectEyes = chkDetectEyes.Checked,
            ClassifierParams = _classiferParams
        };

        var outcome = _faceDetector.Process(request);
        var bgrImage = outcome.CapturedImage;

        if (chkRectangles.Checked)
        {
            foreach (var detectedFace in outcome.Faces)
            {
                var outlineColour = new Bgr(Color.Red);
                bgrImage.Draw(detectedFace.Region, outlineColour, 2);

                if (chkShowRectDimensions.Checked)
                {
                    // Write the rectangle's width x height at its top-left corner.
                    bgrImage.Draw(
                        string.Format("{0}x{1}", detectedFace.Region.Width, detectedFace.Region.Height),
                        detectedFace.Region.Location,
                        FontFace.HersheyComplexSmall,
                        2,
                        outlineColour);
                }

                // Number each eye (1-based) and box it in blue.
                var eyeNumber = 0;
                foreach (Rectangle eye in detectedFace.Eyes)
                {
                    eyeNumber++;
                    bgrImage.Draw(eye, new Bgr(Color.Blue), 2);
                    bgrImage.Draw(eyeNumber.ToString(), eye.Location, FontFace.HersheyComplexSmall, 2, new Bgr(Color.Blue));
                }
            }
        }

        // Convert to BGRA so the accessory overlays can be alpha-blended.
        var layered = bgrImage.Mat.ToImage<Bgra, byte>();
        foreach (var detectedFace in outcome.Faces)
        {
            if (chkSunnies.Checked)
            {
                layered = WearSunnies2(layered, detectedFace);
            }
            if (chkHat.Checked)
            {
                layered = WearHat(layered, detectedFace);
            }
        }

        imageBox.Image = layered;
        NotifyStatus("Face detection took {0}", outcome.Elapsed.ToHumanReadable());
    }
}