/// <summary>
/// Runs eye detection on the cached face pixel buffer and maps the detected eye
/// positions from the detector's working coordinates back into photo coordinates.
/// </summary>
/// <param name="faceRect">Face rectangle in photo coordinates.</param>
/// <param name="leftEye">Receives the left eye position in photo coordinates.</param>
/// <param name="rightEye">Receives the right eye position in photo coordinates.</param>
/// <returns>False when no face pixels have been cached yet; true otherwise.</returns>
private bool DoEyeDetect(Rect faceRect, out System.Windows.Point leftEye, out System.Windows.Point rightEye)
{
    leftEye = new System.Windows.Point();
    rightEye = new System.Windows.Point();

    // Nothing to detect on until face detection has populated the pixel cache.
    if (_facePixs == null)
    {
        return false;
    }

    // Create the detector lazily on first use.
    if (_eyeDetect == null)
    {
        _eyeDetect = new EyeDetect();
    }

    EyeDetectResult eyeResult = _eyeDetect.Detect(_facePixs, (int)_eyeDetectFaceRect.Width, (int)_eyeDetectFaceRect.Height);

    // Scale detector-space coordinates back to the face rect and offset into the photo.
    leftEye.X = eyeResult.LeftEye.X * faceRect.Width / _eyeDetectFaceRect.Width + faceRect.X;
    leftEye.Y = eyeResult.LeftEye.Y * faceRect.Height / _eyeDetectFaceRect.Height + faceRect.Y;
    rightEye.X = eyeResult.RightEye.X * faceRect.Width / _eyeDetectFaceRect.Width + faceRect.X;
    rightEye.Y = eyeResult.RightEye.Y * faceRect.Height / _eyeDetectFaceRect.Height + faceRect.Y;

    return true;
}
/// <summary>
/// Runs eye (and, when the algorithm supports it, full face-feature) detection on a
/// pre-extracted face pixel buffer and writes one record to the output stream:
/// filename, face rect, ground-truth eye positions, detected eye positions, and
/// optionally nose/mouth positions and the current transform. Detected coordinates
/// are normalized by dividing by the face display size.
/// </summary>
/// <param name="filename">Source photo path, written as the record header.</param>
/// <param name="rect">Face rectangle in photo coordinates.</param>
/// <param name="faceData">Ground-truth feature positions for this face.</param>
/// <param name="facePix">Extracted and scaled face pixels the detector runs on.</param>
/// <param name="faceRect">Rectangle describing the extracted face buffer.</param>
static void RunDetection(string filename, Rect rect, FaceDisp.FaceData faceData, ref byte [] facePix, Rect faceRect)
{
    EyeDetect eyeDetect = new EyeDetect();

    bool isSuccess = eyeDetect.SetAlgorithm(_algo, _algoData);
    if (!isSuccess)
    {
        _outStream.WriteLine("Detection failed on {0}", filename);
        return;
    }

    // NOTE(review): _faceDisplayWidth is passed for BOTH dimensions, and Y results
    // below are also normalized by width — this assumes the detector's working face
    // image is square. Confirm, or introduce a _faceDisplayHeight if it is not.
    EyeDetectResult eyeResult = eyeDetect.Detect(facePix, (int)_faceDisplayWidth, (int)_faceDisplayWidth);

    _outStream.WriteLine("{0}", filename);
    _outStream.Write("{0} {1} {2} {3} ", (int)rect.X, (int)rect.Y, (int)rect.Width, (int)rect.Height);

    // Ground-truth eyes: X > 1.0 indicates absolute photo coordinates that need
    // rescaling; otherwise the values are already normalized and pass through as-is.
    if (faceData.TrueLeftEye.X > 1.0)
    {
        Point leftEye = FaceFeatureToScaledPoint(faceData.TrueLeftEye, rect);
        Point rightEye = FaceFeatureToScaledPoint(faceData.TrueRightEye, rect);
        _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ", leftEye.X, leftEye.Y, rightEye.X, rightEye.Y);
    }
    else
    {
        _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ",
            faceData.TrueLeftEye.X, faceData.TrueLeftEye.Y,
            faceData.TrueRightEye.X, faceData.TrueRightEye.Y);
    }

    // Detected eyes, normalized by the face display size.
    _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ",
        eyeResult.LeftEye.X / _faceDisplayWidth, eyeResult.LeftEye.Y / _faceDisplayWidth,
        eyeResult.RightEye.X / _faceDisplayWidth, eyeResult.RightEye.Y / _faceDisplayWidth);

    // Nose and mouth corners are only available when the algorithm returns the
    // richer FaceFeatureResult.
    FaceFeatureResult res = eyeResult as FaceFeatureResult;
    if (res != null)
    {
        // Ground-truth nose/mouth follow the same absolute-vs-normalized convention.
        if (faceData.Nose.X > 1.0)
        {
            Point nose = FaceFeatureToScaledPoint(faceData.Nose, rect);
            Point leftMouth = FaceFeatureToScaledPoint(faceData.LeftMouth, rect);
            Point rightMouth = FaceFeatureToScaledPoint(faceData.RightMouth, rect);
            _outStream.Write("{0:F3} {1:F3} ", nose.X, nose.Y);
            _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ", leftMouth.X, leftMouth.Y, rightMouth.X, rightMouth.Y);
        }
        else
        {
            _outStream.Write("{0:F3} {1:F3} ", faceData.Nose.X, faceData.Nose.Y);
            _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ",
                faceData.LeftMouth.X, faceData.LeftMouth.Y,
                faceData.RightMouth.X, faceData.RightMouth.Y);
        }

        // Detected nose/mouth, normalized by the face display size.
        _outStream.Write("{0:F3} {1:F3} ", res.Nose.X / _faceDisplayWidth, res.Nose.Y / _faceDisplayWidth);
        _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ",
            res.LeftMouth.X / _faceDisplayWidth, res.LeftMouth.Y / _faceDisplayWidth,
            res.RightMouth.X / _faceDisplayWidth, res.RightMouth.Y / _faceDisplayWidth);
    }

    if (_maxTransformCount > 0)
    {
        _outStream.Write("{0:F3} {1:F3} {2:F3}", _transform.Theta, _transform.X, _transform.Y);
    }
    _outStream.WriteLine();
}
/// <summary>
/// Detect eyes in each detected face. Note the eye detector runs only on the face-detected
/// portion of a photo, so face detection must be run first.
/// In this method the whole photo is passed to the eye detector together with a face rect.
/// The eye detector extracts the face, scales it and converts it to grayscale before running
/// the detector. If your calling code has already extracted and converted the input photo
/// then it is much more efficient to call the eye Detect method that accepts this data.
/// </summary>
private void RunEyeDetection()
{
    Bitmap photoBitMap = (Bitmap)photoImage;
    Rectangle rect = new Rectangle(0, 0, photoBitMap.Width, photoBitMap.Height);
    BitmapData data = photoBitMap.LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
    try
    {
        int bytes = data.Stride * photoBitMap.Height;
        byte[] rgbValues = new byte[bytes];

        // Copy the RGB values into the array.
        System.Runtime.InteropServices.Marshal.Copy(data.Scan0, rgbValues, 0, bytes);

        // Stopwatch gives monotonic elapsed time; DateTime.Now can jump with clock changes.
        System.Diagnostics.Stopwatch timer = System.Diagnostics.Stopwatch.StartNew();

        foreach (ScoredRect r in faceDetectRects)
        {
            Rectangle faceRect = new Rectangle(r.X, r.Y, r.Width, r.Height);

            // This is fairly inefficient as the face must first be extracted and scaled
            // before eye detection is run.
            EyeDetectResult eyeResult = eyeDetect.Detect(rgbValues, photoBitMap.Width, photoBitMap.Height, data.Stride, faceRect);

            float eyeRectLen = eyeMark * faceRect.Width;
            float eyeRectLen2 = eyeRectLen / 2.0F;

            // Save the rects that will be displayed.
            leftEyeRects.Add(new RectangleF((float)eyeResult.LeftEye.X - eyeRectLen2, (float)eyeResult.LeftEye.Y - eyeRectLen2, eyeRectLen, eyeRectLen));
            rightEyeRects.Add(new RectangleF((float)eyeResult.RightEye.X - eyeRectLen2, (float)eyeResult.RightEye.Y - eyeRectLen2, eyeRectLen, eyeRectLen));

            // Nose and mouth rects are only available from the richer FaceFeatureResult.
            FaceFeatureResult faceResult = eyeResult as FaceFeatureResult;
            if (faceResult != null)
            {
                noseRects.Add(new RectangleF((float)faceResult.Nose.X - eyeRectLen2, (float)faceResult.Nose.Y - eyeRectLen2, eyeRectLen, eyeRectLen));
                leftMouthRects.Add(new RectangleF((float)faceResult.LeftMouth.X - eyeRectLen2, (float)faceResult.LeftMouth.Y - eyeRectLen2, eyeRectLen, eyeRectLen));
                rightMouthRects.Add(new RectangleF((float)faceResult.RightMouth.X - eyeRectLen2, (float)faceResult.RightMouth.Y - eyeRectLen2, eyeRectLen, eyeRectLen));
            }
        }

        timer.Stop();

        // BUG FIX: the original used TimeSpan.Milliseconds, which is only the 0-999 ms
        // component of the interval and under-reports any detection run of a second or
        // more. ElapsedMilliseconds is the total elapsed time.
        textBoxEyeDetect.Text = timer.ElapsedMilliseconds.ToString();
    }
    finally
    {
        // Always unlock the bitmap, even if detection throws.
        photoBitMap.UnlockBits(data);
    }
}