Example #1
        public List<int> FindFacePoints(string image)
        {
            // Run detection on the file; results are stored in faceDetectRects and eyeDetectResults
            DetectFile(image);

            if (this.faceDetectRects.Count != 1)
            {
                string error = String.Format("Found {0} faces (not exactly 1!) in image {1}", this.faceDetectRects.Count, image);
                throw new System.Exception(error);
            }

            EyeDetectResult eyeResult = this.eyeDetectResults[0];
            double          lx        = eyeResult.LeftEye.X;
            double          ly        = eyeResult.LeftEye.Y;
            double          rx        = eyeResult.RightEye.X;
            double          ry        = eyeResult.RightEye.Y;

            if (eyeResult is FaceFeatureResult)
            {
                // A FaceFeatureResult also carries mouth points, so the mouth midpoint is passed as an extra anchor
                FaceFeatureResult faceResult = eyeResult as FaceFeatureResult;
                return(FaceAnchorPoints(lx, ly, rx, ry,
                                        (faceResult.LeftMouth.X + faceResult.RightMouth.X) / 2, (faceResult.LeftMouth.Y + faceResult.RightMouth.Y) / 2));
            }
            else
            {
                return(FaceAnchorPoints(lx, ly, rx, ry));
            }
        }
Example #2
        //static void RunDetection(string filename, Rect rect, Point leftEye, Point rightEye, ref byte [] facePix, Rect faceRect)
        // Runs eye/feature detection on a pre-extracted face buffer and writes the coordinates
        // from faceData alongside the detected coordinates to _outStream
        static void RunDetection(string filename, Rect rect, FaceDisp.FaceData faceData, ref byte [] facePix, Rect faceRect)
        {
            EyeDetect eyeDetect       = new EyeDetect();
            int       byteCountPerPix = (int)(facePix.Length / faceRect.Width / faceRect.Height);

            bool isSuccess = eyeDetect.SetAlgorithm(_algo, _algoData);

            if (isSuccess)
            {
                // _faceDisplayWidth is passed for both dimensions: the scaled face buffer is square
                // (the Y coordinates below are normalized by _faceDisplayWidth as well)
                EyeDetectResult eyeResult = eyeDetect.Detect(facePix, (int)_faceDisplayWidth, (int)_faceDisplayWidth);

                _outStream.WriteLine("{0}", filename);
                _outStream.Write("{0} {1} {2} {3} ", (int)rect.X, (int)rect.Y, (int)rect.Width, (int)rect.Height);

                if (faceData.TrueLeftEye.X > 1.0)
                {
                    Point leftEye  = FaceFeatureToScaledPoint(faceData.TrueLeftEye, rect);
                    Point rightEye = FaceFeatureToScaledPoint(faceData.TrueRightEye, rect);

                    _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ", leftEye.X, leftEye.Y, rightEye.X, rightEye.Y);
                }
                else
                {
                    _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ", faceData.TrueLeftEye.X, faceData.TrueLeftEye.Y, faceData.TrueRightEye.X, faceData.TrueRightEye.Y);
                }
                _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ", eyeResult.LeftEye.X / _faceDisplayWidth, eyeResult.LeftEye.Y / _faceDisplayWidth,
                                 eyeResult.RightEye.X / _faceDisplayWidth, eyeResult.RightEye.Y / _faceDisplayWidth);

                FaceFeatureResult res = eyeResult as FaceFeatureResult;
                if (null != res)
                {
                    if (faceData.Nose.X > 1.0)
                    {
                        Point nose       = FaceFeatureToScaledPoint(faceData.Nose, rect);
                        Point leftMouth  = FaceFeatureToScaledPoint(faceData.LeftMouth, rect);
                        Point rightMouth = FaceFeatureToScaledPoint(faceData.RightMouth, rect);

                        _outStream.Write("{0:F3} {1:F3} ", nose.X, nose.Y);
                        _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ", leftMouth.X, leftMouth.Y, rightMouth.X, rightMouth.Y);
                    }
                    else
                    {
                        _outStream.Write("{0:F3} {1:F3} ", faceData.Nose.X, faceData.Nose.Y);
                        _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ", faceData.LeftMouth.X, faceData.LeftMouth.Y, faceData.RightMouth.X, faceData.RightMouth.Y);
                    }
                    _outStream.Write("{0:F3} {1:F3} ", res.Nose.X / _faceDisplayWidth, res.Nose.Y / _faceDisplayWidth);
                    _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ", res.LeftMouth.X / _faceDisplayWidth, res.LeftMouth.Y / _faceDisplayWidth,
                                     res.RightMouth.X / _faceDisplayWidth, res.RightMouth.Y / _faceDisplayWidth);
                }
                if (_maxTransformCount > 0)
                {
                    _outStream.Write("{0:F3} {1:F3} {2:F3}", _transform.Theta, _transform.X, _transform.Y);
                }
                _outStream.WriteLine();
            }
            else
            {
                _outStream.WriteLine("Detection failed on {0}", filename);
            }
        }
Example #3
        /// <summary>
        /// Detect eyes in each detected face. Note the eye detector runs only on the face-detected
        /// portion of a photo, so face detection must be run first.
        /// In this method the whole photo is passed to the eye detector together with a face rect.
        /// The eye detector extracts the face, scales it, and converts it to grayscale before running the detector.
        /// If your calling code has already extracted and converted the input photo then
        /// it is much more efficient to call the eye Detect method that accepts this data.
        /// </summary>
        private void RunEyeDetection()
        {
            Bitmap     photoBitMap = (Bitmap)photoImage;
            Rectangle  rect        = new Rectangle(0, 0, photoBitMap.Width, photoBitMap.Height);
            BitmapData data        = photoBitMap.LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);

            int bytes = data.Stride * photoBitMap.Height;

            byte[] rgbValues = new byte[bytes];

            // Copy the RGB values into the array.
            System.Runtime.InteropServices.Marshal.Copy(data.Scan0, rgbValues, 0, bytes);

            DateTime start = DateTime.Now;

            foreach (ScoredRect r in faceDetectRects)
            {
                Rectangle faceRect = new Rectangle(r.X, r.Y, r.Width, r.Height);

                // This is fairly inefficient as the face must first be extracted and scaled before eye detection is run

                EyeDetectResult eyeResult   = eyeDetect.Detect(rgbValues, photoBitMap.Width, photoBitMap.Height, data.Stride, faceRect);
                float           eyeRectLen  = eyeMark * faceRect.Width;
                float           eyeRectLen2 = eyeRectLen / 2.0F;

                // Save the rects that will be displayed
                leftEyeRects.Add(new RectangleF((float)eyeResult.LeftEye.X - eyeRectLen2,
                                                (float)eyeResult.LeftEye.Y - eyeRectLen2,
                                                eyeRectLen, eyeRectLen));
                rightEyeRects.Add(new RectangleF((float)eyeResult.RightEye.X - eyeRectLen2,
                                                 (float)eyeResult.RightEye.Y - eyeRectLen2,
                                                 eyeRectLen, eyeRectLen));

                if (eyeResult is FaceFeatureResult)
                {
                    FaceFeatureResult faceResult = eyeResult as FaceFeatureResult;
                    noseRects.Add(new RectangleF((float)faceResult.Nose.X - eyeRectLen2,
                                                 (float)faceResult.Nose.Y - eyeRectLen2,
                                                 eyeRectLen, eyeRectLen));
                    leftMouthRects.Add(new RectangleF((float)faceResult.LeftMouth.X - eyeRectLen2,
                                                      (float)faceResult.LeftMouth.Y - eyeRectLen2,
                                                      eyeRectLen, eyeRectLen));
                    rightMouthRects.Add(new RectangleF((float)faceResult.RightMouth.X - eyeRectLen2,
                                                       (float)faceResult.RightMouth.Y - eyeRectLen2,
                                                       eyeRectLen, eyeRectLen));
                }
            }
            TimeSpan detectTime = new TimeSpan(DateTime.Now.Ticks - start.Ticks);

            // TotalMilliseconds reports the full elapsed time (Milliseconds is only the 0-999 ms component)
            textBoxEyeDetect.Text = detectTime.TotalMilliseconds.ToString("F0");

            photoBitMap.UnlockBits(data);
        }
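The summary above notes that passing the whole photo is the slower path, because the detector must extract, scale, and convert the face region before it can run; Example #2 shows the overload that takes an already-prepared face buffer. Below is a minimal sketch of that faster call, assuming the caller has produced the scaled face pixels itself (facePix, faceWidth, and faceHeight are illustrative names, and the algorithm arguments follow Example #2):

        // Sketch only: facePix is assumed to already hold the extracted, scaled face pixels,
        // with faceWidth/faceHeight giving the dimensions of that buffer.
        EyeDetect eyeDetect = new EyeDetect();

        if (eyeDetect.SetAlgorithm(_algo, _algoData))
        {
            // Same Detect overload as Example #2: no photo-wide stride or face rect is needed
            EyeDetectResult eyeResult = eyeDetect.Detect(facePix, faceWidth, faceHeight);

            Console.WriteLine("Eyes at ({0}, {1}) and ({2}, {3})",
                              eyeResult.LeftEye.X, eyeResult.LeftEye.Y,
                              eyeResult.RightEye.X, eyeResult.RightEye.Y);
        }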
Example #4
        public void PrintResults()
        {
            for (int f = 0; f < faceDetectRects.Count; f++)
            {
                ScoredRect      r = faceDetectRects[f];
                EyeDetectResult e = eyeDetectResults[f];
                Console.Write("Detect {0} {1} {2} {3}  ", r.X, r.Y, r.Width, r.Height);
                Console.Write("Eye {0} {1} {2} {3}  ", e.LeftEye.X, e.LeftEye.Y, e.RightEye.X, e.RightEye.Y);
                if (e is FaceFeatureResult)
                {
                    FaceFeatureResult faceResult = e as FaceFeatureResult;
                    Console.Write("Nose {0} {1}  ", faceResult.Nose.X, faceResult.Nose.Y);
                    Console.Write("Mouth {0} {1} {2} {3}  ", faceResult.LeftMouth.X, faceResult.LeftMouth.Y, faceResult.RightMouth.X, faceResult.RightMouth.Y);
                }

                Console.WriteLine();
            }
        }