        private int RunDetectAndReport(string imageFileName, FaceData faceData)
        {
            DetectionResult   detectionResult  = null;
            List <ScoredRect> scoredResultList = null;

            try
            {
                _detector.SetTargetDimension(640, 480);
                detectionResult  = _detector.DetectObject(imageFileName);
                scoredResultList = detectionResult.GetMergedRectList(0.0F);
            }
            catch (Exception e)
            {
                Console.WriteLine();
                Console.WriteLine("{0}: {1}", imageFileName, e.Message);
                return(0);
            }

            if (scoredResultList.Count <= 0)
            {
                return(0);
            }

            if (true != ReadPhoto(imageFileName))
            {
                return(0);
            }


            int imageCount = 0;

            foreach (ScoredRect scoredRect in scoredResultList)
            {
                Rect rect = new Rect();

                rect.X      = scoredRect.X;
                rect.Y      = scoredRect.Y;
                rect.Width  = scoredRect.Width;
                rect.Height = scoredRect.Height;

                imageCount += ReportDetectedFace(imageFileName, rect, faceData);
            }

            return(imageCount);
        }
Example #2
        public void DetectFile(string file)
        {
            photoImage = Image.FromFile(file);
            imageScale = 1;

            photoRect = new Rectangle(0, 0, (int)(imageScale * photoImage.Size.Width), (int)(imageScale * photoImage.Size.Height));

            DateTime start = DateTime.Now;

            faceDetector.SetTargetDimension(640, 480);

            // Run face detection
            DetectionResult detectionResult = faceDetector.DetectObject(file);

            faceDetectRects = detectionResult.GetMergedRectList(detectionThreshold);
            TimeSpan detectTime = new TimeSpan(DateTime.Now.Ticks - start.Ticks);

            leftEyeRects.Clear();
            rightEyeRects.Clear();

            RunEyeDetection();
        }
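
A minimal drawing sketch for the detections produced above, assuming the same faceDetectRects and imageScale fields; the helper name, the Graphics parameter, and the pen settings are assumptions rather than part of the original sample.

        // Hypothetical helper: draws the merged face rectangles scaled from detector
        // coordinates into display coordinates using imageScale.
        private void DrawFaceRects(Graphics graphics)
        {
            using (Pen pen = new Pen(Color.Red, 2.0F))
            {
                foreach (ScoredRect scoredRect in faceDetectRects)
                {
                    graphics.DrawRectangle(pen,
                                           imageScale * scoredRect.X,
                                           imageScale * scoredRect.Y,
                                           imageScale * scoredRect.Width,
                                           imageScale * scoredRect.Height);
                }
            }
        }
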
Example #3
        /// <summary>
        /// Initialize a photo - run faceDetection
        /// </summary>
        /// <param name="mainCanvas">Main canvas reference</param>
        /// <param name="filename">Full path name to image file</param>
        public int InitializeWithFaceDetection(BackgroundCanvas mainCanvas, string filename)
        {
            if (null == _detector)
            {
                _detector = new FaceDetector(
                    mainCanvas.OptionDialog.FaceDetectorDataPath,
                    true,
                    mainCanvas.OptionDialog.FaceDetectorThreshold);
            }
            _detector.SetTargetDimension(mainCanvas.OptionDialog.FaceDetectTargetWidth,
                                         mainCanvas.OptionDialog.FaceDetectTargetHeight);

            DetectionResult   detectionResult  = _detector.DetectObject(filename);
            List <ScoredRect> scoredResultList = detectionResult.GetMergedRectList(0.0F);

            if (scoredResultList.Count <= 0)
            {
                return(0);
            }

            List <Rect> faceRects = new List <Rect>();

            foreach (ScoredRect scoredRect in scoredResultList)
            {
                Rect rect = new Rect();

                rect.X      = scoredRect.X;
                rect.Y      = scoredRect.Y;
                rect.Width  = scoredRect.Width;
                rect.Height = scoredRect.Height;

                faceRects.Add(rect);
            }

            _targetRect       = new Rect();
            _faceDisplayWidth = mainCanvas.OptionDialog.FaceDisplayWidth;
            _defaultDPI       = mainCanvas.OptionDialog.DefaultDPI;
            return(InitializeInternal(mainCanvas, filename, faceRects, mainCanvas.OptionDialog.BorderWidth, null));
        }
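
Both this example and Example #1 copy each ScoredRect into a plain Rect field by field. A small sketch of that conversion as a shared helper; the helper name is an assumption.

        // Hypothetical helper: converts the detector's scored rectangles into plain
        // Rect values, as done inline in Examples #1 and #3.
        private static List <Rect> ToRectList(List <ScoredRect> scoredRects)
        {
            List <Rect> rects = new List <Rect>();

            foreach (ScoredRect scoredRect in scoredRects)
            {
                Rect rect = new Rect();

                rect.X      = scoredRect.X;
                rect.Y      = scoredRect.Y;
                rect.Width  = scoredRect.Width;
                rect.Height = scoredRect.Height;

                rects.Add(rect);
            }

            return(rects);
        }
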
Example #4
        private void DetectFile(string file)
        {
            photoImage = Image.FromFile(file);
            imageScale = Math.Min((float)pictureBox1.Size.Width / photoImage.Size.Width,
                                  (float)pictureBox1.Size.Height / photoImage.Size.Height);

            photoRect = new Rectangle(0, 0, (int)(imageScale * photoImage.Size.Width), (int)(imageScale * photoImage.Size.Height));
            pictureBoxGraphics.Clear(Color.White);

            DateTime start = DateTime.Now;

            faceDetector.SetTargetDimension(640, 480);

            // Run face detection. There are a few ways of doing this, and they should
            // all yield the same result; which one to use depends on the form of your
            // image data. Try the different overloads by uncommenting them below.

            // Method 1 - Directly from a System.Drawing.Imaging.BitmapData object.
            // Note that only underlying data formats with 1 byte per colour plane are supported.
            Bitmap     bitmap     = new Bitmap(photoImage);
            BitmapData bitmapdata = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height),
                                                    System.Drawing.Imaging.ImageLockMode.ReadOnly, bitmap.PixelFormat);
            DetectionResult detectionResult = faceDetector.DetectObject(bitmapdata);

            bitmap.UnlockBits(bitmapdata);

            // Method 2 - Use the image file name. This works for jpg and some other common
            // formats, though format support is narrower than what the Windows decoders provide.
            //DetectionResult detectionResult = faceDetector.DetectObject(file);

            // Method 3 - Directly from a byte array. This code is included for illustration
            // only; it is not the suggested way of actually doing this.
            //Bitmap bitmap = new Bitmap(photoImage);
            //BitmapData bitmapdata = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height),
            //                                    System.Drawing.Imaging.ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
            //int byteCount = bitmapdata.Height * bitmapdata.Stride;
            //byte [] bytes = new byte[byteCount];
            //System.Runtime.InteropServices.Marshal.Copy(bitmapdata.Scan0, bytes, 0, byteCount);
            //DetectionResult detectionResult = faceDetector.DetectObject(bitmapdata.Width,
            //                                                            bitmapdata.Height,
            //                                                            bitmapdata.Stride,
            //                                                            3,                  // 3 Bytes per Pixel
            //                                                            bytes);
            //bitmap.UnlockBits(bitmapdata);



            faceDetectRects = detectionResult.GetMergedRectList((float)numericUpDownFaceDetectThreshold.Value);
            TimeSpan detectTime = new TimeSpan(DateTime.Now.Ticks - start.Ticks);

            textBoxFaceDetectTime.Text = detectTime.TotalMilliseconds.ToString();

            leftEyeRects.Clear();
            rightEyeRects.Clear();
            noseRects.Clear();
            leftMouthRects.Clear();
            rightMouthRects.Clear();

            if (true == checkBoxEyeDetect.Checked)
            {
                RunEyeDetection();
            }
        }
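
For completeness, a sketch of the commented-out Method 3 above factored into a standalone helper. It assumes the same faceDetector field and the byte-array DetectObject(width, height, stride, bytesPerPixel, bytes) overload referenced in those comments; as the original comment notes, this path is for illustration rather than the recommended approach.

        // Hypothetical helper derived from the commented-out Method 3: copies the image
        // into a contiguous 24bpp byte array and hands it to the detector.
        private DetectionResult DetectFromBytes(Image image)
        {
            Bitmap     bitmap     = new Bitmap(image);
            BitmapData bitmapdata = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height),
                                                    System.Drawing.Imaging.ImageLockMode.ReadOnly,
                                                    PixelFormat.Format24bppRgb);

            try
            {
                int     byteCount = bitmapdata.Height * bitmapdata.Stride;
                byte [] bytes     = new byte[byteCount];

                System.Runtime.InteropServices.Marshal.Copy(bitmapdata.Scan0, bytes, 0, byteCount);

                return(faceDetector.DetectObject(bitmapdata.Width,
                                                 bitmapdata.Height,
                                                 bitmapdata.Stride,
                                                 3,                  // 3 bytes per pixel
                                                 bytes));
            }
            finally
            {
                bitmap.UnlockBits(bitmapdata);
            }
        }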