Example #1
        private Bitmap FacePicDetect(Bitmap bitmap)
        {
            lock (this)
            {
                //Keep a copy of the original photo; if a snapshot is being taken, save this copy without the face-detection rectangles
                currentPicture = (Bitmap)bitmap.Clone();

                if (detector == null)
                {
                    //First create the detector instance used for face detection
                    detector = new HaarObjectDetector(new FaceHaarCascade(), 100)
                    {
                        SearchMode            = ObjectDetectorSearchMode.Single,            //search mode
                        ScalingMode           = ObjectDetectorScalingMode.GreaterToSmaller, //scaling mode
                        ScalingFactor         = 1.5f,                                       //rescaling factor applied to the search window during the search
                        UseParallelProcessing = true
                    };                                                                      //face cascade + minimum window size used when searching for objects
                }

                // Run detection on the frame and return the array of detected regions
                Rectangle[] regions = detector.ProcessFrame(bitmap);
                if (regions != null && regions.Length > 0)
                {
                    //Mark the detected faces
                    RectanglesMarker marker = new RectanglesMarker(regions, Color.Orange);
                    regions = null;
                    return(marker.Apply(bitmap));
                }
                regions = null;
                return(bitmap);
            }
        }
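FacePicDetect reads and writes class-level state that is not shown in the snippet; a minimal sketch of the assumed field declarations (names taken from the code above, types inferred):

        private HaarObjectDetector detector;   // created lazily on the first processed frame
        private Bitmap currentPicture;         // unmarked copy of the latest frame, kept for snapshots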
Example #2
        public void Init(PictureBox pb)
        {
            FilterInfoCollection videoCaptureDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            VideoCaptureDevice   FinalVideoSource    = new VideoCaptureDevice(videoCaptureDevices[0].MonikerString);

            FinalVideoSource.NewFrame += new NewFrameEventHandler((sender, eventArgs) =>
            {
                Bitmap image = (Bitmap)eventArgs.Frame.Clone();

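                // Note: creating a new FaceHaarCascade and HaarObjectDetector on every frame is
                // expensive; in practice both can be constructed once outside this handler and reused.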
                FaceHaarCascade cascade     = new FaceHaarCascade();
                HaarObjectDetector detector = new HaarObjectDetector(cascade, 80);

                detector.UseParallelProcessing = true;

                Rectangle[] faces = detector.ProcessFrame(image);

                Graphics g = Graphics.FromImage(image);
                foreach (var face in faces)
                {
                    Pen p = new Pen(Color.Red, 10f);
                    g.DrawRectangle(p, face);
                }
                g.Dispose();

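                // Note: this handler runs on the capture thread and the previous pb.Image is never
                // disposed; production code would typically marshal the update to the UI thread and
                // dispose the old frame to avoid leaking GDI handles.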
                pb.Image = image;
            });

            FinalVideoSource.DesiredFrameRate = 1;
            FinalVideoSource.DesiredFrameSize = new Size(1, 500);
            FinalVideoSource.Start();
        }
        public MainPage()
        {
            InitializeComponent();

            var assembly = Assembly_.GetExecutingAssembly();

            using (var stream = assembly.GetManifestResourceStream("FaceDetection.Images.judybats.jpg"))
            {
                this.ImageView.Source = this.GetImageSourceFromStream(stream);

                stream.Seek(0, SeekOrigin.Begin);
                this.bitmap = ((Bitmap)Image.FromStream(stream)).Clone(PixelFormat.Format32bppArgb);
            }

            foreach (var searchMode in Enum.GetNames(typeof(ObjectDetectorSearchMode)))
            {
                this.SearchModePicker.Items.Add(searchMode);
            }
            foreach (var scalingMode in Enum.GetNames(typeof(ObjectDetectorScalingMode)))
            {
                this.ScalingModePicker.Items.Add(scalingMode);
            }

            this.SearchModePicker.SelectedIndex =
                this.SearchModePicker.Items.IndexOf(ObjectDetectorSearchMode.NoOverlap.ToString());
            this.ScalingModePicker.SelectedIndex =
                this.ScalingModePicker.Items.IndexOf(ObjectDetectorScalingMode.SmallerToGreater.ToString());

            using (var stream = assembly.GetManifestResourceStream("FaceDetection.Files.haarcascade_frontalface_alt.xml"))
            {
                var cascade = HaarCascade.FromXml(stream);
                this.detector = new HaarObjectDetector(cascade, 30);
            }
        }
Example #4
        private void DisposeForm()
        {
            //Cleanup performed before the form closes
            if (videoSourcePlayer.VideoSource != null)
            {
                videoSourcePlayer.SignalToStop();
                videoSourcePlayer.WaitForStop();
                videoSourcePlayer.VideoSource = null;
            }
            videoDevices      = null;
            videoDevice       = null;
            videoCapabilities = null;
            currentPicture    = null;
            detector          = null;

            if (previewForm != null)
            {
                previewForm.Dispose();
            }
            previewForm = null;

            myImageCodecInfo    = null;
            myEncoder           = null;
            myEncoderParameter  = null;
            myEncoderParameters = null;
        }
Example #5
        //var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

        public Bitmap ProcessFrame(Bitmap inputImage)
        {
            Image <Bgr, byte> imageFrame = new Image <Bgr, byte>(new Bitmap(inputImage));

            // In this example, we will be creating a cascade for a Face detector:
            // var cascade = HaarCascade.FromXml("filename.xml"); (OpenCV 2.0 format)

            //not overlapping objects!!
            //min size 50 pixels
            var detector = new HaarObjectDetector(_cascade, minSize: 50,
                                                  searchMode: ObjectDetectorSearchMode.NoOverlap);

            Rectangle[] rectangles = detector.ProcessFrame(inputImage);

            foreach (var rectangle in rectangles)
            {
                imageFrame.ROI = rectangle;
                var facePatch = GetBlackRectangle(rectangle);
                CvInvoke.cvCopy(facePatch, imageFrame, IntPtr.Zero);

                //imageFrame.Draw(rectangle, new Bgr(Color.Indigo), 3);
            }
            imageFrame.ROI = Rectangle.Empty;
            return(imageFrame.Bitmap);
        }
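GetBlackRectangle is not part of this snippet; a minimal sketch, assuming it simply produces a solid black patch matching the detected rectangle, which cvCopy then pastes over the face region (Emgu CV 2.x API):

        private static Image <Bgr, byte> GetBlackRectangle(Rectangle rectangle)
        {
            // Solid black patch the size of the detected face (assumed behaviour).
            return(new Image <Bgr, byte>(rectangle.Width, rectangle.Height, new Bgr(Color.Black)));
        }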
        public void ProcessFrame3()
        {
            HaarCascade        cascade = new FaceHaarCascade();
            HaarObjectDetector target  = new HaarObjectDetector(cascade,
                                                                15, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.three;

            target.ProcessFrame(bmp);

            Assert.AreEqual(3, target.DetectedObjects.Length);
            Assert.AreEqual(180, target.DetectedObjects[0].X);
            Assert.AreEqual(275, target.DetectedObjects[0].Y);
            Assert.AreEqual(41, target.DetectedObjects[0].Width);
            Assert.AreEqual(41, target.DetectedObjects[0].Height);

            Assert.AreEqual(168, target.DetectedObjects[1].X);
            Assert.AreEqual(144, target.DetectedObjects[1].Y);
            Assert.AreEqual(49, target.DetectedObjects[1].Width);
            Assert.AreEqual(49, target.DetectedObjects[1].Height);

            Assert.AreEqual(392, target.DetectedObjects[2].X);
            Assert.AreEqual(133, target.DetectedObjects[2].Y);
            Assert.AreEqual(59, target.DetectedObjects[2].Width);
            Assert.AreEqual(59, target.DetectedObjects[2].Height);


            target = new HaarObjectDetector(cascade,
                                            15, ObjectDetectorSearchMode.Single);

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
        }
        public void MaxSizeTest()
        {
            HaarCascade        cascade = new FaceHaarCascade();
            HaarObjectDetector target  = new HaarObjectDetector(cascade,
                                                                50, ObjectDetectorSearchMode.Default);

            Bitmap bmp = Properties.Resources.lena_color;

            Rectangle[] result;

            target.MaxSize = new Size(10, 60);
            result         = target.ProcessFrame(bmp);
            Assert.AreEqual(0, result.Length);

            target.MaxSize = new Size(60, 60);
            result         = target.ProcessFrame(bmp);
            Assert.AreEqual(1, result.Length);
            foreach (var r in result)
            {
                Assert.IsTrue(r.Width <= target.MaxSize.Width);
                Assert.IsTrue(r.Height <= target.MaxSize.Height);
            }

            target.MaxSize = new Size(80, 80);
            result         = target.ProcessFrame(bmp);
            Assert.AreEqual(2, result.Length);
            foreach (var r in result)
            {
                Assert.IsTrue(r.Width <= target.MaxSize.Width);
                Assert.IsTrue(r.Height <= target.MaxSize.Height);
            }
        }
        public static Rectangle detectFace(Bitmap bitmap)
        {
            var rectangle = new Rectangle();
            var cascade   = new FaceHaarCascade();
            var detector  = new HaarObjectDetector(cascade, 600);

            detector.SearchMode            = ObjectDetectorSearchMode.Default;
            detector.ScalingFactor         = 1.5F;
            detector.ScalingMode           = ObjectDetectorScalingMode.SmallerToGreater;
            detector.UseParallelProcessing = true;
            detector.Suppression           = 3;

            var faceObjects  = detector.ProcessFrame(bitmap);
            var possibleFaces = new List <Rectangle>();

            foreach (var face in faceObjects)
            {
                if (face.Width > 100 && face.Height > 100)
                {
                    possibleFaces.Add(face);
                }
            }
            if (possibleFaces.Count > 0)
            {
                int x      = possibleFaces.Sum((r) => r.X) / possibleFaces.Count;
                int y      = possibleFaces.Sum((r) => r.Y) / possibleFaces.Count;
                int width  = possibleFaces.Sum((r) => r.Width) / possibleFaces.Count;
                int height = possibleFaces.Sum((r) => r.Height) / possibleFaces.Count;
                rectangle = new Rectangle(x, y, width, height);
            }
            return(rectangle);
        }
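A hypothetical caller, only to illustrate how the averaged rectangle returned by detectFace is meant to be consumed (photo and "portrait.jpg" are assumed; an empty rectangle means no candidate exceeded the 100-pixel filter):

        Bitmap photo = (Bitmap)Image.FromFile("portrait.jpg");   // assumed input image
        Rectangle face = detectFace(photo);
        if (face.Width > 0)
        {
            using (Graphics g = Graphics.FromImage(photo))
            {
                g.DrawRectangle(Pens.Lime, face);   // outline the averaged face region
            }
        }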
        public void ProcessFrame2()
        {
            HaarCascade        cascade = new FaceHaarCascade();
            HaarObjectDetector target  = new HaarObjectDetector(cascade,
                                                                30, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_gray;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            Assert.AreEqual(255, target.DetectedObjects[0].X);
            Assert.AreEqual(225, target.DetectedObjects[0].Y);
            Assert.AreEqual(123, target.DetectedObjects[0].Width);
            Assert.AreEqual(123, target.DetectedObjects[0].Height);


            target = new HaarObjectDetector(cascade,
                                            30, ObjectDetectorSearchMode.Default);

            target.ProcessFrame(bmp);

            Assert.AreEqual(6, target.DetectedObjects.Length);
            Assert.AreEqual(255, target.DetectedObjects[0].X);
            Assert.AreEqual(225, target.DetectedObjects[0].Y);
            Assert.AreEqual(123, target.DetectedObjects[0].Width);
            Assert.AreEqual(123, target.DetectedObjects[0].Height);
        }
        /// <summary>
        /// Called when MainForm loads.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void MainForm_Load(object sender, EventArgs e)
        {
            // activate the camera
            SetCamera();

            // set up a haar object detector to find the face
            var faceCascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

            faceDetector               = new HaarObjectDetector(faceCascade);
            faceDetector.MinSize       = new Size(150, 150);
            faceDetector.ScalingFactor = 1.03f;
            faceDetector.SearchMode    = ObjectDetectorSearchMode.Single;
            faceDetector.ScalingMode   = ObjectDetectorScalingMode.SmallerToGreater;

            // set up a new haar object detector to find the nose
            var noseCascade = new Accord.Vision.Detection.Cascades.NoseHaarCascade();

            noseDetector             = new HaarObjectDetector(noseCascade);
            noseDetector.SearchMode  = ObjectDetectorSearchMode.Single;
            noseDetector.ScalingMode = ObjectDetectorScalingMode.SmallerToGreater;

            // load the mustache
            mustache = Bitmap.FromFile(@"./mustache.png") as Bitmap;

            // start the player
            videoPlayer.Start();
        }
Example #11
        /// <summary>
        /// Called when MainForm loads.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void MainForm_Load(object sender, EventArgs e)
        {
            watch.Stop();

            #region extracting icon from application to this form window
            Icon = Icon.ExtractAssociatedIcon(Application.ExecutablePath);
            #endregion

            #region Load default Mask Image
            if (File.Exists("mask.png"))
            {
                mask = LoadMask("mask.png");
            }
            else
            {
                mask = Icon.ToBitmap();
            }
            erase = new Bitmap(mask.Width, mask.Height, PixelFormat.Format32bppArgb);
            #endregion

            #region Add file(s) from command line args
            string[] flist = Environment.GetCommandLineArgs();
            lvFiles.Items.Clear();
            foreach (string f in flist)
            {
                if (PhotoExts.Contains(Path.GetExtension(f), StringComparer.CurrentCultureIgnoreCase))
                {
                    if (lvFiles.Items.Count == 0 || lvFiles.FindItemWithText(f, true, 0) == null)
                    {
                        ListViewItem fItem = new ListViewItem(new string[] { Path.GetFileName(f), f });
                        lvFiles.Items.Add(fItem);
                    }
                }
            }
            #endregion

            picPreview.SizeMode = PictureBoxSizeMode.Zoom;
            picPreview.Image    = photo;

            picMask.SizeMode = PictureBoxSizeMode.Zoom;
            picMask.Image    = mask;

            cbMode.DataSource    = Enum.GetValues(typeof(ObjectDetectorSearchMode));
            cbScaling.DataSource = Enum.GetValues(typeof(ObjectDetectorScalingMode));

            cbMode.SelectedItem    = ObjectDetectorSearchMode.NoOverlap;
            cbScaling.SelectedItem = ObjectDetectorScalingMode.SmallerToGreater;

            SearchMode  = ObjectDetectorSearchMode.NoOverlap;
            ScalingMode = ObjectDetectorScalingMode.SmallerToGreater;

            OutSize  = (int)numOutSize.Value;
            faceSize = (int)numFaceSize.Value;

            GrayFirst = chkGrayDetect.Checked;

            HaarCascade cascade = new FaceHaarCascade();
            detector = new HaarObjectDetector(cascade);
        }
Example #12
        static void Main(string[] args)
        {
            ////ApplyBiTonal
            //Bitmap b = (Bitmap)Image.FromFile("test.png");
            //Processor.ApplyBiTonal(ref b, (byte.MaxValue * 3) / 2, System.Drawing.Color.Red, System.Drawing.Color.White);
            //b.Save("result.png");


            //WebClient wc = new WebClient();
            //byte[] bytes = wc.DownloadData("https://lpportalvhds5rnqyvf495z0.blob.core.windows.net/imagger/408fcdc5-8e91-4dc9-ada8-87287c8cf77f-orinal.jpeg");
            //MemoryStream ms = new MemoryStream(bytes);
            //Bitmap imageBitmap = new Bitmap(ms);
            //Processor.ApplyBrightness(ref imageBitmap, 100);
            ////Processor.ApplyBiTonal(ref imageBitmap, (byte.MaxValue * 3) / 2, System.Drawing.Color.Red, System.Drawing.Color.White);
            //imageBitmap.Save("result.png");


            //WebClient wc = new WebClient();
            //byte[] bytes = wc.DownloadData("https://lpportalvhds5rnqyvf495z0.blob.core.windows.net/imagger/157bed84-5f10-4f43-a5e5-7f0d146e3452-orinal.jpeg");
            //MemoryStream ms = new MemoryStream(bytes);
            //Bitmap imageBitmap = new Bitmap(ms);
            //Processor.ApplyRandomJitter(ref imageBitmap, 20); //, System.Drawing.Color.Red, System.Drawing.Color.White);
            //imageBitmap.Save("result.png");

            //Bitmap br = (Bitmap)Image.FromFile("test.png");
            //Processor.ApplyBrightness(ref br, 100);
            //br.Save("result-br.png");



            #region Detect faces

            WebClient    wc      = new WebClient();
            byte[]       bytes   = wc.DownloadData("https://scontent-fra3-1.xx.fbcdn.net/v/t1.0-1/c0.46.200.200/1724176_10151691353752537_1722497807_n.jpg?oh=19d89af364674bd704cd38613135b4e1&oe=583097F3");
            MemoryStream ms      = new MemoryStream(bytes);
            Bitmap       picture = new Bitmap(ms);

            HaarObjectDetector detector;
            HaarCascade        cascade = new FaceHaarCascade();
            detector = new HaarObjectDetector(cascade, 30);

            detector.SearchMode            = ObjectDetectorSearchMode.NoOverlap;
            detector.ScalingMode           = ObjectDetectorScalingMode.GreaterToSmaller;
            detector.ScalingFactor         = 1.5f;
            detector.UseParallelProcessing = true;

            Rectangle[] objects = detector.ProcessFrame(picture);
            // RandomJitter.ApplyRandomJitterOnRectangulars(ref picture, 20, objects);
            //   RandomJitter.ApplyRandomJitterOnRectangulars2(ref picture, 20, objects);

            RandomJitter.ApplyAddNoise(ref picture, objects);
            picture.Save("result.png");

            //  var p = RandomJitter.ApplyColorMatrix(picture, objects);
            //p.Save("result.png");

            #endregion
        }
        private void RealTimeFaceDetect_Load(object sender, EventArgs e)
        {
            detector    = new HaarObjectDetector(cascade, 30);
            _cameraList = CameraAssistance.Enumerate();
            foreach (var item in _cameraList)
            {
                this.CamerasCbx.Items.Add(item);
            }
        }
Example #14
        // Constructor
        public MainForm()
        {
            InitializeComponent();

            HaarCascade cascade = new FaceHaarCascade();

            detector = new HaarObjectDetector(cascade, 25, ObjectDetectorSearchMode.Single, 1.2f,
                                              ObjectDetectorScalingMode.GreaterToSmaller);
        }
        public static bool HaveFace(Bitmap imagePassport)
        {
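            // The clone rectangle below crops what is presumably the photo area of a passport scan,
            // using fixed proportions of the page width and height.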
            Rectangle          cloneRect    = new Rectangle((int)(imagePassport.Width * 0.05), (int)(imagePassport.Height / 2.0 + imagePassport.Width * 0.129), (int)(imagePassport.Width * 0.28), (int)(imagePassport.Width * 0.357));
            Bitmap             cloneBitmap  = imagePassport.Clone(cloneRect, imagePassport.PixelFormat);
            HaarObjectDetector faceDetector = new HaarObjectDetector(new FaceHaarCascade(), minSize: 70, searchMode: ObjectDetectorSearchMode.Average);
            // detect faces
            IEnumerable <Rectangle> face = faceDetector.ProcessFrame(cloneBitmap);

            return(face.Count() > 0);
        }
        /// <summary>
        /// Processes the image.
        /// </summary>
        /// <param name="factory">
        /// The current instance of the <see cref="T:ImageProcessor.ImageFactory"/> class containing
        /// the image to process.
        /// </param>
        /// <returns>
        /// The processed image from the current instance of the <see cref="T:ImageProcessor.ImageFactory"/> class.
        /// </returns>
        public Image ProcessImage(ImageFactory factory)
        {
            Bitmap newImage = null;
            Bitmap grey     = null;
            Image  image    = factory.Image;

            try
            {
                HaarCascade cascade = this.DynamicParameter;
                grey = new Bitmap(image.Width, image.Height);
                grey.SetResolution(image.HorizontalResolution, image.VerticalResolution);
                grey = MatrixFilters.GreyScale.TransformImage(image, grey);

                HaarObjectDetector detector = new HaarObjectDetector(cascade)
                {
                    SearchMode    = ObjectDetectorSearchMode.NoOverlap,
                    ScalingMode   = ObjectDetectorScalingMode.GreaterToSmaller,
                    ScalingFactor = 1.5f
                };

                // Process frame to detect objects
                Rectangle[] rectangles = detector.ProcessFrame(grey);
                grey.Dispose();

                newImage = new Bitmap(image);
                newImage.SetResolution(image.HorizontalResolution, image.VerticalResolution);
                using (Graphics graphics = Graphics.FromImage(newImage))
                {
                    using (Pen whitePen = new Pen(Color.White))
                    {
                        whitePen.Width = 4;
                        graphics.DrawRectangles(whitePen, rectangles);
                    }
                }

                image.Dispose();
                image = newImage;
            }
            catch (Exception ex)
            {
                if (grey != null)
                {
                    grey.Dispose();
                }

                if (newImage != null)
                {
                    newImage.Dispose();
                }

                throw new ImageProcessingException("Error processing image with " + this.GetType().Name, ex);
            }

            return(image);
        }
Example #17
        private void btnTrainFace_Click(object sender, EventArgs e)
        {
            try
            {
                detector                       = new HaarObjectDetector(cascade, 30);
                detector.SearchMode            = ObjectDetectorSearchMode.Single;//.NoOverlap;//.Default; //.Average;
                detector.ScalingFactor         = 1.5f;
                detector.ScalingMode           = ObjectDetectorScalingMode.GreaterToSmaller;
                detector.UseParallelProcessing = true;
                detector.Suppression           = 3;

                Bitmap            picture = new Bitmap(pictureBox1.Image);
                Image <Bgr, byte> Frame   = new Image <Bgr, byte>(picture);

                Stopwatch   sw          = Stopwatch.StartNew();
                Rectangle[] faceObjects = detector.ProcessFrame(picture);
                sw.Stop();

                if (faceObjects.Length > 0)
                {
                    RectanglesMarker marker = new RectanglesMarker(faceObjects, Color.Fuchsia);
                    pictureBox1.Image = marker.Apply(picture);

                    //Graphics g = Graphics.FromImage(pictureBox1.Image);
                    foreach (var face in faceObjects)
                    {
                        //g.DrawRectangle(Pens.DeepSkyBlue, face);
                        Frame.Draw(face, new Bgr(Color.Red), 3);
                        Bitmap   c   = Frame.ToBitmap();
                        Bitmap   bmp = new Bitmap(face.Width, face.Height);
                        Graphics gg  = Graphics.FromImage(bmp);
                        gg.DrawImage(c, 0, 0, face, GraphicsUnit.Pixel);
                        pictureBox2.Image = bmp;
                        //bmp.Save("myface(accord) " + DateTime.Now.Second.ToString());
                        gg.Dispose();
                    }
                    //g.Dispose();
                    //label1.Text = "Completed operation!! " + faceObjects.Length.ToString() + " Face detected";
                    MessageBox.Show("Train Face operation successful!!! " + faceObjects.Length.ToString() + " Face detected", "Train face", MessageBoxButtons.OK, MessageBoxIcon.Information);
                    txtFaceId.Text       = genFaceId();
                    txtUsername.Text     = "User" + txtFaceId.Text;
                    btnUpload.Enabled    = false;
                    btnTrainFace.Enabled = false;
                    btnSave.Enabled      = true;
                }
                else
                {
                    MessageBox.Show("Image cannot be trained!!! No face detected in the current image", "Fail to Train face", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
            catch (Exception er)
            {
                MessageBox.Show(er.Message, "Face Detection and Recognition Failure", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example #18
        private Bitmap FaceFind(Bitmap bmpBitmap)
        {
            HaarObjectDetector faceDetector = new HaarObjectDetector(new FaceHaarCascade(), minSize: 300, searchMode: ObjectDetectorSearchMode.Single);
            RectanglesMarker   faceMarker   = new RectanglesMarker(Color.Red)
            {
                Rectangles = faceDetector.ProcessFrame(bmpBitmap)
            };

            faceMarker.ApplyInPlace(bmpBitmap);
            return(bmpBitmap);
        }
        public FaceDetector()
        {
            HaarCascade cascade = new FaceHaarCascade();

            detector = new HaarObjectDetector(cascade, 30);

            detector.SearchMode            = ObjectDetectorSearchMode.Default;
            detector.ScalingMode           = ObjectDetectorScalingMode.GreaterToSmaller;
            detector.ScalingFactor         = 1.1f;
            detector.UseParallelProcessing = true;
            detector.Suppression           = 2;
        }
Example #20
        public FaceDetectionManager()
        {
            this.detector = new HaarObjectDetector(new FaceHaarCascade(),
                                                   25, ObjectDetectorSearchMode.Single, 1.2f,
                                                   ObjectDetectorScalingMode.GreaterToSmaller);

            this.tracker = new Camshift();

            this.InitialTrackingConfiguration();

            this.FaceTracked = new Rectangle();
        }
Example #21
        static void TestHaar()
        {
            if (Environment.Is64BitProcess)
            {
                throw new Exception("Run in 32-bits");
            }

            // Let's test the detector using a sample video from
            // the collection of test videos in the framework:
            TestVideos ds       = new TestVideos();
            string     fileName = ds["crowd.mp4"];

            // In this example, we will be creating a cascade for a Face detector:
            var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

            // Now, create a new Haar object detector with the cascade:
            var detector = new HaarObjectDetector(cascade, minSize: 25,
                                                  searchMode: ObjectDetectorSearchMode.Average,
                                                  scalingMode: ObjectDetectorScalingMode.SmallerToGreater,
                                                  scaleFactor: 1.1f)
            {
                Suppression = 5 // This should make sure we only report regions as faces if
                                // they have been detected at least 5 times within different cascade scales.
            };

            // Now, let's open the video using FFMPEG:
            var video = new VideoFileReader();

            video.Open(fileName);

            Stopwatch sw = Stopwatch.StartNew();

            // Now, for each frame of the video
            for (int frameIndex = 0; frameIndex < video.FrameCount; frameIndex++)
            {
                // Read the current frame into the bitmap data
                Bitmap bmp = video.ReadVideoFrame(frameIndex);

                // Feed the frame to the tracker
                Rectangle[] faces = detector.ProcessFrame(bmp);

                Console.WriteLine(faces.Length);
                Console.WriteLine(bmp.Flags);
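                // Note: each Bitmap returned by ReadVideoFrame is owned by the caller and should be
                // disposed after use to avoid exhausting memory over long videos.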
            }

            sw.Stop();

            Console.WriteLine(sw.Elapsed);

            video.Close();
        }
Example #22
        /// <summary>
        ///   Initializes a new instance of the <see cref="FaceController"/> class.
        /// </summary>
        ///
        public FaceController()
        {
            // Setup tracker
            tracker         = new MatchingTracker();
            tracker.Extract = false;

            // Setup detector
            detector = new HaarObjectDetector(new NoseHaarCascade());
            detector.ScalingFactor = 1.1f;
            detector.SearchMode    = ObjectDetectorSearchMode.Single;
            detector.ScalingMode   = ObjectDetectorScalingMode.SmallerToGreater;
            detector.MinSize       = new Size(2, 5);
            //detector.MaxSize = new Size(15, 18);
        }
Example #23
        private void Form1_Load(object sender, EventArgs e)
        {
            OpenFileDialog OF          = new OpenFileDialog();
            PictureBox     pictureBox1 = new PictureBox();

            haar     = new FaceHaarCascade();
            detector = new HaarObjectDetector(haar, 30);
            Timer timer1 = new Timer();

            timer1.Tick += new EventHandler(timer1_Tick);
            cap          = new Capture("samplemp4.mp4");
            timer1.Start();
            imageViewer.ShowDialog();
        }
        public void ProcessFrame()
        {
            HaarCascade        cascade = new FaceHaarCascade();
            HaarObjectDetector target  = new HaarObjectDetector(cascade,
                                                                50, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_color;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            Assert.AreEqual(126, target.DetectedObjects[0].X);
            Assert.AreEqual(112, target.DetectedObjects[0].Y);
            Assert.AreEqual(59, target.DetectedObjects[0].Width);
            Assert.AreEqual(59, target.DetectedObjects[0].Height);
        }
Example #25
        public static List <Rectangle> DetectFaces(Bitmap image)
        {
            List <Rectangle>   xfaces = new List <Rectangle>();
            HaarObjectDetector detector;

            detector = new HaarObjectDetector(new FaceHaarCascade(), 20, ObjectDetectorSearchMode.Average, 1.1f, ObjectDetectorScalingMode.SmallerToGreater);
            detector.UseParallelProcessing = true;
            detector.Suppression           = 2;
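            // Pre-processing: convert to grayscale and equalize the histogram so detection is less
            // sensitive to lighting differences.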
            var grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
            HistogramEqualization filter = new HistogramEqualization();

            filter.ApplyInPlace(grayImage);
            Rectangle[] faces = detector.ProcessFrame(grayImage);
            xfaces.AddRange(faces);
            return(xfaces);
        }
        private void button1_Click(object sender, EventArgs e)
        {
            try
            {
                detector                       = new HaarObjectDetector(cascade, 30);
                detector.SearchMode            = ObjectDetectorSearchMode.Single;            //.Default;//.NoOverlap;//
                detector.ScalingFactor         = 2.5f;
                detector.ScalingMode           = ObjectDetectorScalingMode.SmallerToGreater; //.GreaterToSmaller;
                detector.UseParallelProcessing = true;
                detector.Suppression           = 3;

                Bitmap            picture = new Bitmap(pictureBox2.Image);
                Image <Bgr, byte> Frame   = new Image <Bgr, byte>(picture);

                Stopwatch sw = Stopwatch.StartNew();

                Rectangle[] faceObjects = detector.ProcessFrame(picture);

                sw.Stop();

                if (faceObjects.Length > 0)
                {
                    RectanglesMarker marker = new RectanglesMarker(faceObjects, Color.Fuchsia);
                    pictureBox2.Image = marker.Apply(picture);
                }
                label1.Text = "Operation Completed!!! " + faceObjects.Length.ToString() + " Face detected";

                Graphics g = Graphics.FromImage(pictureBox2.Image);
                foreach (var face in faceObjects)
                {
                    g.DrawRectangle(Pens.DeepSkyBlue, face);
                    Frame.Draw(face, new Bgr(Color.Red), 3);
                    Bitmap   c   = Frame.ToBitmap();
                    Bitmap   bmp = new Bitmap(face.Width, face.Height);
                    Graphics gg  = Graphics.FromImage(bmp);
                    gg.DrawImage(c, 0, 0, face, GraphicsUnit.Pixel);
                    //pictureBox2.Image = bmp;
                    bmp.Save("myface(accord) " + DateTime.Now.Second.ToString());
                    MessageBox.Show("Face Detected!!!, Face Save as:" + "myface(accord) " + DateTime.Now.Second.ToString(), "Face Detection Successfully", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                //g.Dispose()
            }
            catch (Exception er)
            {
                MessageBox.Show(er.Message, "Face Detection Failure", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example #27
        public void ProcessFrame()
        {
            #region doc_example
            // In order to use a HaarObjectDetector, first we have to tell it
            // which type of objects we would like to detect. And in a Haar detector,
            // different object classifiers are specified in terms of a HaarCascade.

            // The framework comes with some built-in cascades for common body
            // parts, such as Face and Nose. However, it is also possible to
            // load a cascade from cascade XML definitions in OpenCV 2.0 format.

            // In this example, we will be creating a cascade for a Face detector:
            var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

            // Note: In the case we would like to load it from XML, we could use:
            // var cascade = HaarCascade.FromXml("filename.xml");

            // Now, create a new Haar object detector with the cascade:
            var detector = new HaarObjectDetector(cascade, minSize: 50,
                                                  searchMode: ObjectDetectorSearchMode.NoOverlap);

            // Note that we have specified that we do not want overlapping objects,
            // and that the minimum object an object can have is 50 pixels. Now, we
            // can use the detector to classify a new image. For instance, consider
            // the famous Lena picture:

            Bitmap bmp = Accord.Imaging.Image.Clone(Resources.lena_color);

            // We have to call ProcessFrame to detect all rectangles containing the
            // object we are interested in (which in this case, is the face of Lena):
            Rectangle[] rectangles = detector.ProcessFrame(bmp);

            // The answer will be a single rectangle of dimensions
            //
            //   {X = 126 Y = 112 Width = 59 Height = 59}
            //
            // which indeed contains the only face in the picture.
            #endregion

            Assert.AreEqual(1, detector.DetectedObjects.Length);
            Assert.AreEqual(126, detector.DetectedObjects[0].X);
            Assert.AreEqual(112, detector.DetectedObjects[0].Y);
            Assert.AreEqual(59, detector.DetectedObjects[0].Width);
            Assert.AreEqual(59, detector.DetectedObjects[0].Height);
        }
        public MainForm()
        {
            InitializeComponent();

            pictureBox1.Image = picture;

            cbMode.DataSource    = Enum.GetValues(typeof(ObjectDetectorSearchMode));
            cbScaling.DataSource = Enum.GetValues(typeof(ObjectDetectorScalingMode));

            cbMode.SelectedItem    = ObjectDetectorSearchMode.NoOverlap;
            cbScaling.SelectedItem = ObjectDetectorScalingMode.SmallerToGreater;

            toolStripStatusLabel1.Text = "Please select the detector options and click Detect to begin.";

            HaarCascade cascade = new FaceHaarCascade();

            detector = new HaarObjectDetector(cascade, 30);
        }
Example #29
        public ImageController(PictureBox image)
        {
            this.image = image;

            /*
             * cbMode.DataSource = Enum.GetValues(typeof(ObjectDetectorSearchMode));
             * cbScaling.DataSource = Enum.GetValues(typeof(ObjectDetectorScalingMode));
             *
             * cbMode.SelectedItem = ObjectDetectorSearchMode.NoOverlap;
             * cbScaling.SelectedItem = ObjectDetectorScalingMode.SmallerToGreater;
             */

            //toolStripStatusLabel1.Text = "Please select the detector options and click Detect to begin.";

            HaarCascade cascade = new FaceHaarCascade();

            detector = new HaarObjectDetector(cascade, 30);
        }
        public Tracking_face()
        {
            InitializeComponent();

            try
            {
                // enumerate video devices
                videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);

                if (videoDevices.Count == 0)
                {
                    throw new ApplicationException();
                }

                // add all devices to combo
                foreach (FilterInfo device in videoDevices)
                {
                    devicesCombo.Items.Add(device.Name);
                }
                HaarCascade cascade = new FaceHaarCascade();
                detector = new HaarObjectDetector(cascade,
                                                  25, ObjectDetectorSearchMode.Single, 1.2f,
                                                  ObjectDetectorScalingMode.GreaterToSmaller);
            }
            catch (ApplicationException)
            {
                devicesCombo.Items.Add("No local capture devices");
                devicesCombo.Enabled = false;
            }

            devicesCombo.SelectedIndex = 1;

            // create video source
            VideoCaptureDevice videoSource = new VideoCaptureDevice(videoDevices[devicesCombo.SelectedIndex].MonikerString);

            // set frame size
            videoSource.VideoResolution = selectResolution(videoSource);

            // open it
            OpenVideoSource(videoSource);

            detecting = true;
        }