protected override FaceTrackingPanTiltOutput DoProcess(CameraProcessInput baseInput)
        {
            var input = new FaceDetectorInput();

            input.Captured   = baseInput.Captured;
            input.DetectEyes = false;

            var result = _faceDetector.Process(input);

            // Default the pan/tilt target to the frame centre; it is overridden
            // below when at least one face is detected.
            var targetPoint = CentrePoint;

            if (result.Faces.Count > 0)
            {
                // Track the first detected face.
                Face faceTarget = result.Faces[0];
                targetPoint = faceTarget.Region.Center();
            }

            var outerResult = ReactToTarget(targetPoint);

            outerResult.Faces.AddRange(result.Faces);

            if (input.SetCapturedImage)
            {
                outerResult.CapturedImage = input.Captured.ToImage<Bgr, byte>();
            }

            return outerResult;
        }
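For orientation, the FaceDetector used in these samples appears to sit on top of Emgu CV (the code works with Mat, Image<Bgr, byte> and related types). The following is a minimal, hypothetical sketch of what a cascade-based detection step can look like; the class name, classifier file and parameter values are assumptions, not the project's actual implementation.

// Hypothetical sketch of a cascade-based face detection step (not the project's FaceDetector).
// Assumes Emgu CV and the stock haarcascade_frontalface_default.xml that ships with OpenCV.
using System.Drawing;
using Emgu.CV;
using Emgu.CV.CvEnum;

public static class SimpleFaceDetector
{
    private static readonly CascadeClassifier FaceCascade =
        new CascadeClassifier("haarcascade_frontalface_default.xml");

    public static Rectangle[] Detect(Mat captured)
    {
        using (var gray = new Mat())
        {
            // Cascades expect a grayscale, contrast-normalised image.
            CvInvoke.CvtColor(captured, gray, ColorConversion.Bgr2Gray);
            CvInvoke.EqualizeHist(gray, gray);

            // scaleFactor and minNeighbors trade detection rate against false positives.
            return FaceCascade.DetectMultiScale(gray, 1.1, 4, new Size(30, 30));
        }
    }
}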
Example #2
        //IntPtr pd = IntPtr.Zero;

        private void ProcessFrame(VideoFrame frame)
        {
            //if (pd == IntPtr.Zero)
            //{
            //    pd = CV.PedestrianDetectorInit();
            //    CV.PedestrianDetectorSetEngineSettings(pd, 0.5, 5, true, Color.YellowGreen);
            //}

            //long time;
            //CVPedestrians items = new CVPedestrians();
            //int count = CV.PedestrianDetectorProcess(pd, frame, ref items, out time);
            //Trace.WriteLine($"Count: {count}, time: {time}");

            // Convert the frame once and hand the same image to whichever detectors are enabled.
            var image = frame.ToRAWImage();
            var faces = faceDetector?.Process(image);

            carCounter?.Process(image);
            pedestrianDetector?.Process(image);

            if (cbFDMosaic.Checked)
            {
                if (faces != null)
                {
                    foreach (var face in faces)
                    {
                        // Pad the face region by 10px on every side, clamped to the image bounds.
                        var rect = face.Position;
                        rect.Top -= 10;
                        if (rect.Top < 0)
                        {
                            rect.Top = 0;
                        }

                        rect.Left -= 10;
                        if (rect.Left < 0)
                        {
                            rect.Left = 0;
                        }

                        rect.Bottom += 10;
                        if (rect.Bottom > image.Height)
                        {
                            rect.Bottom = image.Height;
                        }

                        rect.Right += 10;
                        if (rect.Right > image.Width)
                        {
                            rect.Right = image.Width;
                        }

                        // Apply a mosaic (pixelation) effect over the padded face region.
                        MFP.EffectMosaicROI(frame.Data, image.Width, image.Height, 45, rect);
                    }
                }
            }
        }
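The padding-and-clamping done field by field above can also be expressed with System.Drawing.Rectangle helpers. A sketch of that idea, assuming the detector's region can be converted to a Rectangle (the sample's own type exposes settable Top/Bottom/Left/Right, so this is an alternative formulation rather than a drop-in replacement):

using System.Drawing;

public static class RectangleHelpers
{
    // Grow a detection rectangle by 'padding' pixels on every side, then clip it to the image.
    public static Rectangle PadAndClamp(Rectangle region, int padding, int imageWidth, int imageHeight)
    {
        region.Inflate(padding, padding);                        // expand in all directions
        var bounds = new Rectangle(0, 0, imageWidth, imageHeight);
        return Rectangle.Intersect(region, bounds);              // clip to the frame
    }
}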
Example #3
        //IntPtr pd = IntPtr.Zero;


        private void ProcessFrame(RAWImage frame)
        {
            //if (pd == IntPtr.Zero)
            //{
            //    pd = CV.PedestrianDetectorInit();
            //    CV.PedestrianDetectorSetEngineSettings(pd, 0.5, 5, true, Color.YellowGreen);
            //}

            //long time;
            //CVPedestrians items = new CVPedestrians();
            //int count = CV.PedestrianDetectorProcess(pd, frame, ref items, out time);
            //Trace.WriteLine($"Count: {count}, time: {time}");

            // Detectors are optional; the null-conditional calls skip any that were not configured.
            faceDetector?.Process(frame);
            carCounter?.Process(frame);
            pedestrianDetector?.Process(frame);
        }
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            using (var frame = new Mat())
            {
                // Pull the most recently grabbed frame from the camera into 'frame'.
                CameraCapture.Retrieve(frame);

                var input = new FaceDetectorInput();
                input.Captured   = frame;
                input.DetectEyes = chkDetectEyes.Checked;

                var    result   = _faceDetector.Process(input);
                var    imageBgr = result.CapturedImage;
                IImage imageOut = imageBgr;

                if (chkRectangles.Checked)
                {
                    foreach (var face in result.Faces)
                    {
                        imageBgr.Draw(face.Region, new Bgr(Color.Red), 2);

                        var eyeCount = 0;
                        foreach (Rectangle eye in face.Eyes)
                        {
                            eyeCount++;
                            imageBgr.Draw(eye, new Bgr(Color.Blue), 2);
                            imageBgr.Draw(eyeCount.ToString(), eye.Location, FontFace.HersheyComplexSmall, 2, new Bgr(Color.Blue));
                        }
                    }
                }

                if (chkSunnies.Checked && result.Faces.Count > 0)
                {
                    imageOut = WearSunnies(imageBgr, result.Faces[0].Eyes);
                }

                imageBox.Image = imageOut;

                NotifyStatus("Face detection took {0}", result.Elapsed.ToHumanReadable());
            }
        }
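These ImageGrabbedHandler overrides are event handlers; the samples' base class owns CameraCapture and subscribes them. The exact wiring is not shown here, but with Emgu CV's VideoCapture it is typically something like the following sketch (CameraHost and its members are hypothetical names):

using System;
using Emgu.CV;

// Hypothetical wiring; the samples' base class presumably does something equivalent.
public class CameraHost : IDisposable
{
    private readonly VideoCapture _capture = new VideoCapture(0); // camera index 0

    public void Start(EventHandler imageGrabbedHandler)
    {
        _capture.ImageGrabbed += imageGrabbedHandler; // raised once per grabbed frame
        _capture.Start();                             // begin the background grab loop
    }

    public void Dispose()
    {
        _capture.Stop();
        _capture.Dispose();
    }
}

Inside the handler, CameraCapture.Retrieve(frame) then copies the most recently grabbed frame into a Mat, as both handlers in these examples do.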
Example #5
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            using (var frame = new Mat())
            {
                CameraCapture.Retrieve(frame);

                var input = new FaceDetectorInput();
                input.Captured         = frame;
                input.DetectEyes       = chkDetectEyes.Checked;
                input.ClassifierParams = _classiferParams;

                var result   = _faceDetector.Process(input);
                var imageBgr = result.CapturedImage;

                if (chkRectangles.Checked)
                {
                    foreach (var face in result.Faces)
                    {
                        var rectangleColor = new Bgr(Color.Red);
                        imageBgr.Draw(face.Region, rectangleColor, 2);

                        if (chkShowRectDimensions.Checked)
                        {
                            imageBgr.Draw(
                                string.Format("{0}x{1}", face.Region.Width, face.Region.Height)
                                , face.Region.Location
                                , FontFace.HersheyComplexSmall
                                , 2
                                , rectangleColor);
                        }

                        var eyeCount = 0;
                        foreach (Rectangle eye in face.Eyes)
                        {
                            eyeCount++;
                            imageBgr.Draw(eye, new Bgr(Color.Blue), 2);
                            imageBgr.Draw(eyeCount.ToString(), eye.Location, FontFace.HersheyComplexSmall, 2, new Bgr(Color.Blue));
                        }
                    }
                }

                var inputBgra = imageBgr.Mat.ToImage<Bgra, byte>();
                Image<Bgra, byte> output = inputBgra;

                result.Faces.ForEach(f =>
                {
                    if (chkSunnies.Checked)
                    {
                        output = WearSunnies2(output, f);
                    }

                    if (chkHat.Checked)
                    {
                        output = WearHat(output, f);
                    }
                });

                imageBox.Image = output;

                NotifyStatus("Face detection took {0}", result.Elapsed.ToHumanReadable());
            }
        }
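input.ClassifierParams lets the form tune detection at runtime. With a cascade-based detector those knobs usually map onto DetectMultiScale's arguments; the parameter bag below is a hypothetical shape for illustration, not the actual type behind _classiferParams:

using System.Drawing;
using Emgu.CV;

// Hypothetical parameter bag; the project's real classifier-parameter type may differ.
public class ClassifierParameters
{
    public double ScaleFactor { get; set; } = 1.1;        // pyramid step: smaller = slower but more thorough
    public int MinNeighbors { get; set; } = 4;            // higher = fewer false positives
    public Size MinSize { get; set; } = new Size(30, 30); // ignore detections smaller than this
}

public static class CascadeClassifierExtensions
{
    // Feed the parameters into a standard Emgu CV cascade search.
    public static Rectangle[] Detect(this CascadeClassifier cascade, IInputArray grayImage, ClassifierParameters p)
    {
        return cascade.DetectMultiScale(grayImage, p.ScaleFactor, p.MinNeighbors, p.MinSize);
    }
}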