Пример #1
0
        /// <summary>
        /// Camera frame handler: grabs the current frame, runs face/eye detection,
        /// draws the results and displays the annotated frame.
        /// </summary>
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = new Mat();

            capture.Retrieve(frame, 0);
            Mat  image = frame; //Read the frame as an 8-bit Bgr image
            long detectionTime;
            // Fix: the original declared the non-generic List type, which does not
            // compile against DetectFace.Detect(..., List<Rectangle>, ...).
            List<Rectangle> faces = new List<Rectangle>();
            List<Rectangle> eyes  = new List<Rectangle>();
            //The cuda cascade classifier doesn't seem to be able to load "haarcascade_frontalface_default.xml" file in this release
            //disabling CUDA module for now
            bool tryUseCuda   = false;
            bool tryUseOpenCL = true;

            DetectFace.Detect(
                image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                faces, eyes,
                tryUseCuda,
                tryUseOpenCL,
                out detectionTime);
            foreach (Rectangle face in faces)
            {
                CvInvoke.Rectangle(image, face, new Bgr(Color.Purple).MCvScalar, 3);
                Bitmap c = frame.Bitmap;
                // Copy the face region into its own bitmap.
                // Fix: dispose the Bitmap and Graphics we own — the original leaked
                // one of each per detected face on every frame.
                using (Bitmap bmp = new Bitmap(face.Size.Width, face.Size.Height))
                using (Graphics g = Graphics.FromImage(bmp))
                {
                    g.DrawImage(c, 0, 0, face, GraphicsUnit.Pixel);
                }
            }
            foreach (Rectangle eye in eyes)
            {
                CvInvoke.Rectangle(image, eye, new Bgr(Color.Green).MCvScalar, 2);
            }
            imageBox1.Image = frame;
        }
Пример #2
0
        /// <summary>
        /// Polling loop: queries the shared camera capture every 100 ms, detects
        /// faces in the frame and saves each detected face region to C:\Images.
        /// Runs until the hosting thread is torn down.
        /// </summary>
        private static void DetectFaces()
        {
            while (true)
            {
                System.Threading.Thread.Sleep(100);

                // Fix: dispose each frame — Mat wraps native memory that the GC
                // does not reclaim promptly, so the original leaked one Mat per
                // 100 ms iteration, forever.
                using (Mat frame = _cameraCapture.QueryFrame())
                {
                    if (frame == null)
                    {
                        continue; // capture may yield no frame; skip this tick
                    }

                    long             detectionTime;
                    List<Rectangle>  faces = new List<Rectangle>();
                    List<Rectangle>  eyes  = new List<Rectangle>();

                    DetectFace.Detect(
                        frame, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                        faces, eyes,
                        out detectionTime);

                    foreach (Rectangle face in faces)
                    {
                        CvInvoke.Rectangle(frame, face, new Bgr(Color.Red).MCvScalar, 2);

                        // Fix: dispose the cloned face bitmap after saving it.
                        using (var picture = frame.Bitmap.Clone(face, PixelFormat.DontCare))
                        {
                            picture.Save("C:\\Images\\" + Guid.NewGuid().ToString() + ".bmp", ImageFormat.Bmp);
                        }
                        Console.WriteLine("Face Identified");
                    }
                }
            }
        }
Пример #3
0
        /// <summary>
        /// Scans every *.jpg under the chosen directory, detects faces and saves
        /// each face (resized to 100x100) into the "TrainedFaces" folder, logging
        /// the file name and showing the current image.
        /// </summary>
        private void buttonFaceExtract_Click(object sender, EventArgs e)
        {
            string dataDirectory = directoryFaceExtract.Text;

            string[] files = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);

            // Fix: create the target folder once up front instead of once per face.
            Directory.CreateDirectory("TrainedFaces");

            foreach (var file in files)
            {
                Image<Bgr, byte> fullImage = new Image<Bgr, byte>(file);

                List<Rectangle> faces = new List<Rectangle>();
                List<Rectangle> eyes  = new List<Rectangle>();
                long            detectionTime;
                DetectFace.Detect(fullImage, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
                foreach (Rectangle face in faces)
                {
                    string fileNameImage = fileNameforExtract(file);
                    fullImage.ROI = face;
                    // Fix: dispose the intermediate resized image and bitmap — the
                    // original leaked both for every face in every file.
                    using (var resized = fullImage.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC))
                    using (var bmp = resized.ToBitmap())
                    {
                        bmp.Save("TrainedFaces\\" + fileNameImage);
                    }
                    richTextBox1.Text   += fileNameImage + "\n";
                    secondImageBox.Image = fullImage;
                }
                // NOTE(review): fullImage is deliberately not disposed here because
                // secondImageBox may still be displaying it — confirm ownership.
            }
        }
Пример #4
0
        /// <summary>
        /// Converts the current bitmap into a CameraResult: runs face/eye detection
        /// on it, then re-encodes the bitmap as a Base64 JPEG string.
        /// </summary>
        /// <returns>A CameraResult carrying the Base64 image and face/eye markers.</returns>
        private CameraResult ToCameraResult()
        {
            long             detectionTime;
            bool             tryUseCuda   = false;
            bool             tryUseOpenCL = true;
            List <Rectangle> faces        = new List <Rectangle>();
            List <Rectangle> eyes         = new List <Rectangle>();

            // Face and eye detection: wrap the current bitmap in an EmguCV image,
            // run the cascades, then release the native image immediately.
            this.imageCV = new Image <Bgr, byte>(this.bitmap);
            DetectFace.Detect(
                this.imageCV.Mat, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                faces, eyes,
                tryUseCuda,
                tryUseOpenCL,
                out detectionTime);

            // Optional overlay drawing, currently disabled:
            //foreach (Rectangle face in faces)
            //    CvInvoke.Rectangle(this.imageCV, face, new Bgr(Color.Red).MCvScalar, 2);
            //foreach (Rectangle eye in eyes)
            //    CvInvoke.Rectangle(this.imageCV, eye, new Bgr(Color.Blue).MCvScalar, 2);
            //this.bitmap = this.imageCV.ToBitmap();
            this.imageCV.Dispose();

            // Re-encode the bitmap as JPEG into the reusable stream/buffer pair.
            this.BitmapStream.Position = 0;
            this.bitmap.Save(this.BitmapStream, ImageFormat.Jpeg);
            this.BitmapStream.Position = 0;
            // NOTE(review): this assumes one Read call fills the whole buffer —
            // true for MemoryStream but not guaranteed for arbitrary Streams;
            // confirm BitmapStream's concrete type.
            var length    = this.BitmapStream.Read(this.BitmapBuffer, 0, (int)this.BitmapStream.Length);
            var imgBase64 = Convert.ToBase64String(this.BitmapBuffer, 0, length);

            return(new CameraResult {
                imgBase64 = imgBase64, EyesMark = eyes.ToListMark(), FacesMark = faces.ToListMark()
            });
        }
Пример #5
0
        /// <summary>
        /// Kafka consumer loop: subscribes to all configured channels and dispatches
        /// each received message to the handler matching its topic, until the token
        /// is cancelled or a fatal error occurs.
        /// </summary>
        /// <param name="cancellationToken">Stops the loop when cancelled.</param>
        private void StartConsumerLoop(CancellationToken cancellationToken)
        {
            // Subscribe accepts multiple topics at once.
            kafkaConsumer.Subscribe(Channels);

            while (!cancellationToken.IsCancellationRequested)
            {
                try
                {
                    var cr = this.kafkaConsumer.Consume(cancellationToken);

                    // A message belongs to exactly one topic, so dispatch with a
                    // switch. Fix: the original allocated throw-away objects that
                    // were immediately overwritten by DeserializeObject, and kept
                    // testing the remaining topics after a match.
                    switch (cr.Topic)
                    {
                    case "REGISTER_FACE_RESPONSE":
                        LogResponse("REGISTER_FACE_RESPONSE", cr.Message.Value);
                        var registerFace = JsonConvert.DeserializeObject <RegisterFace>(cr.Message.Value);
                        HandleTaskRegister.Action(registerFace.Record, registerFace);
                        break;

                    case "DETECT_FACE_RESPONSE":
                        LogResponse("DETECT_FACE_RESPONSE", cr.Message.Value);
                        var detectFace = JsonConvert.DeserializeObject <DetectFace>(cr.Message.Value);
                        HandleTaskDetect.Action(detectFace.Record, detectFace);
                        break;

                    case "REMOVE_TOPIC_RESPONSE":
                        LogResponse("REMOVE_TOPIC_RESPONSE", cr.Message.Value);
                        var deleteFace = JsonConvert.DeserializeObject <DeleteFace>(cr.Message.Value);
                        HandleTaskDelete.Action(deleteFace.Id, deleteFace);
                        break;
                    }
                }
                catch (OperationCanceledException)
                {
                    break;
                }
                catch (ConsumeException e)
                {
                    // Consumer errors should generally be ignored (or logged) unless fatal.
                    Console.WriteLine($"Consume error: {e.Error.Reason}");

                    if (e.Error.IsFatal)
                    {
                        break;
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine($"Unexpected error: {e}");
                    break;
                }
            }
        }

        // Writes a topic response to the console in green, then restores white.
        private static void LogResponse(string topic, string payload)
        {
            Console.ForegroundColor = ConsoleColor.Green;
            Console.WriteLine(topic + " --- " + payload);
            Console.ForegroundColor = ConsoleColor.White;
        }
Пример #6
0
        /// <summary>
        /// Loads an image file, records it as a grayscale training sample, runs
        /// face/eye detection, draws the results and saves the annotated image
        /// to a unique temporary .jpg file.
        /// </summary>
        /// <param name="fname">Path of the image file to process.</param>
        /// <returns>Path of the temporary .jpg the annotated image was written to.</returns>
        string Run(string fname)
        {
            //Read the file as an 8-bit Bgr image (UMat version; a Mat would also work)
            IImage image = new UMat(fname, ImreadModes.Color);

            // Record the grayscale version as a training sample with a fresh id.
            var grayTrainingImage = new Image <Emgu.CV.Structure.Gray, byte>(fname);
            trainingImages.Add(grayTrainingImage);
            Names_List_ID.Add(Names_List_ID.Count());

            long             detectionTime;
            List <Rectangle> faces = new List <Rectangle>();
            List <Rectangle> eyes  = new List <Rectangle>();

            DetectFace.Detect(
                image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                faces, eyes,
                out detectionTime);

            foreach (Rectangle face in faces)
            {
                CvInvoke.Rectangle(image, face, new Bgr(Color.Red).MCvScalar, 2);
            }
            foreach (Rectangle eye in eyes)
            {
                CvInvoke.Rectangle(image, eye, new Bgr(Color.Blue).MCvScalar, 2);
            }

            // Save the annotated image to a unique temp path.
            // Fix: Path.GetTempFileName() CREATES a zero-byte file on disk, so the
            // original ("GetTempFileName() + .jpg") leaked one empty temp file per
            // call; build the path without creating a file instead.
            var tempfile = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N") + ".jpg");
            image.Save(tempfile);
            richTextBox1.AppendText(Environment.NewLine + "eee");
            return(tempfile);
        }
Пример #7
0
    /// <summary>
    /// Callback invoked when the camera delivers a new frame. Publishes the raw
    /// frame via OnImageCaptured and, when local detection is enabled, runs face
    /// detection (optionally filtering out too-small/too-distant faces) before
    /// raising OnFaceCaptured with the biggest face.
    /// </summary>
    /// <param name="sender">Event source.</param>
    /// <param name="eventArgs">Carries the captured frame.</param>
    void CaptureNewFrame(Object sender, NewFrameEventArgs eventArgs)
    {
        // Only process when enabled by the external timer; the flag is cleared at
        // the end so each enable handles exactly one frame.
        if (processOnNewFrame)
        {
            //refresh captured image and publish it to subscribers
            captureImage = (Bitmap)eventArgs.Frame.Clone();
            OnImageCaptured(new CameraEventArgs(captureImage));

            //call the local face detect method
            if (localFaceDetect)
            {
                Bitmap detectImage = (Bitmap)eventArgs.Frame.Clone();

                long             detectionTime;
                List <Rectangle> faces = new List <Rectangle>();
                DetectFace.Detect(detectImage, "haarcascade_frontalface_default.xml", faces, out detectionTime);
                if (faces.Count > 0)
                {
                    if (localFaceDistanceFilter)
                    {
                        // Drop faces that are too small (i.e. too far away).
                        // Fix: the original removed items while indexing forward,
                        // which skipped the element following each removal.
                        faces.RemoveAll(f => (f.Size.Height <= ThresholdSize.Height) ||
                                             (f.Size.Width <= ThresholdSize.Width));
                    }
                    if (faces.Count > 0)
                    {
                        var face = DetectFace.PickOneBigFace(detectImage, faces);

                        //only raise event when there is a close-enough face
                        OnFaceCaptured(new CameraFaceEventArgs(face));
                    }
                }
                // else: no face in the image — nothing to raise
            }
            else
            {
                OnFaceCaptured(new CameraFaceEventArgs(captureImage));
            }

            //auto turn off the process to reduce processing time cost
            processOnNewFrame = false;
        }
    }
Пример #8
0
        /// <summary>
        /// Camera frame handler: detects a face plus left/right eyes and sends a
        /// PageUp keystroke when a left eye is found and PageDown when a right eye
        /// is found, then displays the annotated frame.
        /// </summary>
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = new Mat();

            capture.Retrieve(frame, 0);

            Mat              image = frame; //Read the frame as an 8-bit BGR image
            long             detectionTime;
            List <Rectangle> faces     = new List <Rectangle>();
            List <Rectangle> eyesleft  = new List <Rectangle>();
            List <Rectangle> eyesright = new List <Rectangle>();

            //The cuda cascade classifier doesn't seem to be able to load "haarcascade_frontalface_default.xml" file in this release
            //disabling CUDA module for now
            bool tryUseCuda   = false;
            bool tryUseOpenCL = true;

            DetectFace.Detect(
                image, "haarcascade_frontalface_default.xml", "haarcascade_lefteye_2splits.xml", "haarcascade_righteye_2splits.xml",
                faces, eyesleft, eyesright,
                tryUseCuda,
                tryUseOpenCL,
                out detectionTime);

            foreach (Rectangle face in faces)
            {
                CvInvoke.Rectangle(image, face, new Bgr(Color.White).MCvScalar, 3);
                Bitmap c = frame.Bitmap;
                // Fix: dispose the per-face Bitmap/Graphics — the original leaked
                // one of each per detected face on every frame.
                using (Bitmap bmp = new Bitmap(face.Size.Width, face.Size.Height))
                using (Graphics g = Graphics.FromImage(bmp))
                {
                    g.DrawImage(c, 0, 0, face, GraphicsUnit.Pixel);
                }

                // NOTE(review): these eye loops run once per detected face, so a
                // frame with N faces sends each keystroke N times — confirm whether
                // that repetition is intended.
                foreach (Rectangle eyeleft in eyesleft)
                {
                    CvInvoke.Rectangle(image, eyeleft, new Bgr(Color.Black).MCvScalar, 3);
                    if (eyeleft.X != 0)
                    {
                        SendKeys.SendWait("{PGUP}");
                    }
                }

                foreach (Rectangle eyeright in eyesright)
                {
                    CvInvoke.Rectangle(image, eyeright, new Bgr(Color.Blue).MCvScalar, 3);
                    if (eyeright.X != 0)
                    {
                        SendKeys.SendWait("{PGDN}");
                    }
                }
            }
            imageBox2.Image = frame; //display the annotated frame
        }
Пример #9
0
        /// <summary>
        /// Runs face/eye detection on the current frame, filling the shared
        /// faces/eyes lists and caching the frame's bitmap in faceImage.
        /// </summary>
        private void GetFaces()
        {
            // The cast to IImage is the key step: Detect accepts the interface.
            IImage image = (IImage)frame;

            faceImage = frame.Bitmap;

            long detectionTime;
            DetectFace.Detect(
                image,
                "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                faces, eyes,
                out detectionTime);
        }
Пример #10
0
        /// <summary>
        /// Face-detection demo page: wires the action button and, when images are
        /// loaded, downloads the Haar cascade files, runs face/eye detection and
        /// displays the annotated image with timing information.
        /// </summary>
        public FaceDetectionPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform Face Detection";
            button.Clicked += OnButtonClicked;

            OnImagesLoaded += async (sender, image) =>
            {
                if (image == null || image[0] == null)
                {
                    return;
                }
                SetMessage("Please wait...");
                SetImage(null);

                // Download the cascade definitions.
                FileDownloadManager downloadManager = new FileDownloadManager();
                // Fix: the base url already ends with '/', so the file names must not
                // add another one — the original produced ".../haarcascades//file.xml".
                String url = "https://github.com/opencv/opencv/raw/4.2.0/data/haarcascades/";
                downloadManager.AddFile(url + "haarcascade_frontalface_default.xml", "haarcascade");
                downloadManager.AddFile(url + "haarcascade_eye.xml", "haarcascade");

                downloadManager.OnDownloadProgressChanged += DownloadManager_OnDownloadProgressChanged;

                await downloadManager.Download();

                String           faceFile = downloadManager.Files[0].LocalFile;
                String           eyeFile  = downloadManager.Files[1].LocalFile;
                long             time;
                List <Rectangle> faces = new List <Rectangle>();
                List <Rectangle> eyes  = new List <Rectangle>();

                using (UMat img = image[0].GetUMat(AccessType.ReadWrite))
                    DetectFace.Detect(img, faceFile, eyeFile, faces, eyes, out time);

                //Draw the faces in red
                foreach (Rectangle rect in faces)
                {
                    CvInvoke.Rectangle(image[0], rect, new MCvScalar(0, 0, 255), 2);
                }

                //Draw the eyes in blue
                foreach (Rectangle rect in eyes)
                {
                    CvInvoke.Rectangle(image[0], rect, new MCvScalar(255, 0, 0), 2);
                }

                String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Ocl.Device.Default.Name : "CPU";
                SetMessage(String.Format("Detected with {1} in {0} milliseconds.", time, computeDevice));

                SetImage(image[0]);
            };
        }
Пример #11
0
        /// <summary>
        /// Registers the Ninject bindings: the picture repository, a face detector
        /// bound as a constant to the server's cascade file, and a 7x7 pixelate filter.
        /// </summary>
        /// <param name="serverPath">Physical root path of the server application.</param>
        private void AddBindings(string serverPath)
        {
            _ninjectKernel.Bind <IPictureRepository>().To <EFPictureRepository>();

            // TODO use a relative path
            // Fix: Path.Combine instead of raw concatenation, so the result is
            // correct whether or not serverPath ends with a directory separator.
            var faceDetect = new DetectFace(System.IO.Path.Combine(serverPath, "bin", "haarcascade_frontalface_default.xml"));

            _ninjectKernel.Bind <DetectFace>().ToConstant(faceDetect);
            _ninjectKernel.Bind <IFiltering>().To <Pixelate>()
            .WithConstructorArgument("pixelsX", 7)
            .WithConstructorArgument("pixelsY", 7);
        }
Пример #12
0
        /// <summary>
        /// Face-detection demo page using bundled cascade files: runs detection on
        /// a background task and displays the annotated image with timing info.
        /// </summary>
        public FaceDetectionPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform Face Detection";
            button.Clicked += OnButtonClicked;

            OnImagesLoaded += async (sender, image) =>
            {
                if (image == null || image [0] == null)
                {
                    return;
                }
                SetMessage("Please wait...");
                SetImage(null);

                // Run detection off the UI thread.
                // Fix: Task.Run replaces the manual "new Task(...) + Start" pair.
                var result = await Task.Run(() =>
                {
                    String faceFile;
                    String eyeFile;
                    bool fileOk = CheckCascadeFile("haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                                                   out faceFile,
                                                   out eyeFile);
                    // Fix: the original ignored fileOk and would fail later with an
                    // obscure error when the cascade files could not be found.
                    if (!fileOk)
                    {
                        return null;
                    }

                    long time;
                    List <Rectangle> faces = new List <Rectangle>();
                    List <Rectangle> eyes  = new List <Rectangle>();

                    using (UMat img = image[0].GetUMat(AccessType.ReadWrite))
                        DetectFace.Detect(img, faceFile, eyeFile, faces, eyes, out time);

                    //faces in red, eyes in blue
                    foreach (Rectangle rect in faces)
                    {
                        CvInvoke.Rectangle(image[0], rect, new MCvScalar(0, 0, 255), 2);
                    }
                    foreach (Rectangle rect in eyes)
                    {
                        CvInvoke.Rectangle(image[0], rect, new MCvScalar(255, 0, 0), 2);
                    }

                    return(new Tuple <Mat, long>(image[0], time));
                });

                if (result == null)
                {
                    SetMessage("Unable to load the cascade files.");
                    return;
                }

                // Fix: use the awaited result instead of re-reading t.Result.
                SetImage(result.Item1);
                String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Ocl.Device.Default.Name : "CPU";

                SetMessage(String.Format("Detected with {1} in {0} milliseconds.", result.Item2, computeDevice));
            };
        }
Пример #13
0
        /// <summary>
        /// Activity entry point: on button click, loads "lena.jpg", makes sure the
        /// Haar cascade assets are extracted to permanent storage (re-done when the
        /// app version changes), runs face/eye detection and displays the result.
        /// </summary>
        /// <param name="bundle">Saved activity state, forwarded to the base class.</param>
        protected override void OnCreate(Bundle bundle)
        {
            base.OnCreate(bundle);

            OnButtonClick += delegate
            {
                AppPreference appPreference = new AppPreference();
                using (Image <Bgr, Byte> image = PickImage("lena.jpg"))
                {
                    ISharedPreferences preference = PreferenceManager.GetDefaultSharedPreferences(ApplicationContext);
                    String             appVersion = PackageManager.GetPackageInfo(PackageName, Android.Content.PM.PackageInfoFlags.Activities).VersionName;
                    // Re-extract the cascade assets when no version is recorded, the
                    // recorded version differs from the installed app version, or the
                    // stored file paths are absent.
                    // NOTE(review): the last clause negates an OR, so extraction only
                    // happens when BOTH paths are missing; presumably AND was intended
                    // (re-extract if EITHER is missing) — confirm.
                    if (!preference.Contains("cascade-data-version") || !preference.GetString("cascade-data-version", null).Equals(appVersion) ||
                        !(preference.Contains("cascade-eye-data-path") || preference.Contains("cascade-face-data-path")))
                    {
                        AndroidFileAsset.OverwriteMethod overwriteMethod = AndroidFileAsset.OverwriteMethod.AlwaysOverwrite;

                        FileInfo eyeFile  = AndroidFileAsset.WritePermanantFileAsset(this, "haarcascade_eye.xml", "cascade", overwriteMethod);
                        FileInfo faceFile = AndroidFileAsset.WritePermanantFileAsset(this, "haarcascade_frontalface_default.xml", "cascade", overwriteMethod);

                        //save the cascade data paths and the app version they belong to
                        ISharedPreferencesEditor editor = preference.Edit();
                        editor.PutString("cascade-data-version", appVersion);
                        editor.PutString("cascade-eye-data-path", eyeFile.FullName);
                        editor.PutString("cascade-face-data-path", faceFile.FullName);
                        editor.Commit();
                    }

                    string           eyeXml  = preference.GetString("cascade-eye-data-path", null);
                    string           faceXml = preference.GetString("cascade-face-data-path", null);
                    long             time;
                    List <Rectangle> faces = new List <Rectangle>();
                    List <Rectangle> eyes  = new List <Rectangle>();

                    bool tryUseOpenCL = appPreference.UseOpenCL;
                    DetectFace.Detect(image.Mat, faceXml, eyeXml, faces, eyes, false, tryUseOpenCL, out time);
                    SetMessage(String.Format("Detected with {1} in {0} milliseconds.", time, CvInvoke.UseOpenCL ? "OpenCL" : "CPU"));

                    // Draw faces in red and eyes in blue, then show the bitmap.
                    foreach (Rectangle rect in faces)
                    {
                        image.Draw(rect, new Bgr(System.Drawing.Color.Red), 2);
                    }
                    foreach (Rectangle rect in eyes)
                    {
                        image.Draw(rect, new Bgr(System.Drawing.Color.Blue), 2);
                    }

                    SetImageBitmap(image.ToBitmap());
                }
            };
        }
Пример #14
0
 /// <summary>
 /// Camera frame handler with two modes: when faceRecog is checked, runs SURF
 /// based recognition of the model image shown in secondImageBox against the
 /// live frame; otherwise runs Haar face/eye detection on the live frame and
 /// displays the detected face crop and annotated frame.
 /// </summary>
 private void ProcessFrame(object sender, EventArgs arg)
 {
     if (faceRecog.Checked == true)
     {
         long recpoints;
         // Model image = whatever is currently displayed in secondImageBox;
         // observed image = the live camera frame (both converted to grayscale).
         Image <Bgr, Byte> img = new Image <Bgr, byte>(secondImageBox.Image.Bitmap);
         using (Image <Gray, Byte> modelImage = img.Convert <Gray, Byte>())
             using (Image <Gray, Byte> observedImage = _capture.RetrieveBgrFrame().Convert <Gray, Byte>())
             {
                 Image <Bgr, byte> result = SurfRecognizer.Draw(modelImage, observedImage, out recpoints);
                 captureImageBox.Image = observedImage;
                 // More than 10 matched SURF points is treated as a recognition.
                 if (recpoints > 10)
                 {
                     MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
                     //Draw the welcome banner on the result image using the specific font
                     result.Draw("Person Recognited, Welcome", ref f, new Point(40, 40), new Bgr(0, 255, 0));
                     ImageViewer.Show(result, String.Format(" {0} Points Recognited", recpoints));
                 }
             }
     }
     ///////////////////////////////////////////////////////////////////////
     if (faceRecog.Checked == false)
     {
         Image <Bgr, Byte> detectedface;
         Image <Bgr, Byte> frame = _capture.RetrieveBgrFrame();
         Image <Bgr, Byte> image = frame.Resize(400, 300, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);//Read the files as an 8-bit Bgr image
         long             detectionTime;
         List <Rectangle> faces = new List <Rectangle>();
         List <Rectangle> eyes  = new List <Rectangle>();
         DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
         foreach (Rectangle face in faces)
         {
             image.Draw(face, new Bgr(Color.Red), 2);
             // Restrict the image to the face region; detectedface is an alias of
             // image, so it shares the ROI set here.
             image.ROI    = face;
             detectedface = image;
             if (eqHisChecked.Checked == false)
             {
                 // Show the grayscale face crop scaled 2x.
                 secondImageBox.Image = detectedface.Convert <Gray, Byte>().Resize(2, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
             }
             // NOTE(review): ROI is set via the managed property but cleared via the
             // legacy cvResetImageROI call — confirm these stay in sync in this
             // Emgu version.
             CvInvoke.cvResetImageROI(image);
         }
         foreach (Rectangle eye in eyes)
         {
             image.Draw(eye, new Bgr(Color.Blue), 2);
         }
         captureImageBox.Image = image;
     }
 }
Пример #15
0
        /// <summary>
        /// Returns true when the image contains exactly one face and two eyes.
        /// </summary>
        /// <param name="fname">The BGR image to inspect.</param>
        bool HaveFace(Image <Bgr, Byte> fname)
        {
            var detectedFaces = new List<Rectangle>();
            var detectedEyes = new List<Rectangle>();
            long elapsedMs;

            DetectFace.Detect(
                fname, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                detectedFaces, detectedEyes,
                out elapsedMs);

            return(detectedFaces.Count == 1 && detectedEyes.Count == 2);
        }
Пример #16
0
        /// <summary>
        /// Detects faces in the given image and returns their bounding rectangles.
        /// Eyes are detected as a by-product of the shared Detect call but discarded.
        /// </summary>
        /// <param name="image">The BGR image to scan.</param>
        /// <returns>Bounding rectangles of all detected faces.</returns>
        List <Rectangle> DF(Image <Bgr, byte> image)
        {
            var detectedFaces = new List<Rectangle>();
            var detectedEyes = new List<Rectangle>();
            long elapsedMs;

            DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", detectedFaces, detectedEyes, out elapsedMs);

            return(detectedFaces);
        }
Пример #17
0
        /// <summary>
        /// Takes a picture with the camera or lets the user choose one (depending
        /// on request.UseCamera), then runs face detection on it and returns the
        /// detected face data together with any accumulated notifications.
        /// </summary>
        /// <param name="request">Options controlling the picture source and the face-detection flags.</param>
        /// <returns>The detection result; FaceData is only set when everything succeeded.</returns>
        public override async Task <DetectPersonResult> ExecuteAsync(DetectPersonContext request)
        {
            var retResult = new DetectPersonResult();

            SelectPictureResult pictureResult;

            if (request.UseCamera)
            {
                var pictureRequest = new TakePictureRequest {
                    CameraOption = request.CameraOption, MaxPixelDimension = 500
                };
                pictureResult = await TakePicture.ExecuteAsync(pictureRequest);
            }
            else
            {
                var choosePictureRequest = new ChoosePictureRequest {
                    MaxPixelDimension = 500
                };
                pictureResult = await ChoosePicture.ExecuteAsync(choosePictureRequest);
            }

            // Fix: the original added retResult's own (still empty) notifications to
            // itself; the picture step's notifications are what must propagate.
            retResult.Notification.AddRange(pictureResult.Notification);

            if (retResult.IsValid() && pictureResult.TaskResult == TaskResult.Success)
            {
                var faceContext = new DetectFaceContext
                {
                    FaceImage            = pictureResult.Image,
                    DetectFaceAttributes = request.DetectFaceAttributes,
                    DetectFaceId         = request.DetectFaceId,
                    DetectFaceLandmarks  = request.DetectFaceLandmarks
                };
                var recogResult = await DetectFace.ExecuteAsync(faceContext);

                retResult.Notification.AddRange(recogResult.Notification);

                if (retResult.IsValid())
                {
                    retResult.FaceData = recogResult.Face;
                }
            }

            return(retResult);
        }
        /// <summary>
        /// Wires the demo button: on click, detects faces and eyes in "lena.jpg",
        /// draws them (faces red, eyes blue), shows the resized result and reports
        /// the processing time.
        /// </summary>
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            ButtonText     = "Detect Face & Eyes";
            OnButtonClick += delegate
            {
                long processingTime;
                using (CascadeClassifier faceDetector = new CascadeClassifier("haarcascade_frontalface_default.xml"))
                using (CascadeClassifier eyeDetector = new CascadeClassifier("haarcascade_eye.xml"))
                using (Image <Bgr, Byte> lena = new Image <Bgr, Byte>("lena.jpg"))
                {
                    var faceRects = new List<Rectangle>();
                    var eyeRects = new List<Rectangle>();

                    DetectFace.Detect(
                        lena.Mat,
                        faceDetector,
                        eyeDetector,
                        faceRects,
                        eyeRects,
                        out processingTime
                        );

                    // Faces in red, eyes in blue.
                    foreach (var faceRect in faceRects)
                    {
                        lena.Draw(faceRect, new Bgr(Color.Red), 1);
                    }
                    foreach (var eyeRect in eyeRects)
                    {
                        lena.Draw(eyeRect, new Bgr(Color.Blue), 1);
                    }

                    Size frameSize = FrameSize;
                    using (Image <Bgr, Byte> resized = lena.Resize(frameSize.Width, frameSize.Height, Emgu.CV.CvEnum.Inter.Nearest, true))
                    {
                        SetImage(resized);
                    }
                }
                MessageText = String.Format(
                    "Processing Time: {0} milliseconds.",
                    processingTime
                    );
            };
        }
Пример #19
0
        /// <summary>
        /// Captures the current camera frame, selects a detected face as the ROI
        /// and saves it, resized to 100x100, under "TrainedFaces" with a
        /// name-plus-timestamp file name.
        /// </summary>
        private void addDatabaseButton_Click(object sender, EventArgs e)
        {
            // Timestamp used in the saved file name.
            // Fix: the original called DateTime.Now separately for each component,
            // which can produce an inconsistent stamp when the calls straddle a
            // second/minute boundary; capture the time once and format it.
            DateTime now = DateTime.Now;
            string fileName = textBox1.Text + "_" + now.ToString("d-M-yyyy-H-m-s") + ".jpg";

            //First the faces in the image are detected
            Image <Bgr, Byte> image = _capture.RetrieveBgrFrame().Resize(400, 300, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            List <Rectangle>  faces = new List <Rectangle>();
            List <Rectangle>  eyes  = new List <Rectangle>();
            long detectionTime;

            DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
            // NOTE(review): only the LAST detected face remains as the ROI;
            // presumably a single face is expected in frame — confirm.
            foreach (Rectangle face in faces)
            {
                image.ROI = face;
            }
            Directory.CreateDirectory("TrainedFaces");
            image.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC).ToBitmap().Save("TrainedFaces\\" + fileName);
        }
Пример #20
0
        /// <summary>
        /// Button handler: loads the sample image, runs face/eye detection
        /// (optionally via OpenCL), draws the results and reports the timing.
        /// async void is acceptable here only because this is an event handler.
        /// </summary>
        private async void OnButtonClicked(Object sender, EventArgs args)
        {
            // Load the sample picture; bail out early if it could not be read.
            Mat[] loaded = await LoadImages(new string[] { "lena.jpg" });
            if (loaded == null || loaded[0] == null)
                return;

            SetMessage("Please wait...");
            SetImage(null);

            await InitDetector();

            List <Rectangle> faceRegions = new List <Rectangle>();
            List <Rectangle> eyeRegions  = new List <Rectangle>();
            long elapsedMs;

            // Detect on a UMat view of the image so the OpenCL path can be used
            // when it is available.
            using (UMat detectionInput = loaded[0].GetUMat(AccessType.ReadWrite))
            {
                DetectFace.Detect(detectionInput, _faceCascadeClassifier, _eyeCascadeClassifier, faceRegions, eyeRegions, out elapsedMs);
            }

            // Faces outlined in red, eyes in blue (BGR scalar order).
            foreach (Rectangle faceRect in faceRegions)
                CvInvoke.Rectangle(loaded[0], faceRect, new MCvScalar(0, 0, 255), 2);

            foreach (Rectangle eyeRect in eyeRegions)
                CvInvoke.Rectangle(loaded[0], eyeRect, new MCvScalar(255, 0, 0), 2);

            String computeDevice = CvInvoke.UseOpenCL
                ? "OpenCL: " + Ocl.Device.Default.Name
                : "CPU";
            SetMessage(String.Format("Detected with {1} in {0} milliseconds.", elapsedMs, computeDevice));

            SetImage(loaded[0]);
        }
Пример #21
0
        /// <summary>
        /// Runs Haar-cascade face and eye detection on <paramref name="frame"/>,
        /// draws the detections directly onto it (faces red, eyes blue) and
        /// returns the same Mat instance.
        /// </summary>
        public static Mat detectAndDisplayFunc(Mat frame)
        {
            List <Rectangle> faceRects = new List <Rectangle>();
            List <Rectangle> eyeRects  = new List <Rectangle>();

            DetectFace.Detect(
                frame, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                faceRects, eyeRects,
                out long detectionTime);

            // Outline each detected face in red.
            MCvScalar red = new Bgr(Color.Red).MCvScalar;
            foreach (Rectangle faceRect in faceRects)
                CvInvoke.Rectangle(frame, faceRect, red, 2);

            // Outline each detected eye in blue.
            MCvScalar blue = new Bgr(Color.Blue).MCvScalar;
            foreach (Rectangle eyeRect in eyeRects)
                CvInvoke.Rectangle(frame, eyeRect, blue, 2);

            return(frame);
        }
Пример #22
0
        /// <summary>
        /// Timer/event callback: grabs a frame from the capture device, detects
        /// faces and eyes, draws a red box plus an "Xmal" label bar over each
        /// face, and shows the frame in the picture box.
        /// </summary>
        private void ProcessFrame(object sender, EventArgs arg)
        {
            if (_capture != null && _capture.Ptr != IntPtr.Zero)
            {
                Mat frame = _capture.QueryFrame();

                // QueryFrame returns null when no frame is available (e.g. the
                // end of a video file); guard against it so DetectFace and the
                // drawing calls below do not hit a null reference.
                if (frame == null)
                {
                    return;
                }

                long             detectionTime;
                List <Rectangle> faces = new List <Rectangle>();
                List <Rectangle> eyes  = new List <Rectangle>();

                DetectFace.Detect(frame, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

                MCvScalar red = new Bgr(Color.Red).MCvScalar;
                foreach (Rectangle face in faces)
                {
                    // Box the face, then draw a filled label bar above it with
                    // the tag text in white.
                    CvInvoke.Rectangle(frame, face, red, 2);
                    CvInvoke.Rectangle(frame, new Rectangle(face.X, face.Y - 20, face.Width / 2, 20), red, -1);
                    CvInvoke.PutText(frame, "Xmal", new Point(face.X + 3, face.Y - 3), FontFace.HersheyComplexSmall, 1.0, new MCvScalar(255.0, 255.0, 255.0), 1);
                }

                pbImg.Image = frame;
            }
        }
Пример #23
0
        /// <summary>
        /// Frame handler: acquires an image (camera or user-picked file),
        /// detects faces, and — when recognition is enabled — runs the
        /// algorithm selected in comboBoxAlgorithm (SURF matching, Eigen-,
        /// Fisher- or LBPH face recognizer) on each face, drawing the result.
        /// NOTE(review): statement order matters here — image.ROI is set per
        /// face and reset at several points; do not reorder.
        /// </summary>
        private void ProcessFrame2(object sender, EventArgs arg)
        {
            // Source selection: live camera frame, resized to a fixed 320x240.
            if (comboBoxCapture.Text == "Camera")
            {
                image = _capture.RetrieveBgrFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }


            // Or a single image chosen interactively. NOTE(review): this opens a
            // modal file dialog from inside a frame callback — presumably this
            // handler is also wired to a one-shot trigger; verify.
            if (comboBoxCapture.Text == "Single Image")
            {
                OpenFileDialog Openfile = new OpenFileDialog();
                if (Openfile.ShowDialog() == DialogResult.OK)
                {
                    image = new Image <Bgr, byte>(Openfile.FileName);
                }
            }

            List <Rectangle> faces = new List <Rectangle>();
            List <Rectangle> eyes  = new List <Rectangle>();
            long             detectionTime;

            // Haar-cascade face/eye detection on the acquired image.
            DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
            foreach (Rectangle face in faces)
            {
                //Image ROI selected as each face in image
                if (workCorruptedImages.Checked == true)
                {
                    image.ROI = face;
                }
                if (faceRecog.Checked == true)
                {
                    //now program apply selected algorithm if recognition has started

                    //For SURF Algorithm: match every saved training image against
                    //the current (possibly ROI-restricted) frame.
                    if (comboBoxAlgorithm.Text == "SURF Feature Extractor")
                    {
                        string   dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
                        string[] files         = Directory.GetFiles(dataDirectory, "*.jpg", SearchOption.AllDirectories);

                        foreach (var file in files)
                        {
                            richTextBox1.Text += file.ToString();
                            long recpoints;
                            Image <Bgr, Byte> sampleImage = new Image <Bgr, Byte>(file);
                            secondImageBox.Image = sampleImage;
                            using (Image <Gray, Byte> modelImage = sampleImage.Convert <Gray, Byte>())
                                using (Image <Gray, Byte> observedImage = image.Convert <Gray, Byte>())
                                {
                                    Image <Bgr, byte> result = SurfRecognizer.Draw(modelImage, observedImage, out recpoints);
                                    //captureImageBox.Image = observedImage;
                                    // More than 10 matched keypoints is treated as a positive match.
                                    if (recpoints > 10)
                                    {
                                        MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
                                        result.Draw("Person Recognited, Welcome", ref f, new Point(40, 40), new Bgr(0, 255, 0));
                                        ImageViewer.Show(result, String.Format(" {0} Points Recognited", recpoints));
                                    }
                                }
                        }
                    }
                    //For EigenFaces: predict on the full frame (ROI reset first),
                    //optionally histogram-equalized, downscaled to the 100x100
                    //size the recognizer was trained on.
                    else if (comboBoxAlgorithm.Text == "EigenFaces")
                    {
                        CvInvoke.cvResetImageROI(image);
                        //image._EqualizeHist();
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = eigenFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        // Label -1 means "unknown"; otherwise draw the matched name near the face.
                        if (result.Label != -1)
                        {
                            image.Draw(eigenlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                        }
                    }
                    //For FisherFaces: same flow as EigenFaces with the Fisher recognizer.
                    else if (comboBoxAlgorithm.Text == "FisherFaces")
                    {
                        CvInvoke.cvResetImageROI(image);
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = fisherFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            image.Draw(fisherlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                        }
                    }

                    //For LBPH: note the ROI is reset AFTER prediction here,
                    //unlike the Eigen/Fisher branches — TODO confirm intended.
                    else if (comboBoxAlgorithm.Text == "LBPHFaces")
                    {
                        if (eqHisChecked.Checked == true)
                        {
                            image._EqualizeHist();
                        }
                        var result = lbphFaceRecognizer.Predict(image.Convert <Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
                        if (result.Label != -1)
                        {
                            CvInvoke.cvResetImageROI(image);
                            image.Draw(lbphlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
                            label6.Text = result.Distance.ToString();
                            label7.Text = lbphlabels[result.Label].ToString();
                        }
                    }
                }

                // Always drop the ROI before outlining the face on the full frame.
                CvInvoke.cvResetImageROI(image);
                image.Draw(face, new Bgr(Color.Red), 2);
            }
            captureImageBox.Image = image;
        }
Пример #24
0
        /// <summary>
        /// Activity setup: subscribes the image-picked handler (configure
        /// OpenCL, ensure the Haar cascade files are extracted to storage,
        /// run face/eye detection and display the result) and wires the
        /// button click to pick the sample image.
        /// </summary>
        protected override void OnCreate(Bundle bundle)
        {
            base.OnCreate(bundle);

            OnImagePicked += (sender, image) =>
            {
                if (image == null)
                {
                    return;
                }

                // Apply the user's OpenCL preferences before any CV call.
                AppPreference appPreference = new AppPreference();
                CvInvoke.UseOpenCL = appPreference.UseOpenCL;
                String oclDeviceName = appPreference.OpenClDeviceName;
                if (!String.IsNullOrEmpty(oclDeviceName))
                {
                    CvInvoke.OclSetDefaultDevice(oclDeviceName);
                }


                // Re-extract the cascade XML assets to permanent storage when the
                // app version changed or the stored paths are missing, and cache
                // the paths (keyed by app version) in shared preferences.
                ISharedPreferences preference = PreferenceManager.GetDefaultSharedPreferences(ApplicationContext);
                String             appVersion = PackageManager.GetPackageInfo(PackageName, Android.Content.PM.PackageInfoFlags.Activities).VersionName;
                if (!preference.Contains("cascade-data-version") || !preference.GetString("cascade-data-version", null).Equals(appVersion) ||
                    !(preference.Contains("cascade-eye-data-path") || preference.Contains("cascade-face-data-path")))
                {
                    AndroidFileAsset.OverwriteMethod overwriteMethod = AndroidFileAsset.OverwriteMethod.AlwaysOverwrite;

                    FileInfo eyeFile  = AndroidFileAsset.WritePermanantFileAsset(this, "haarcascade_eye.xml", "cascade", overwriteMethod);
                    FileInfo faceFile = AndroidFileAsset.WritePermanantFileAsset(this, "haarcascade_frontalface_default.xml", "cascade", overwriteMethod);

                    //save the cascade data paths and version stamp
                    ISharedPreferencesEditor editor = preference.Edit();
                    editor.PutString("cascade-data-version", appVersion);
                    editor.PutString("cascade-eye-data-path", eyeFile.FullName);
                    editor.PutString("cascade-face-data-path", faceFile.FullName);
                    editor.Commit();
                }

                string           eyeXml  = preference.GetString("cascade-eye-data-path", null);
                string           faceXml = preference.GetString("cascade-face-data-path", null);
                long             time;
                List <Rectangle> faces = new List <Rectangle>();
                List <Rectangle> eyes  = new List <Rectangle>();


                DetectFace.Detect(image, faceXml, eyeXml, faces, eyes, out time);

                String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Emgu.CV.Ocl.Device.Default.Name : "CPU";
                SetMessage(String.Format("Detected with {1} in {0} milliseconds.", time, computeDevice));

                // Faces in red, eyes in blue.
                foreach (Rectangle rect in faces)
                {
                    CvInvoke.Rectangle(image, rect, new Bgr(System.Drawing.Color.Red).MCvScalar, 2);
                }

                foreach (Rectangle rect in eyes)
                {
                    CvInvoke.Rectangle(image, rect, new Bgr(System.Drawing.Color.Blue).MCvScalar, 2);
                }

                // Hand a bitmap copy to the UI, then release the native image.
                SetImageBitmap(image.ToBitmap());
                image.Dispose();

                if (CvInvoke.UseOpenCL)
                {
                    CvInvoke.OclFinish();
                }
            };

            OnButtonClick += (sender, args) =>
            {
                PickImage("lena.jpg");
            };
        }
Пример #25
0
 /// <summary>
 /// Wires up the injected face-detection service, filtering strategy and
 /// picture repository for this controller.
 /// </summary>
 public HomeController(DetectFace detectFace, IFiltering fiters, IPictureRepository repository)
 {
     _repository = repository;
     _filters = fiters;
     _detectFace = detectFace;
 }
Пример #26
0
        /// <summary>
        /// Detects round objects ("tubes") in an image file using a Haar
        /// cascade, filters out detections much smaller than the average,
        /// numbers and circles the rest, and shows the count in a dialog.
        /// </summary>
        /// <param name="filenameForRecognize">Path of the image to analyze.</param>
        /// <param name="cascade">File name of the Haar cascade XML (resolved under App._APRSDir).</param>
        private void Recognize(string filenameForRecognize, string cascade)
        {
            string filename = string.Format("{0}/{1}", App._APRSDir, cascade);

            long time;

            // "tubes" receives face-slot detections, "old" the eye-slot ones
            // (the same cascade is passed for both; "old" is unused afterwards).
            List <Rectangle> tubes    = new List <Rectangle>();
            List <Rectangle> old      = new List <Rectangle>();
            List <double>    areaList = new List <double>();
            CircleF          circle   = new CircleF();
            int     counter           = 0;
            int     i    = 1; //running tube number used for on-image labels
            MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX_SMALL, 1.0, 1.0);


            // Load and cap the bitmap at 1024x1024 before detection.
            var ArchFile = filenameForRecognize.LoadAndResizeBitmap(1024, 1024);
            //Haar cascade
            var FileForRecognize = new Image <Bgr, byte>(ArchFile);

            DetectFace.Detect(FileForRecognize, filename, filename, tubes, old, out time);
            double AVGarea = 0.00;

            // Approximate each detection's area as pi * w * h (note: the true
            // ellipse area would be pi * (w/2) * (h/2); the constant factor
            // cancels in the ratio test below).
            foreach (Rectangle tube in tubes)
            {
                var area = (3.14) * tube.Width * tube.Height;
                areaList.Add(area);
            }
            try
            {
                // Average() throws InvalidOperationException on an empty list.
                AVGarea = areaList.Average();
            }
            catch (Exception nullObjDetect)
            {
                // No detections: inform the user. NOTE(review): AVGarea stays 0
                // here, making area/AVGarea below Infinity — the loop body then
                // never 'continue's; harmless only because tubes is empty too.
                Toast.MakeText(this, "Нет найденых объектов!!!", ToastLength.Short).Show();
            }
            // Draw the surviving detections in reading order (left-to-right, top-to-bottom).
            foreach (var tube in tubes.OrderBy(s => s.X).ThenBy(u => u.Y))
            {
                System.Drawing.Point point = new System.Drawing.Point(tube.X + tube.Width / 3, tube.Y + tube.Height / 2);
                circle.Center = new System.Drawing.PointF(tube.X + tube.Width / 2, tube.Y + tube.Height / 2);
                circle.Radius = tube.Width / 2;
                var area = (3.14) * tube.Width * tube.Height;
                // Skip detections at or below the size threshold relative to the
                // average. NOTE(review): the original comment said "20% of the
                // average" but the code compares against 40 — confirm which is intended.
                if (area / AVGarea * 100 <= 40)
                {
                    continue;
                }
                counter = i++;
                // Thinner strokes for small images, thicker for large ones.
                if (FileForRecognize.Width <= 1024 && FileForRecognize.Height <= 768)
                {
                    FileForRecognize.Draw(circle, new Bgr(System.Drawing.Color.Yellow), 1);
                    FileForRecognize.Draw(string.Format("{0}", counter), ref font, point, new Bgr(System.Drawing.Color.Red));
                }
                else
                {
                    FileForRecognize.Draw(circle, new Bgr(System.Drawing.Color.Yellow), 7);
                    FileForRecognize.Draw(string.Format("{0}", counter), ref font, point, new Bgr(System.Drawing.Color.Red));
                }
            }
            // Report the final count and detection time in a confirmation dialog.
            //Toast.MakeText(this, "Количество: " + counter + "  Затрачено времени: " + time, ToastLength.Long).Show();
            AlertDialog.Builder alert = new AlertDialog.Builder(this);
            alert.SetTitle("Подтверждение");
            alert.SetMessage(string.Format("Распознано объектов: {0} , " +
                                           "                           Время распознавания: {1}", counter.ToString(), time.ToString()));

            alert.SetPositiveButton("Подтверждение", (senderAlert, args) => { Toast.MakeText(this, "Подтверждено!", ToastLength.Short).Show(); });
            RunOnUiThread(() => { alert.Show(); });
            imageView.SetImageBitmap(FileForRecognize.ToBitmap());

            GC.Collect();
        }
Пример #27
0
        /// <summary>
        /// Runs the Cognitive Services face detector on the photo whose file
        /// name was stashed in TempData["file"] and builds one view model per
        /// detected face. When no face is found, a single sentinel entry with
        /// Age == -1 is returned so the view can show a "no face" message.
        /// </summary>
        public IActionResult Analysis()
        {
            string fileName = TempData["file"].ToString();

            string webFile  = "~/CameraPhotos" + $@"/{fileName}";
            string filePath = Path.Combine(Environment.WebRootPath, "CameraPhotos") + $@"\{fileName}";

            IList <FaceAttributesViewModel> listOfFaces = new List <FaceAttributesViewModel>();

            // NOTE(review): blocking on the async detector with .Result risks a
            // deadlock on a synchronization context and ties up a thread-pool
            // thread; prefer making this action 'async Task<IActionResult>' and
            // awaiting detector.Run(). Kept synchronous here to preserve the
            // action's signature.
            DetectFace detector   = new DetectFace(filePath, Config);
            var        csFaceList = detector.Run().Result;

            if (csFaceList.Any())
            {
                foreach (var face in csFaceList)
                {
                    listOfFaces.Add(new FaceAttributesViewModel
                    {
                        Age        = face.FaceAttributes.Age,
                        Gender     = face.FaceAttributes.Gender.ToString().ToLower() == "male" ? "Hombre" : "Mujer",
                        Makeup     = face.FaceAttributes.Makeup.LipMakeup || face.FaceAttributes.Makeup.EyeMakeup ? "Si" : "No",
                        // Any visible facial hair (beard, moustache or sideburns) counts.
                        FacialHair = ((face.FaceAttributes.FacialHair.Beard > 0) ||
                                      (face.FaceAttributes.FacialHair.Moustache > 0) ||
                                      (face.FaceAttributes.FacialHair.Sideburns > 0)) ? "Si" : "No",
                        Glasses = face.FaceAttributes.Glasses.ToString(),
                        // Smile confidence above 0.2 is reported as a smile.
                        Smile   = face.FaceAttributes.Smile > 0.2 ? "Si" : "No",
                        Image   = webFile,
                        Hair    = face.FaceAttributes.Hair.HairColor[0].Color.ToString()
                    });
                }
            }
            else
            {
                // Sentinel: Age == -1 signals "no face detected" to the view.
                listOfFaces.Add(new FaceAttributesViewModel
                {
                    Image = webFile,
                    Age   = -1
                });
            }

            return(View(listOfFaces));
        }