Example #1
        private void FaceMonitoring(object sender, EventArgs e)
        {
            try
            {
                // Get the current frame from the capture device
                CurrentFrame = Grabber.QueryFrame().Resize(320, 240, INTER.CV_INTER_CUBIC);

                //Video.Serve(CurrentFrame.Bytes);
                //System.Diagnostics.Debug.Print(Convert.ToBase64String(CurrentFrame.Bytes));

                // Convert it to Grayscale
                Gray = CurrentFrame.Convert<Gray, byte>();

                // Face Detector
                MCvAvgComp[][] facesDetected = Gray.DetectHaarCascade(Face, 1.2, 10, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));

                if (facesDetected == null)
                {
                    return;
                }

                // Action for each element detected
                foreach (MCvAvgComp face in facesDetected[0])
                {
                    Result = CurrentFrame.Copy(face.rect).Convert<Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);

                    // Draw a red rectangle around the detected face on the current frame
                    CurrentFrame.Draw(face.rect, new Bgr(Color.Red), 2);

                    // Check if there are trained images to find a match
                    if (TrainingImages.ToArray().Length != 0)
                    {
                        var eigenDistanceThreshold = 3000;
                        var eps = 0.001;

                        // Termination criteria for the recognizer: use the number of
                        // trained images as the maximum iteration count
                        MCvTermCriteria termCrit = new MCvTermCriteria(TrainedFacesCounter, eps);

                        // Eigen face recognizer
                        FaceRecognitionEngine recognizer = new FaceRecognitionEngine(TrainingImages.ToArray(), LabelList.ToArray(), eigenDistanceThreshold, ref termCrit);
                        var imgLabel = recognizer.Recognize(Result);
                        LogStats($"{imgLabel} - Recognized");

                        // Draw the label for each face detected and recognized
                        // CurrentFrame.Draw(imgLabel, ref Font, new Point(face.rect.X - 2, face.rect.Y - 2), new Bgr(Color.White));
                        MethodInvoker inv = delegate { lblLabelName.Text = imgLabel.Replace("_", " ").ToUpper(); };
                        Invoke(inv);
                    }
                }

                // Show the processed frame with the detected and recognized faces
                imageBoxFrameGrabber.Image = CurrentFrame;
            }
            catch
            {
                // Swallow per-frame errors so one bad frame does not stop the monitoring loop
            }
        }
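
FaceMonitoring is an event handler, so it relies on fields (Grabber, Face, TrainingImages, LabelList, TrainedFacesCounter, and the image buffers) that must be initialized elsewhere in the form. The following is a minimal sketch of that wiring, assuming Emgu CV 2.x and a WinForms host; the cascade file name and the field types are assumptions, not part of the example above.

        // Hypothetical form setup for the FaceMonitoring handler (Emgu CV 2.x assumed)
        using System;
        using System.Collections.Generic;
        using System.Windows.Forms;
        using Emgu.CV;
        using Emgu.CV.Structure;

        public partial class MainForm : Form
        {
            private Capture Grabber;                 // capture device queried in FaceMonitoring
            private HaarCascade Face;                // frontal face cascade
            private Image<Bgr, byte> CurrentFrame;
            private Image<Gray, byte> Gray, Result;
            private List<Image<Gray, byte>> TrainingImages = new List<Image<Gray, byte>>();
            private List<string> LabelList = new List<string>();
            private int TrainedFacesCounter;

            private void MainForm_Load(object sender, EventArgs e)
            {
                // Open the default camera and load the face cascade (file name assumed)
                Grabber = new Capture();
                Face = new HaarCascade("haarcascade_frontalface_default.xml");

                // Run FaceMonitoring on every idle tick, the usual Emgu CV WinForms loop
                Application.Idle += new EventHandler(FaceMonitoring);
            }
        }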
Example #2
        public List<Eye> DetectEye()
        {
            // Detect eye candidates in the grayscale image with the eye Haar cascade
            MCvAvgComp[][] objects = Gray.DetectHaarCascade(_eyeCascade);
            List<Eye> res = new List<Eye>();

            foreach (MCvAvgComp o in objects[0])
            {
                // Restrict the ROI to the detection and copy the eye region out
                _image.ROI = o.rect;
                res.Add(new Eye(_image.Copy(), o.rect));
            }

            // Reset the ROI so later operations see the full image again
            _image.ROI = Rectangle.Empty;
            return res;
        }
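
The Eye type returned here is not part of the snippet. A minimal sketch of such a container, assuming it only needs to hold the cropped eye image and its bounding rectangle (the property names and the Bgr image type are assumptions):

        // Hypothetical minimal Eye container used by DetectEye (members assumed)
        using System.Drawing;
        using Emgu.CV;
        using Emgu.CV.Structure;

        public class Eye
        {
            public Eye(Image<Bgr, byte> image, Rectangle rect)
            {
                Image = image;
                Rect = rect;
            }

            // Cropped eye region copied out of the source frame
            public Image<Bgr, byte> Image { get; private set; }

            // Bounding rectangle of the eye in the source image coordinates
            public Rectangle Rect { get; private set; }
        }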
Example #3
        private void SavePicture(object sender, EventArgs e)
        {
            try
            {
                TrainedFacesCounter++;

                // Get a gray frame from the capture device
                Gray = Grabber.QueryGrayFrame().Resize(320, 240, INTER.CV_INTER_CUBIC);

                // Face Detector
                var            scaleFactor   = 1.2;
                var            minNeighbors  = 10;
                var            detectionType = HAAR_DETECTION_TYPE.DO_CANNY_PRUNING;
                MCvAvgComp[][] facesDetected = Gray.DetectHaarCascade(Face, scaleFactor, minNeighbors, detectionType, new Size(20, 20));

                // Use the first face detected in this frame as the training sample
                foreach (MCvAvgComp f in facesDetected[0])
                {
                    TrainedFace = CurrentFrame.Copy(f.rect).Convert<Gray, byte>();
                    break;
                }

                LogIt($"Handle frame - scaleFactor:{scaleFactor} minNeighbors:{minNeighbors} detectionType:{detectionType.ToString()}");

                string labelName = textBox1.Text.Trim().Replace(" ", "_").ToLower();

                // Resize the face sample so it matches the 100x100 size used for
                // recognition, using cubic interpolation
                TrainedFace = TrainedFace.Resize(100, 100, INTER.CV_INTER_CUBIC);

                // Show face added in gray scale
                imageBox1.Image = TrainedFace;

                var imgPath = $"/TrainedFaces/{labelName}@face{DateTime.Now.ToString("yyyyMMddHHmmss")}.bmp";
                TrainedFace.Save(Application.StartupPath + imgPath);
            }
            catch
            {
                // Ignore errors so a failed capture does not break the training flow
            }
        }
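
SavePicture only writes each sample to the TrainedFaces folder, while Example #1 expects TrainingImages and LabelList to be populated in memory. Below is a sketch of a loader that rebuilds those lists from the saved bitmaps, assuming the label@face<timestamp>.bmp naming used above; LoadTrainedFaces is a hypothetical helper, not part of the original code.

        // Hypothetical loader that rebuilds the in-memory training data from disk,
        // assuming the "<label>@face<timestamp>.bmp" naming used by SavePicture above
        // (requires System.IO in addition to the Emgu CV namespaces)
        private void LoadTrainedFaces()
        {
            TrainingImages.Clear();
            LabelList.Clear();

            string folder = Path.Combine(Application.StartupPath, "TrainedFaces");
            foreach (string file in Directory.GetFiles(folder, "*.bmp"))
            {
                // File names look like "<label>@face<timestamp>.bmp"
                string label = Path.GetFileNameWithoutExtension(file).Split('@')[0];

                TrainingImages.Add(new Image<Gray, byte>(file));
                LabelList.Add(label);
            }

            // Keep the counter in sync so the term criteria in Example #1 stay meaningful
            TrainedFacesCounter = TrainingImages.Count;
        }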