Example #1
        private void timer1_Tick(object sender, EventArgs e)
        {
            using (Image<Bgr, Byte> nextframe = cap.QueryFrame())
            {
                if (nextframe != null)
                {
                    if (isTrack == false)
                    {
                        Image<Gray, Byte> grayframe = nextframe.Convert<Gray, Byte>();
                        grayframe._EqualizeHist();

                        var faces = grayframe.DetectHaarCascade(haar, 1.4, 4, HAAR_DETECTION_TYPE.FIND_BIGGEST_OBJECT | HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(40, 40))[0];

                        hsv = nextframe.Convert<Hsv, Byte>();
                        hsv._EqualizeHist();

                        hue         = new Image<Gray, Byte>(grayframe.Width, grayframe.Height);
                        mask        = new Image<Gray, Byte>(grayframe.Width, grayframe.Height);
                        backproject = new Image<Gray, Byte>(grayframe.Width, grayframe.Height);

                        // mask out pixels with low saturation or extreme value, then isolate the hue plane
                        Emgu.CV.CvInvoke.cvInRangeS(hsv, new MCvScalar(0, 30, Math.Min(10, 255), 0), new MCvScalar(180, 256, Math.Max(10, 255), 0), mask);
                        Emgu.CV.CvInvoke.cvSplit(hsv, hue, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);

                        picHue.Image = hue.ToBitmap();


                        foreach (var face in faces)
                        {
                            // Rectangle roi = new Rectangle(face.rect.X + face.rect.Width / 4, face.rect.Y + face.rect.Height / 4, face.rect.Width / 2, face.rect.Height / 2);
                            // Rectangle roi = new Rectangle(face.rect.X, face.rect.Y, face.rect.Width / 2, face.rect.Height / 2);

                            Emgu.CV.CvInvoke.cvSetImageROI(hue, face.rect);
                            Emgu.CV.CvInvoke.cvSetImageROI(mask, face.rect);

                            nextframe.Draw(face.rect, new Bgr(0, 255, 0), 2); // bright green box around the detected face
                            picMask.Image = mask.ToBitmap();
                            trackwin      = face.rect;
                        }
                        // build a hue histogram over the face ROI set above
                        img = new IntPtr[] { hue.Ptr };

                        Emgu.CV.CvInvoke.cvCalcHist(img, hist, false, mask);

                        Emgu.CV.CvInvoke.cvResetImageROI(hue);
                        Emgu.CV.CvInvoke.cvResetImageROI(mask);

                        CapImg.Image = nextframe.ToBitmap();
                        isTrack      = true;
                    }
                    else
                    {
                        if (!trackwin.IsEmpty)
                        {
                            hsv = nextframe.Convert<Hsv, Byte>();
                            Emgu.CV.CvInvoke.cvInRangeS(hsv, new MCvScalar(0, 30, 10, 0), new MCvScalar(180, 256, 256, 0), mask);
                            Emgu.CV.CvInvoke.cvSplit(hsv, hue, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
                            picMask.Image = mask.ToBitmap();
                            picHue.Image  = hue.ToBitmap();
                        }

                        // back-project the stored hue histogram onto the current hue plane, then mask it
                        img = new IntPtr[] { hue.Ptr };

                        Emgu.CV.CvInvoke.cvCalcBackProject(img, backproject, hist);
                        Emgu.CV.CvInvoke.cvAnd(backproject, mask, backproject, IntPtr.Zero);

                        Image<Gray, Byte> grayframe = nextframe.Convert<Gray, Byte>();
                        grayframe._EqualizeHist();


                        var faces = grayframe.DetectHaarCascade(haar, 1.4, 4, HAAR_DETECTION_TYPE.FIND_BIGGEST_OBJECT | HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(40, 40))[0];
                        foreach (var face in faces)
                        {
                            nextframe.Draw(face.rect, new Bgr(Color.Black), 2);
                        }


                        if (trackwin.Width == 0)
                        {
                            trackwin.Width = 40;
                        }
                        if (trackwin.Height == 0)
                        {
                            trackwin.Height = 40;
                        }

                        // CamShift over the back projection returns the updated search window and a rotated bounding box
                        Emgu.CV.CvInvoke.cvCamShift(backproject, trackwin, new MCvTermCriteria(10, 0.1), out trackcomp, out trackbox);
                        trackwin = trackcomp.rect;

                        // CvInvoke.cvEllipseBox(nextframe, trackbox, new MCvScalar(0, 255, 0), 2, LINE_TYPE.CV_AA, 0);


                        nextframe.Draw(trackwin, new Bgr(Color.Blue), 3);
                        CapImg.Image  = nextframe.ToBitmap();
                        faceS         = nextframe.Copy(trackwin);
                        picFace.Image = faceS.ToBitmap();
                    }
                }
            }
        }
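
The tick handler above leans on several form-level fields that the snippet does not show: the capture device (cap), the Haar cascade (haar), the hue histogram (hist), the track window (trackwin) and the isTrack flag. Below is only a minimal sketch of how those members might be declared and initialized under Emgu CV 2.x; the field names are taken from the code, but the cascade path, the 30-bin histogram and the Form1_Load wiring are assumptions, not part of the original example.

        // assumes: using System; using System.Drawing; using Emgu.CV; using Emgu.CV.Structure;
        private Capture cap;                               // webcam capture device
        private HaarCascade haar;                          // frontal-face cascade
        private DenseHistogram hist;                       // 1-D hue histogram used by CamShift
        private Image<Hsv, Byte> hsv;
        private Image<Gray, Byte> hue, mask, backproject;
        private Image<Bgr, Byte> faceS;
        private IntPtr[] img;
        private Rectangle trackwin;
        private MCvConnectedComp trackcomp;
        private MCvBox2D trackbox;
        private bool isTrack = false;

        private void Form1_Load(object sender, EventArgs e)
        {
            cap  = new Capture(0);                                          // default camera (index is an assumption)
            haar = new HaarCascade("haarcascade_frontalface_default.xml");  // hypothetical cascade path
            hist = new DenseHistogram(30, new RangeF(0f, 180f));            // 30 hue bins over [0, 180), as in the OpenCV camshift demo
            timer1.Interval = 33;                                           // roughly 30 fps
            timer1.Start();
        }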
Example #2
        public void FrameGrabber(object sender, EventArgs e)
        {
            NamePersons.Add("");

            // capture a frame from the device (the whole image, faces and all)
            currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            gray         = currentFrame.Convert<Gray, Byte>();
            //(TestImageBox.Image = currentFrame);
            // the Haar cascade results land in these MCvAvgComp arrays (one entry per detection)
            MCvAvgComp[][] facesDetected  = gray.DetectHaarCascade(face, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20)); // faces
            MCvAvgComp[][] StoreEyes      = gray.DetectHaarCascade(eye, 1.2, 2, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(1, 1));                    // eyes
            MCvAvgComp[][] Mouthdetection = gray.DetectHaarCascade(mouth, 3, 3, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(1, 1));                    // mouths

            foreach (MCvAvgComp mouths in Mouthdetection[0]) // loop over all the detected mouths
            {
                //currentFrame.Draw(mouths.rect, new Bgr(Color.Blue), 1);
                // draw a rectangle for each mouth and put it on the display
                int   mouthLableX = mouths.rect.X;                       // X of the mouth's top-left corner, for labeling
                int   mouthLableY = mouths.rect.Y;                       // Y of the mouth's top-left corner, for labeling
                Point p           = new Point(mouthLableX, mouthLableY); // point to attach the "MOUTH" label to
            }

            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                currentFrame.Draw(f.rect, new Bgr(Color.Green), 2); // draw a green rectangle around the detected face
                if (trainingImages.ToArray().Length != 0)
                {
                    MCvTermCriteria       termCrit   = new MCvTermCriteria(ContTrain, 0.001);
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(trainingImages.ToArray(), labels.ToArray(), 3000, ref termCrit);

                    name = recognizer.Recognize(result); // the recognized name for this face is stored in 'name'
                    if (name == null)
                    {
                        currentFrame.Draw(f.rect, new Bgr(Color.Green), 2); // keep the green box even when no match is found
                    }
                    // the colour of the face label
                }
                NamePersons[t - 1] = name;
                NamePersons.Add("");
                label3.Text = facesDetected[0].Length.ToString();
            }
            t = 0;
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn];
            }

            label3.Text = names;
            names       = "";
            NamePersons.Clear();

            NamesPersons.Add("");
            foreach (MCvAvgComp eyes in StoreEyes[0]) // more than one eye may be detected, so each eye gets its own rectangle
            {
                tem     = tem + 1;
                results = currentFrame.Copy(eyes.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame.Draw(eyes.rect, new Bgr(Color.Red), 2);
                // a red rectangle is drawn for each eye detected in the image
                int   eyeLableX = eyes.rect.X;                     // X of the eye's top-left corner, for labeling
                int   eyeLableY = eyes.rect.Y;                     // Y of the eye's top-left corner, for labeling
                Point p         = new Point(eyeLableX, eyeLableY); // point p to attach the "EYE" label to
                //currentFrame.Draw(name, ref font, p, new Bgr(Color.Red)); // add the string "EYE" to the captured frame so it is distinguishable
                if (trainingImages.ToArray().Length != 0)
                {
                    MCvTermCriteria       termCrit   = new MCvTermCriteria(ContTrain, 0.001);
                    EigenObjectRecognizer recognizer = new EigenObjectRecognizer(trainingImages.ToArray(), labels.ToArray(), 3000, ref termCrit);
                    eyename = recognizer.Recognize(results); // the recognizer result for this eye crop is stored in 'eyename'
                }

                NamesPersons[tem - 1] = eyename;
                NamesPersons.Add("");
                label10.Text = StoreEyes[0].Length.ToString();
            }
            tem = 0;
            for (int ee = 0; ee < StoreEyes[0].Length; ee++)
            {
                eyenames = eyenames + NamesPersons[ee];
            }

            label10.Text = eyenames;
            eyenames     = "";
            NamesPersons.Clear();


            imageBox1.Image = currentFrame;
        }
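
As with the first example, FrameGrabber relies on fields and setup code that are not shown: the capture device (grabber), the three Haar cascades (face, eye, mouth), the training images and labels fed to the EigenObjectRecognizer, and the name lists and counters. A minimal sketch of those members and of hooking FrameGrabber to Application.Idle follows; the cascade file names, the start-button handler and the empty training set are assumptions for illustration only.

        // assumes: using System; using System.Collections.Generic; using System.Drawing; using System.Windows.Forms; using Emgu.CV; using Emgu.CV.Structure;
        private Capture grabber;                           // webcam capture device
        private HaarCascade face, eye, mouth;              // detectors used by FrameGrabber
        private Image<Bgr, Byte> currentFrame;
        private Image<Gray, Byte> gray, result, results;
        private List<Image<Gray, Byte>> trainingImages = new List<Image<Gray, Byte>>(); // filled elsewhere by a training routine
        private List<string> labels = new List<string>();                               // one label per training image
        private List<string> NamePersons = new List<string>();
        private List<string> NamesPersons = new List<string>();
        private string name, eyename, names = "", eyenames = "";
        private int t = 0, tem = 0, ContTrain = 0;         // ContTrain should track how many faces have been trained

        private void btnStart_Click(object sender, EventArgs e)               // hypothetical start button
        {
            grabber = new Capture();                                          // default webcam
            grabber.QueryFrame();                                             // grab one frame to warm up the device
            face  = new HaarCascade("haarcascade_frontalface_default.xml");   // hypothetical cascade paths
            eye   = new HaarCascade("haarcascade_eye.xml");
            mouth = new HaarCascade("haarcascade_mcs_mouth.xml");
            Application.Idle += new EventHandler(FrameGrabber);               // run FrameGrabber whenever the UI is idle
        }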