Example #1
        private bool FaceDetect(string sandboxPath, string cascadeXmlPath)
        {
            try
            {
                // Load the image directly as a single-channel grayscale image,
                // so no color load or BGR-to-gray conversion step is needed.
                using (IplImage img = Cv.LoadImage(sandboxPath, LoadMode.GrayScale))
                {
                    Cv.EqualizeHist(img, img);
                    // EqualizeHist = histogram equalization: stretches unusually dark or bright
                    // regions of the grayscale image so the overall intensity distribution becomes more even.

                    using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(cascadeXmlPath))
                        // haarcascade_frontalface_alt2.xml holds the trained (machine-learned) data
                        // for frontal-face detection; regions that match this data closely enough
                        // are recognized as faces.
                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            storage.Clear();
                            // Detect faces
                            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(img, cascade, storage, 1.1, 2, 0, new CvSize(30, 30));
                            // The detections are stored in faces; faces.Total holds the total number of faces found.
                            Console.WriteLine("-----");
                            for (int i = 0; i < faces.Total; i++)
                            {
                                CvAvgComp comp = (CvAvgComp)Cv.GetSeqElem(faces, i);
                                Console.WriteLine(comp.Rect.TopLeft + " " + comp.Rect.Width + " " + comp.Rect.Height);
                            }
                            return faces.Total > 0;
                        }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                return false;
            }
        }
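
A minimal usage sketch (not part of the original example): the image path and the cascade file location below are placeholder assumptions; FaceDetect only needs a readable image and the haarcascade_frontalface_alt2.xml file mentioned in the comments above.

    // Hypothetical paths for illustration only.
    string imagePath   = @"C:\sandbox\photo.jpg";
    string cascadePath = @"C:\opencv\data\haarcascades\haarcascade_frontalface_alt2.xml";
    bool hasFace = FaceDetect(imagePath, cascadePath);
    Console.WriteLine(hasFace ? "Face detected" : "No face detected");
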
Example #2
        private List<ObjRect> DetectFeaturesInFace(IplImage img, CvMemStorage storage, CvAvgComp face)
        {
            List<ObjRect> eyes = new List<ObjRect>();

            storage.Clear();
            //Take the top half (4/8ths) of the face as the region of interest
            CvRect r = face.Rect;

            r.Height = Cv.Round((double)r.Height / 2);
            img.SetROI(r);

            //Look for pairs there
            CvAvgComp[] pairs = Cv.HaarDetectObjects(img, Cascades["EyePair22"], storage, 1.0850, 2, 0, new CvSize(r.Width < 50 ? 11 : 22, r.Width < 50 ? 3 : 5)).ToArrayAndDispose();
            //Array.Sort<CvAvgComp>(pairs, CompareByNeighbors);

            //Look for individual eyes if no pairs were found
            if (pairs.Length == 0)
            {
                //Drop roughly 1/2.75 of the face height off the top, leaving a full-width rectangle running from about 1/2.75 to 1/2 of the face height.
                int aFifth = Cv.Round((double)r.Height * 2 / 5.5);
                r.Y      += aFifth;
                r.Height -= aFifth;

                eyes.AddRange(DetectEyesInRegion(img, storage, r));
            }
            //If there are pairs, evaluate them all. Finding eyes within multiple pairs is unlikely.
            for (var i = 0; i < pairs.Length; i++)
            {
                CvAvgComp pair = pairs[i]; //Adjust for ROI
                pair.Rect.X += r.X;
                pair.Rect.Y += r.Y;
                eyes.AddRange(DetectFeaturesInPair(img, storage, pair));
            }
            if (eyes.Count > 0)
            {
                eyes.Add(new ObjRect(face.Rect.ToRectangleF(), FeatureType.Face));
            }
            return eyes;
        }
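
Example #2 relies on an ObjRect result type and a FeatureType enum that are not shown here. A minimal sketch of what they might look like, assuming a System.Drawing.RectangleF-backed rectangle and the constructor signature used above; the Eye member is an assumption for the individual-eye results returned by DetectEyesInRegion, and the real definitions in the source project may differ.

    using System.Drawing;

    public enum FeatureType { Face, EyePair, Eye }

    public class ObjRect
    {
        public RectangleF Rect { get; private set; }
        public FeatureType Feature { get; private set; }

        public ObjRect(RectangleF rect, FeatureType feature)
        {
            Rect = rect;
            Feature = feature;
        }
    }
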
Example #3
        private List<ObjRect> DetectFeaturesInPair(IplImage img, CvMemStorage storage, CvAvgComp eyePair)
        {
            List<ObjRect> eyes = new List<ObjRect>();
            CvRect        pair = eyePair.Rect;

            //Inflate 100% vertically, centering
            pair.Top    -= pair.Height / 2;
            pair.Height *= 2;
            if (pair.Top < 0)
            {
                pair.Height += pair.Top; pair.Top = 0;
            }
            if (pair.Height >= img.Height)
            {
                pair.Height = img.Height;
            }
            if (pair.Bottom >= img.Height)
            {
                pair.Top = img.Height - pair.Height;
            }

            //Inflate 20% on each side, centering
            pair.Left  -= pair.Width / 5;
            pair.Width += pair.Width / 5 * 2;
            pair.Left   = Math.Max(0, pair.Left);
            pair.Width  = Math.Min(img.Width - pair.Left, pair.Width);

            eyes.AddRange(DetectEyesInRegion(img, storage, pair));

            if (eyes.Count > 0)
            {
                eyes.Add(new ObjRect(eyePair.Rect.ToRectangleF(), FeatureType.EyePair));
            }
            return eyes;
        }
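
A sketch of how Examples #2 and #3 might be driven from a face-detection pass. The "FaceAlt2" cascade key is a hypothetical assumption (only "EyePair22" appears above), ToArrayAndDispose() is the same extension used in Example #2, and resetting the ROI between faces is an assumption, since DetectFeaturesInFace sets an ROI on img and does not clear it.

    List<ObjRect> features = new List<ObjRect>();
    using (CvMemStorage storage = new CvMemStorage())
    {
        // "FaceAlt2" is a hypothetical key for the frontal-face cascade.
        CvAvgComp[] faces = Cv.HaarDetectObjects(img, Cascades["FaceAlt2"], storage,
                                                 1.1, 2, 0, new CvSize(30, 30)).ToArrayAndDispose();
        foreach (CvAvgComp face in faces)
        {
            features.AddRange(DetectFeaturesInFace(img, storage, face));
            Cv.ResetImageROI(img); // DetectFeaturesInFace leaves an ROI set on img
        }
    }
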
Example #4
 protected int CompareByNeighbors(CvAvgComp a, CvAvgComp b)
 {
     return b.Neighbors.CompareTo(a.Neighbors);
 }
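
The commented-out Array.Sort call in Example #2 hints at how this comparer is used: sorting detections so that candidates with the most neighbors (usually the strongest hits) come first. A hypothetical usage, assuming pairs was obtained from Cv.HaarDetectObjects as above:

    // Sort candidate detections in descending order of Neighbors.
    Array.Sort<CvAvgComp>(pairs, CompareByNeighbors);
    // pairs[0] now holds the detection with the highest neighbor count.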