Beispiel #1
0
        /// <summary>
        /// Demo: loads each image in <c>_names</c>, finds squares in it and
        /// displays the result; ESC aborts the slideshow.
        /// </summary>
        public Squares()
        {
            // Memory storage holding all dynamic data produced by FindSquares4.
            // fix: the original never disposed this native resource.
            using (CvMemStorage storage = new CvMemStorage(0))
            {
                for (int i = 0; i < _names.Length; i++)
                {
                    // load i-th image
                    using (IplImage img = new IplImage(_names[i], LoadMode.Color))
                    {
                        // create the display window
                        Cv.NamedWindow(WindowName, WindowMode.AutoSize);

                        // find and draw the squares
                        DrawSquares(img, FindSquares4(img, storage));
                    }

                    // clear memory storage - reset free space position
                    storage.Clear();

                    // wait for a key; cvWaitKey also takes care of event processing.
                    // ESC (27) stops the loop.
                    int c = Cv.WaitKey(0);
                    if ((char)c == 27)
                    {
                        break;
                    }
                }
            }

            Cv.DestroyWindow(WindowName);
        }
Beispiel #2
0
        /// <summary>
        /// Demo: detects faces in a sample image with a Haar cascade and draws
        /// a colored circle around each detection, then shows the result.
        /// </summary>
        public FaceDetect()
        {
            CheckMemoryLeak();

            // CvHaarClassifierCascade, cvHaarDetectObjects

            // Palette cycled through for successive detections (index i % 8).
            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double Scale        = 1.14;   // detection runs on a 1/Scale-sized image
            const double ScaleFactor  = 1.0850; // cascade window growth per pass
            const int    MinNeighbors = 2;      // min overlapping hits to accept a face

            using (IplImage img = new IplImage(FilePath.Image.Yalta, LoadMode.Color))
                using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                {
                    // Build a downscaled, histogram-equalized grayscale copy for detection.
                    using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                    {
                        Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                        Cv.Resize(gray, smallImg, Interpolation.Linear);
                        Cv.EqualizeHist(smallImg, smallImg);
                    }

                    using (var cascade = CvHaarClassifierCascade.FromFile(FilePath.Text.HaarCascade))
                        using (var storage = new CvMemStorage())
                        {
                            storage.Clear();

                            // Detect faces (timed with a stopwatch)
                            Stopwatch         watch = Stopwatch.StartNew();
                            CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                            watch.Stop();
                            Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);

                            // Draw a circle on each detected face
                            for (int i = 0; i < faces.Total; i++)
                            {
                                CvRect  r      = faces[i].Value.Rect;
                                // Map the rect center back from smallImg to the original scale.
                                CvPoint center = new CvPoint
                                {
                                    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                                };
                                int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                            }
                        }

                    // Show the annotated image in a window
                    CvWindow.ShowImages(img);
                }
        }
Beispiel #3
0
        /// <summary>
        /// Detects faces in <c>_src</c>, stores a 64x64 crop of each face in
        /// <c>Faces</c>, and returns the number of faces found.
        /// </summary>
        /// <returns>Number of detected faces.</returns>
        public int Detect()
        {
            const double Scale        = 1.04;
            const double ScaleFactor  = 1.139;
            const int    MinNeighbors = 2;

            int total;

            // fix: the original leaked both temporary IplImages.
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(_src.Width / Scale), Cv.Round(_src.Height / Scale)), BitDepth.U8, 1))
            {
                // Build a downscaled, equalized grayscale copy for detection.
                using (IplImage gray = new IplImage(_src.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(_src, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }

                CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));

                Faces.Clear();
                for (int i = 0; i < faces.Total; i++)
                {
                    // Map the detection rect back to the original image scale.
                    CvRect r = faces[i].Value.Rect;
                    r.X      = Cv.Round(r.X * Scale);
                    r.Y      = Cv.Round(r.Y * Scale);
                    r.Width  = Cv.Round(r.Width * Scale);
                    r.Height = Cv.Round(r.Height * Scale);

                    // Crop the face region and keep a 64x64 copy.
                    _src.SetROI(r);
                    IplImage p = new IplImage(64, 64, _src.Depth, 3);
                    Cv.Resize(_src, p);
                    Faces.Add(p);
                    _src.ResetROI();
                }

                // fix: capture the count BEFORE clearing the storage that backs
                // the sequence (the original read faces.Total after Clear()).
                total = faces.Total;
                storage.Clear();
            }

            return total;
        }
Beispiel #4
0
        /// <summary>
        /// Demo: tracks eyes from the camera, circling each detection until a
        /// key is pressed in the preview window.
        /// </summary>
        public EyeDetect()
        {
            // Palette cycled through for successive detections (index i % 8).
            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double Scale        = 1.25;
            const double ScaleFactor  = 2.5;
            const int    MinNeighbors = 2;

            using (CvCapture cap = CvCapture.FromCamera(1))
            using (CvWindow w = new CvWindow("Eye Tracker"))
            // fix: load the cascade XML and allocate the storage ONCE — the
            // original re-read the file and re-allocated storage on every frame.
            using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\Program Files\\OpenCV\\data\\haarcascades\\haarcascade_eye.xml"))
            using (CvMemStorage storage = new CvMemStorage())
            {
                while (CvWindow.WaitKey(10) < 0)
                {
                    using (IplImage img = cap.QueryFrame())
                    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                    {
                        // Build a downscaled, equalized grayscale copy for detection.
                        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                        {
                            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                            Cv.Resize(gray, smallImg, Interpolation.Linear);
                            Cv.EqualizeHist(smallImg, smallImg);
                        }

                        // Reuse the storage; Clear resets the free-space pointer.
                        storage.Clear();

                        Stopwatch         watch = Stopwatch.StartNew();
                        CvSeq <CvAvgComp> eyes  = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                        watch.Stop();
                        //Console.WriteLine("detection time = {0}msn", watch.ElapsedMilliseconds);

                        // Circle each detection, mapped back to the original scale.
                        for (int i = 0; i < eyes.Total; i++)
                        {
                            CvRect  r      = eyes[i].Value.Rect;
                            CvPoint center = new CvPoint
                            {
                                X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                            };
                            int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                            img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                        }

                        w.Image = img;
                    }
                }
            }
        }
Beispiel #5
0
        /// <summary>
        /// Detects faces in <paramref name="src"/>, covers each one with a
        /// filled black circle, stores the result in <c>FindFace</c> and
        /// returns it as a 24bpp bitmap.
        /// </summary>
        /// <param name="src">Input image; left unmodified (a clone is edited).</param>
        /// <returns>The anonymized image as a Bitmap.</returns>
        public System.Drawing.Bitmap FaceDetect(IplImage src)
        {
            // CvHaarClassifierCascade, cvHaarDetectObjects
            // Uses a cascade of Haar classifiers to detect the faces

            // NOTE(review): this palette is unused here — detections are
            // covered with solid black below.
            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double scale        = 1.04;
            const double scaleFactor  = 1.139;
            const int    minNeighbors = 1;

            using (IplImage img = src.Clone())
                using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1))
                {
                    // Create the downscaled grayscale image used for detection.
                    using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                    {
                        Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                        Cv.Resize(gray, smallImg, Interpolation.Linear);
                        Cv.EqualizeHist(smallImg, smallImg);
                    }

                    using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Environment.CurrentDirectory + "\\" + "haarcascade_frontalface_alt.xml"))
                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            storage.Clear();

                            // Detect the faces.
                            CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(20, 20));

                            // Cover each detected face with a filled black circle.
                            for (int i = 0; i < faces.Total; i++)
                            {
                                CvRect  r      = faces[i].Value.Rect;
                                CvPoint center = new CvPoint
                                {
                                    X = Cv.Round((r.X + r.Width * 0.5) * scale),
                                    Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                                };
                                int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                                img.Circle(center, radius, new CvColor(0, 0, 0), -1, LineType.Link8, 0);
                            }
                        }
                    FindFace = img.Clone();

                    // Convert the resulting IplImage to a Bitmap and return it.
                    return(FindFace.ToBitmap(System.Drawing.Imaging.PixelFormat.Format24bppRgb));
                }
        }
Beispiel #6
0
        /// <summary>
        /// Menu handler: detects faces in <c>src</c> with a Haar cascade,
        /// circles each detection, and shows the result in pictureBoxIpl2.
        /// </summary>
        private void 얼굴검출ToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // Palette cycled through for successive detections (index i % 8).
            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double scale        = 1.04;
            const double scaleFactor  = 1.139;
            const int    minNeighbors = 2;

            using (IplImage img = src.Clone())
                using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1))
                {
                    // Create the downscaled grayscale image used for detection
                    using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                    {
                        Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                        Cv.Resize(gray, smallImg, Interpolation.Linear);
                        Cv.EqualizeHist(smallImg, smallImg);
                    }

                    //using (CvHaarClassifierCascade cascade = Cv.Load<CvHaarClassifierCascade>(Const.XmlHaarcascade))  // any cascade works here

                    using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Application.StartupPath + "\\" + "haarcascade_frontalface_alt.xml"))                  //
                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            storage.Clear();

                            // Detect the faces (window size limited to 30x30 .. 180x180)

                            CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(30, 30), new CvSize(180, 180));

                            // Draw a circle on each detected face
                            for (int i = 0; i < faces.Total; i++)
                            {
                                CvRect  r      = faces[i].Value.Rect;
                                // Map the rect center back to the original image scale.
                                CvPoint center = new CvPoint
                                {
                                    X = Cv.Round((r.X + r.Width * 0.5) * scale),
                                    Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                                };
                                int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                            }
                        }
                    dst = img.Clone();
                    pictureBoxIpl2.ImageIpl = dst;
                }
        }
    // Update is called once per frame: grabs a camera frame, detects faces,
    // publishes the first face's normalized position in `facepos`, and sets
    // `isFaceInCapture` when that position lies inside the central region.
    void Update()
    {
        // Frame buffer is owned by the capture; it must not be disposed here.
        IplImage frame = Cv.QueryFrame(capture);

        using (IplImage img = Cv.CloneImage(frame))
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
            {
                // Build a downscaled, equalized grayscale copy for detection.
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }

                using (CvMemStorage storage = new CvMemStorage())
                {
                    storage.Clear();

                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(64, 64));

                    // Circle every detection on the display image.
                    for (int i = 0; i < faces.Total; i++)
                    {
                        CvRect  r      = faces[i].Value.Rect;
                        CvPoint center = new CvPoint
                        {
                            X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                            Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                        };
                        int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                        img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                    }

                    // Normalized center of the first detected face (zero when none).
                    if (faces.Total > 0)
                    {
                        CvRect r = faces[0].Value.Rect;
                        facepos = new Vector2((r.X + r.Width / 2.0f) / CAPTURE_WIDTH, (r.Y + r.Height / 2.0f) / CAPTURE_HEIGHT);
                    }
                    else
                    {
                        facepos = Vector2.zero;
                    }

                    // fix: the last clause originally re-tested facepos.x, so the
                    // upper bound on facepos.y was never checked.
                    isFaceInCapture = facepos.x >= 0.2 && facepos.x <= 0.7 &&
                                      facepos.y >= 0.2 && facepos.y <= 0.7;
                }

                Cv.ShowImage("FaceDetect", img);
            }
    }
        ///////////////////////
        /// <summary>
        /// Detects faces in <paramref name="src"/> with a Haar cascade, draws a
        /// colored circle around each detection, and returns the annotated copy.
        /// </summary>
        /// <param name="src">Input image; left unmodified (a clone is edited).</param>
        /// <returns>A new image with the detections circled; caller owns it.</returns>
        public static IplImage FaceDetect(IplImage src)
        {
            // CvHaarClassifierCascade, cvHaarDetectObjects
            // Uses a cascade of Haar classifiers to detect the faces.
            // Palette cycled through for successive detections (index i % 8).
            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };
            const double scale        = 1;
            const double scaleFactor  = 1.139;
            const int    minNeighbors = 2;

            // fix: smallImg is allocated with ONE channel — the original used 3
            // channels, which cannot receive the resized single-channel gray
            // image, and cvEqualizeHist also requires an 8-bit single-channel
            // image. The working images are also disposed now (original leaked both).
            using (IplImage img = src.Clone())
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1))
            {
                // Create the grayscale image used for detection.
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }
                using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\haarcascade_frontalface_default.xml"))
                using (CvMemStorage storage = new CvMemStorage())
                {
                    storage.Clear();
                    // Detect the faces.
                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(24, 24));
                    // Draw a circle on each detected face.
                    for (int i = 0; i < faces.Total; i++)
                    {
                        CvRect  r      = faces[i].Value.Rect;
                        CvPoint center = new CvPoint
                        {
                            X = Cv.Round((r.X + r.Width * 0.5) * scale),
                            Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                        };
                        int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                        img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                    }
                }
                // Return a copy that outlives the disposed working image.
                return(img.Clone());
            }
        }
Beispiel #9
0
        /// <summary>
        /// Detects features in <paramref name="b"/> on a downscaled grayscale
        /// copy and returns them with coordinates mapped back to the original
        /// resolution (clamped to the image bounds).
        /// </summary>
        /// <param name="b">Input bitmap.</param>
        /// <returns>Detected features in original-image coordinates.</returns>
        public List <T> DetectFeatures(Bitmap b)
        {
            LoadFiles();
            List <T> features;

            //Type Intializer Exception occurs if you reuse an appdomain. Always restart the server.
            using (IplImage orig = OpenCvSharp.BitmapConverter.ToIplImage(b))
                using (IplImage gray = new IplImage(orig.Size, BitDepth.U8, 1)) {
                    //Make grayscale version
                    Cv.CvtColor(orig, gray, ColorConversion.BgrToGray);

                    // Shrink so the longest side fits scaledBounds; never upscale.
                    int    w = orig.Width; int h = orig.Height;
                    double ratio = (double)w / (double)h;
                    double scale = 1;
                    if (ratio > 1)
                    {
                        scale = (double)w / (double)scaledBounds;
                    }
                    if (ratio <= 1)
                    {
                        scale = (double)h / (double)scaledBounds;
                    }
                    scale = Math.Min(1, 1 / scale);


                    using (IplImage small = new IplImage(new CvSize(Cv.Round(w * scale), Cv.Round(h * scale)), BitDepth.U8, 1)) {
                        //Resize to smaller version
                        Cv.Resize(gray, small, Interpolation.Area);
                        // fix: equalize the image actually handed to the detector;
                        // the original equalized 'gray' AFTER 'small' had already
                        // been produced, so the equalization had no effect.
                        Cv.EqualizeHist(small, small);

                        using (CvMemStorage storage = new CvMemStorage()) {
                            storage.Clear();
                            features = DetectFeatures(small, storage);
                        }
                    }
                    //Scale all rectangles by factor to restore to original resolution
                    for (int i = 0; i < features.Count; i++)
                    {
                        IFeature e = features[i];
                        e.Y  = (float)Math.Min(h, e.Y / scale);
                        e.X  = (float)Math.Min(w, e.X / scale);
                        e.Y2 = (float)Math.Min(h, e.Y2 / scale);
                        e.X2 = (float)Math.Min(w, e.X2 / scale);
                    }
                }
            return(features);
        }
        /// <summary>
        /// Gets the face position near the skeleton's head.
        /// </summary>
        /// <param name="headPosition">Color-space coordinates of the skeleton's head</param>
        /// <returns>Face rectangle in full-image coordinates</returns>
        private Rect CheckFacePosition(ColorImagePoint headPosition)
        {
            // Size of the region snipped around the head for detection
            int snipWidth  = 200;
            int snipHeight = 200;

            // Rect to return (defaults to the skeleton head position and the image2 size)
            Rect reRect = new Rect(headPosition.X, headPosition.Y,
                                   image2.Width, image2.Height);

            storage.Clear();
            openCVGrayImage.ResetROI();                                           // reset the ROI — it is occasionally still set when this method is called

            openCVImage.CopyFrom(outputImage);                                    // WriteableBitmap -> IplImage
            Cv.CvtColor(openCVImage, openCVGrayImage, ColorConversion.BgrToGray); // convert the image to grayscale
            Cv.EqualizeHist(openCVGrayImage, openCVGrayImage);                    // equalize the histogram

            // Face detection.
            // NOTE(review): the catch-all below silently drops every failure and
            // returns the default rect — looks like deliberate best-effort; confirm.
            try {
                // Snip the search region out of the image
                var snipImage = SnipFaceImage(openCVGrayImage, headPosition, snipWidth, snipHeight);

                if (snipImage != null)
                {
                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(snipImage, cascade, storage);

                    // When a face was found, translate it from snip-relative
                    // back to full-image coordinates.
                    if (faces.Total > 0)
                    {
                        reRect.X      = faces[0].Value.Rect.X + (headPosition.X - snipWidth / 2);
                        reRect.Y      = faces[0].Value.Rect.Y + (headPosition.Y - snipHeight / 2);
                        reRect.Width  = faces[0].Value.Rect.Width;
                        reRect.Height = faces[0].Value.Rect.Height;
                    }
                }
            }
            catch (Exception) { }

            return(reRect);
        }
Beispiel #11
0
        /// <summary>
        /// Loads the image at <paramref name="sandboxPath"/>, runs Haar face
        /// detection with the cascade at <paramref name="cascadeXmlPath"/>, logs
        /// each detection, and returns whether any face was found.
        /// </summary>
        /// <param name="sandboxPath">Path of the image to scan.</param>
        /// <param name="cascadeXmlPath">Path of the Haar cascade XML file.</param>
        /// <returns>true when at least one face was detected; false otherwise
        /// (including on any error, which is logged).</returns>
        private bool FaceDetect(string sandboxPath, string cascadeXmlPath)
        {
            try {
                // Load directly as grayscale; the cascade runs on single-channel images.
                using (IplImage img = Cv.LoadImage(sandboxPath, LoadMode.GrayScale))
                {
                    // fix: the original allocated an *uninitialized* gray image and
                    // resized it INTO 'img', overwriting the loaded picture with
                    // garbage before detection. The image is already grayscale, so
                    // only histogram equalization is needed.
                    // (EqualizeHist spreads out unusually dark/bright regions so
                    // intensity values are more uniformly distributed.)
                    Cv.EqualizeHist(img, img);

                    using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(cascadeXmlPath))
                    // e.g. haarcascade_frontalface_alt2.xml contains the trained
                    // face-detection model the image is matched against.
                    using (CvMemStorage storage = new CvMemStorage())
                    {
                        storage.Clear();
                        // Detect faces; faces.Total holds the number of detections.
                        CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(img, cascade, storage, 1.1, 2, 0, new CvSize(30, 30));
                        Console.WriteLine("-----");
                        for (int i = 0; i < faces.Total; i++)
                        {
                            CvAvgComp comp = (CvAvgComp)Cv.GetSeqElem(faces, i);
                            Console.WriteLine(comp.Rect.TopLeft + " " + comp.Rect.Width + " " + comp.Rect.Height);
                        }
                        return(faces.Total > 0);
                    }
                }
            }
            catch (Exception e)
            {
                System.Console.WriteLine(e);
                return(false);
            }
        }
Beispiel #12
0
        /// <summary>
        /// Searches the upper half of a detected face for an eye pair, falling
        /// back to individual-eye detection when no pair is found. When any
        /// eyes are found, the face rectangle itself is appended to the result.
        /// </summary>
        /// <param name="img">Image to search; its ROI is modified.</param>
        /// <param name="storage">Scratch storage, cleared on entry.</param>
        /// <param name="face">The face detection to refine.</param>
        /// <returns>Eye (and possibly face) rectangles found.</returns>
        private List <ObjRect> DetectFeaturesInFace(IplImage img, CvMemStorage storage, CvAvgComp face)
        {
            var found = new List <ObjRect>();

            storage.Clear();

            // Restrict the search to the top half of the face rectangle.
            CvRect region = face.Rect;
            region.Height = Cv.Round((double)region.Height / 2);
            img.SetROI(region);

            // First look for an eye pair inside that region.
            CvSize minPairSize = region.Width < 50 ? new CvSize(11, 3) : new CvSize(22, 5);
            CvAvgComp[] pairs = Cv.HaarDetectObjects(img, Cascades["EyePair22"], storage, 1.0850, 2, 0, minPairSize).ToArrayAndDispose();
            //Array.Sort<CvAvgComp>(pairs, CompareByNeighbors);

            if (pairs.Length == 0)
            {
                // No pair: drop 1/2.75 of the height off the top, leaving a
                // full-width band from 1/5.5 down to 1/2 of the face height,
                // and scan it for individual eyes.
                int trimmed = Cv.Round((double)region.Height * 2 / 5.5);
                region.Y      += trimmed;
                region.Height -= trimmed;

                found.AddRange(DetectEyesInRegion(img, storage, region));
            }

            // Evaluate every pair (eyes inside multiple pairs are unlikely),
            // translating each pair rect from ROI-relative to image coordinates.
            for (int idx = 0; idx < pairs.Length; idx++)
            {
                CvAvgComp pair = pairs[idx];
                pair.Rect.X += region.X;
                pair.Rect.Y += region.Y;
                found.AddRange(DetectFeaturesInPair(img, storage, pair));
            }

            // Any eye hit confirms the face itself.
            if (found.Count > 0)
            {
                found.Add(new ObjRect(face.Rect.ToRectangleF(), FeatureType.Face));
            }

            return(found);
        }
        /// <summary>
        /// Entry point: streams camera frames, marks detected faces with red
        /// rectangles and shows them until a key is pressed in the window.
        /// </summary>
        static void Main(string[] args)
        {
            // fix: dispose the camera, window, cascade and storage — the
            // original leaked all four native resources.
            using (CvCapture camera = new CvCapture(0))
            using (CvWindow win = new CvWindow())
            using (CvHaarClassifierCascade face_classifier = CvHaarClassifierCascade.FromFile("haarcascade_frontalface_alt.xml"))
            using (CvMemStorage storage = new CvMemStorage())
            {
                while (CvWindow.WaitKey(10) < 0)
                {
                    using (IplImage img = camera.QueryFrame())
                    {
                        // Recycle the storage between frames.
                        storage.Clear();
                        CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(img, face_classifier, storage, 1.5, 1, HaarDetectionType.ScaleImage, new CvSize(50, 50));
                        for (int i = 0; i < faces.Total; i++)
                        {
                            img.Rectangle(faces[i].Value.Rect, CvColor.Red);
                        }

                        win.Image = img;
                    }
                }
            }
        }
        /// <summary>
        /// Thresholds <paramref name="target"/>, finds its contours and draws
        /// them into a shared static grayscale buffer, which is returned.
        /// Shared buffers are lazily allocated on the first call and the
        /// storage is recycled on subsequent calls.
        /// </summary>
        /// <param name="target">Input BGR image; not modified.</param>
        /// <returns>The shared buffer with contours drawn (255 = contour,
        /// 128 = hole); valid only until the next call.</returns>
        public static IplImage testContours(IplImage target)
        {
            if (g_storage == null)
            {
                g_gray    = new IplImage(target.Size, BitDepth.U8, 1);
                g_binary  = new IplImage(target.Size, BitDepth.U8, 1);
                g_storage = new CvMemStorage(0);
            }
            else
            {
                g_storage.Clear();
            }

            CvSeq <CvPoint> contours;

            target.CvtColor(g_gray, ColorConversion.BgrToGray);

            g_gray.Threshold(g_gray, g_thresh, 255, ThresholdType.Binary);
            // NOTE(review): g_binary gets a copy of the thresholded image but is
            // never read again in this method — presumably kept for other callers.
            g_gray.Copy(g_binary);

            g_gray.FindContours(g_storage, out contours, CvContour.SizeOf, ContourRetrieval.CComp);

            // Wipe the buffer so only the contours end up drawn on it.
            g_gray.Zero();

            if (contours != null)
            {
                // Simplify with Douglas-Peucker, then draw up to 100 levels deep.
                contours.ApproxPoly(CvContour.SizeOf, g_storage, ApproxPolyMethod.DP, 3, true);
                g_gray.DrawContours(contours, new CvScalar(255), new CvScalar(128), 100);
            }


            //g_gray.Dilate(g_gray, null, 2);
            //g_gray.Erode(g_gray, null, 2);

            return(g_gray);
        }
        /// <summary>
        /// Detects the first face in the image referenced by
        /// <paramref name="faceImage"/>, stores the cropped face bitmap in
        /// <c>_face</c> and saves it via <c>SaveFace</c>.
        /// </summary>
        /// <param name="faceImage">Descriptor whose FullName points at the image;
        /// its FullName is rewritten to the face-database path.</param>
        /// <returns>The saved face image descriptor; the input unchanged when the
        /// target file already exists; null when no face was found or on a
        /// handled error.</returns>
        private FaceImage FaceDetect(FaceImage faceImage)
        {
            if (faceImage.FullName == null || File.Exists(faceImage.FullName) == false || faceImage.FullName == string.Empty)
            {
                throw new ArgumentException("Image's path is not valid: {0}.", faceImage.FullName);
            }

            var facesDir = CreateDirByShortName(Strings.Face_Database_Folder_Name);

            var fullName = CreateFaceFileFullName(faceImage, facesDir);

            faceImage.FullName = fullName;

            // Already extracted previously — nothing to do.
            if (File.Exists(fullName))
            {
                return(faceImage);
            }

            try
            {
                using (IplImage img = new IplImage(faceImage.FullName, LoadMode.AnyColor))
                {
                    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                    {
                        // Build a downscaled, equalized grayscale copy for detection.
                        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                        {
                            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                            Cv.Resize(gray, smallImg, Interpolation.Linear);
                            Cv.EqualizeHist(smallImg, smallImg);
                        }

                        //using (CvHaarClassifierCascade cascade = Cv.Load<CvHaarClassifierCascade>(Const.XmlHaarcascade))
                        using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Constants.FaceCascadeDefFileName))
                            using (CvMemStorage storage = new CvMemStorage())
                            {
                                storage.Clear();

                                Stopwatch         watch = Stopwatch.StartNew();
                                CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, /*1.2*/ ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));//(30,30)
                                watch.Stop();

                                DetectionTime = watch.ElapsedMilliseconds; //detection time (ms)

                                // Only the FIRST detection is used: the loop returns
                                // on its first iteration.
                                for (int i = 0; i < faces.Total; i++)
                                {
                                    CvRect r = faces[i].Value.Rect;
                                    //CvPoint center = new CvPoint
                                    //{
                                    //    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                    //    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                                    //};
                                    //int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                                    //img.Circle(center, radius, new CvColor(0, 0, 255), 3, LineType.AntiAlias, 0);

                                    // NOTE(review): the crop is taken from the DOWNSCALED
                                    // image, so the saved face is 1/Scale resolution —
                                    // confirm that is intended.
                                    smallImg.ROI = new CvRect(r.X, r.Y, r.Width, r.Height);

                                    IplImage subImage = smallImg.GetSubImage(smallImg.ROI);

                                    _face = BitmapConverter.ToBitmap(subImage);

                                    FaceImage newFaceImage = SaveFace(faceImage);
                                    return(newFaceImage);
                                }
                            }
                    }
                }
            }
            catch (Exception ex)
            {
                // Delegate to the exception policy; rethrow only when it says so.
                bool rethrow = ExceptionPolicy.HandleException(ex, "BusinessLogin Policy");
                if (rethrow)
                {
                    throw;
                }

                MessageBox.Show(string.Format("Failed to separate face from image"));
            }
            // No face found (or a handled error occurred).
            return(null);
        }
Beispiel #16
0
        // Confirm button on the selection screen: hides the selection UI, shows
        // a live camera preview with face/eye detection so the user can verify
        // recognition (press ESC to accept), stores the detected face rectangle
        // as the posture baseline (info.area_X/Y, rect_Width/Height), then
        // starts the monitoring timers.
        private void Confirm_Click(object sender, EventArgs e)
        {
            pictureBox1.Hide();
            pictureBox2.Hide();
            pictureBox4.Hide();
            M_Box.Hide();
            Confirm.Hide();
            Select_Beginner.Hide();
            Select_Master.Hide();
            pictureBox3.Show();
            MessageBox.Show("눈이 제대로 인식되었다면 ESC버튼을 눌러주세요");

            // All OpenCV objects are native resources; dispose them
            // deterministically (the original code leaked the cascades,
            // storages and the preview window).
            using (CvCapture camera = new CvCapture(0))                // camera device 0
            using (CvWindow win = new CvWindow())                      // preview window
            using (CvHaarClassifierCascade face_classifier =
                       CvHaarClassifierCascade.FromFile("./haarcascade_frontalface_alt.xml")) // face Haar cascade
            using (CvHaarClassifierCascade eye_classifier =
                       CvHaarClassifierCascade.FromFile("./haarcascade_eye.xml"))             // eye Haar cascade
            using (CvMemStorage storage_face = new CvMemStorage())     // detector scratch memory (faces)
            using (CvMemStorage storage_eye = new CvMemStorage())      // detector scratch memory (eyes)
            {
                while (CvWindow.WaitKey(10) != 27) // loop until ESC (27)
                {
                    using (IplImage camera_img = camera.QueryFrame())
                    {
                        storage_face.Clear();
                        storage_eye.Clear();

                        Cv.Flip(camera_img, camera_img, FlipMode.Y); // mirror horizontally

                        CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(camera_img, face_classifier, storage_face, 1.5, 1,
                                                                      HaarDetectionType.ScaleImage, new CvSize(0, 0), new CvSize(200, 200)); // detect faces

                        for (int i = 0; i < faces.Total; i++)
                        {
                            camera_img.Rectangle(faces[i].Value.Rect, CvColor.Red); // mark detected face in red

                            CvSeq<CvAvgComp> eyes = Cv.HaarDetectObjects(camera_img, eye_classifier, storage_eye, 1.5, 1,
                                                                         HaarDetectionType.ScaleImage, new CvSize(35, 35), new CvSize(50, 50)); // detect eyes

                            for (int j = 0; j < eyes.Total; j++) // eyes.Total is changing continuously
                            {
                                // Accept only eye candidates fully inside the face box,
                                // with the eye bottom at least 60 px above the face bottom.
                                if (eyes[j].Value.Rect.X > faces[i].Value.Rect.X && eyes[j].Value.Rect.Y > faces[i].Value.Rect.Y &&
                                    eyes[j].Value.Rect.X + eyes[j].Value.Rect.Width < faces[i].Value.Rect.X + faces[i].Value.Rect.Width &&
                                    eyes[j].Value.Rect.Y + eyes[j].Value.Rect.Height < (faces[i].Value.Rect.Y + faces[i].Value.Rect.Height) - 60)
                                {
                                    camera_img.Rectangle(eyes[j].Value.Rect, CvColor.Yellow); // mark detected eye in yellow
                                    Console.WriteLine("Recognition X, Y");
                                    Console.WriteLine(">> eye X : {0}, eye Y : {1}", eyes[j].Value.Rect.X, eyes[j].Value.Rect.Y);

                                    // Baseline position: the face rectangle is stored rather
                                    // than an individual eye (left/right detection order varies).
                                    info.area_X = faces[i].Value.Rect.X;
                                    info.area_Y = faces[i].Value.Rect.Y;
                                    rect_Width  = faces[i].Value.Rect.Width;
                                    rect_Height = faces[i].Value.Rect.Height;
                                    Console.WriteLine(">> face X : {0}, face Y : {1}", faces[i].Value.Rect.X, faces[i].Value.Rect.Y);
                                }
                            }
                        }

                        win.Image = camera_img;
                    }
                }

                win.Close();
            }
            Console.WriteLine("메모리 해제");

            info.total_time = DateTime.Now; // session start time (for total-time accounting)

            timer1.Enabled = true;

            timer2.Enabled = true;
            // Unsubscribe before subscribing so repeated clicks do not stack
            // duplicate handlers (the original added a new handler per click).
            timer2.Tick -= timer2_Tick;
            timer2.Tick += new EventHandler(timer2_Tick);
            timer2.Start();

            if (info.cnt == 0) // first run: start the camera check after 0.5 s
            {
                timer1.Interval = 500;
            }

            timer1.Tick -= timer1_Tick;
            timer1.Tick += new EventHandler(timer1_Tick);
            timer1.Start();
        }
    // Update is called once per frame.
    // Grabs a webcam frame, segments it around the calibrated HSV color
    // (pointhsv +/- RANGE_*), finds the contour's enclosing circle and derives
    // the tracked sphere's 3D position (Sx, Sy, Sz) from its apparent radius
    // using the pinhole-camera parameters fx/fy/ux/uy.
    void Update()
    {
        // Grab one frame from the webcam. The frame buffer is owned by the
        // capture (QueryFrame result), so it is not disposed here.
        _Frame = Cv.QueryFrame(_Capture);

        // All per-frame temporaries are disposed at the end of the frame —
        // the original code leaked every one of them on each Update call.
        using (IplImage img = _Frame.Clone())                                // STEP1 input (copy)
        using (IplImage smoothed = new IplImage(img.Size, BitDepth.U8, 3))   // STEP1 output
        using (IplImage hsv = new IplImage(img.Size, BitDepth.U8, 3))        // STEP2 output
        using (IplImage segmented = new IplImage(img.Size, BitDepth.U8, 1))  // STEP3 output
        using (CvMemStorage storage = new CvMemStorage())                    // contour storage
        using (IplImage imgTmp = new IplImage(img.Size, BitDepth.U8, 1))     // STEP4 scratch
        using (IplImage morphology = new IplImage(img.Size, BitDepth.U8, 1)) // STEP4 output
        using (IplImage detected = _Frame.Clone())                           // STEP5 display copy
        {
            CvSeq<CvPoint> contours;

            // STEP1: noise reduction
            Cv.Smooth(img, smoothed, SmoothType.Blur, 1);

            // STEP2: convert RGB to HSV
            Cv.CvtColor(smoothed, hsv, ColorConversion.BgrToHsv);

            // STEP3: segmentation — threshold around the calibrated HSV point
            storage.Clear();
            Cv.InRangeS(hsv,
                        new CvScalar((pointhsv.Val0) - RANGE_H,
                                     (pointhsv.Val1) - RANGE_S,
                                     (pointhsv.Val2) - RANGE_V),
                        new CvScalar((pointhsv.Val0) + RANGE_H,
                                     (pointhsv.Val1) + RANGE_S,
                                     (pointhsv.Val2) + RANGE_V),
                        segmented);

            // STEP4: morphological close then open to remove speckle noise
            Cv.Dilate(segmented, imgTmp);
            Cv.Erode(imgTmp, imgTmp);
            Cv.Erode(imgTmp, imgTmp);
            Cv.Dilate(imgTmp, morphology);

            // STEP5: contour extraction and circle fitting
            Cv.FindContours(morphology, storage, out contours,
                            CvContour.SizeOf, ContourRetrieval.Tree,
                            ContourChain.ApproxNone);

            if (contours == null)
            {
                Debug.Log("PSMove is not detected");
            }
            else
            {
                contours = Cv.ApproxPoly(contours, CvContour.SizeOf,
                                         storage, ApproxPolyMethod.DP,
                                         Cv.ContourPerimeter(contours) * CTR_PARAM, true);

                Cv.DrawContours(morphology, contours,
                                new CvScalar(MAX_G, 0, 0), new CvScalar(0, MAX_B, 0), 3, -1);

                Cv.MinEnclosingCircle(contours, out _Center, out _Radius);
                Cv.DrawCircle(morphology, _Center, 2, new CvScalar(0, MAX_B, 0));

                // STEP6: back-project the pixel position to 3D.
                // Depth from apparent radius, then X/Y via the pinhole model.
                Sz = fx * SPHERE_R / _Radius;
                Sx = -((_Center.X - ux) * Sz) / fx;
                Sy = -((_Center.Y - uy) * Sz) / fy;
            }

            // Show every intermediate stage for debugging.
            _Window.ShowImage(_Frame);
            Cv.ShowImage("Original", img);
            Cv.ShowImage("STEP1:Smoothing", smoothed);
            Cv.ShowImage("STEP2:HSV", hsv);
            Cv.ShowImage("STEP3:Segmentation", segmented);
            Cv.ShowImage("STEP4:Morphology", morphology);
            Cv.ShowImage("STEP5:Detected", detected);
        }
    }
Beispiel #18
0
        // Timer1 tick: periodically re-opens the camera for a few seconds,
        // re-detects the face position, compares it with the baseline recorded
        // in Confirm_Click (info.area_X/Y), and after two out-of-range readings
        // beeps and tells the user to correct their posture.
        // NOTE(review): timer1 is stopped inside the capture loop and restarted
        // near the end; the exact Stop/Start ordering appears intentional.
        private void timer1_Tick(object sender, EventArgs e)
        {
            if (info.cnt >= 0) // from the 2nd run onward, start the camera 5 s later
            {
                timer1.Interval = 5000;
            }

            CvCapture camera = new CvCapture(0); // open camera device 0
            CvWindow  win    = new CvWindow();   // preview window

            //CvHaarClassifierCascade face_classifier =
            //    CvHaarClassifierCascade.FromFile("C:\\김유민\\Visual Studio 2017\\haarcascade_frontalface_alt.xml"); // old absolute path (face cascade)

            //CvHaarClassifierCascade eye_classifier =
            //    CvHaarClassifierCascade.FromFile("C:\\김유민\\Visual Studio 2017\\haarcascade_eye.xml"); // old absolute path (eye cascade)

            CvHaarClassifierCascade face_classifier =
                CvHaarClassifierCascade.FromFile("./haarcascade_frontalface_alt.xml"); // face-detection Haar cascade

            CvHaarClassifierCascade eye_classifier =
                CvHaarClassifierCascade.FromFile("./haarcascade_eye.xml"); // eye-detection Haar cascade

            CvMemStorage storage_face = new CvMemStorage();                // detector scratch memory (faces)
            CvMemStorage storage_eye  = new CvMemStorage();                // detector scratch memory (eyes)

            bool check = true; // set to false below to end the capture loop

            info.time = DateTime.Now;
            TimeSpan timecal = DateTime.Now - info.time;

            while (CvWindow.WaitKey(10) != 27 && check) // loop until ESC (27) or timeout
            {
                using (IplImage camera_img = camera.QueryFrame())
                {
                    storage_face.Clear();
                    storage_eye.Clear();

                    Cv.Flip(camera_img, camera_img, FlipMode.Y); // mirror horizontally

                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(camera_img, face_classifier, storage_face, 1.5, 1,
                                                                   HaarDetectionType.ScaleImage, new CvSize(0, 0), new CvSize(200, 200)); // detect faces

                    for (int i = 0; i < faces.Total; i++)
                    {
                        camera_img.Rectangle(faces[i].Value.Rect, CvColor.Red); // mark detected face in red

                        CvSeq <CvAvgComp> eyes = Cv.HaarDetectObjects(camera_img, eye_classifier, storage_eye, 1.5, 1,
                                                                      HaarDetectionType.ScaleImage, new CvSize(35, 35), new CvSize(50, 50)); // detect eyes

                        for (int j = 0; j < eyes.Total; j++)                                                                                 // eyes.Total is changing continuously
                        {
                            // Accept only eye candidates fully inside the face box, with the
                            // eye bottom at least 60 px above the face bottom.
                            if (eyes[j].Value.Rect.X > faces[i].Value.Rect.X && eyes[j].Value.Rect.Y > faces[i].Value.Rect.Y &&
                                eyes[j].Value.Rect.X + eyes[j].Value.Rect.Width < faces[i].Value.Rect.X + faces[i].Value.Rect.Width &&
                                eyes[j].Value.Rect.Y + eyes[j].Value.Rect.Height < (faces[i].Value.Rect.Y + faces[i].Value.Rect.Height) - 60)
                            {
                                camera_img.Rectangle(eyes[j].Value.Rect, CvColor.Yellow); // mark detected eye in yellow
                                //Console.WriteLine("eyes : {0}", eyes[j]);
                                Console.WriteLine("Comparing X, Y with Recognition X, Y");
                                Console.WriteLine(">> eye  X : {0}, eye  Y : {1}", eyes[j].Value.Rect.X, eyes[j].Value.Rect.Y);

                                // Current position: the face rectangle is stored rather than
                                // an individual eye (left/right detection order varies).
                                //info.eye_X = eyes[j].Value.Rect.X;
                                //info.eye_Y = eyes[j].Value.Rect.Y;
                                info.eye_X = faces[i].Value.Rect.X;
                                info.eye_Y = faces[i].Value.Rect.Y;
                                Console.WriteLine(">> face X : {0}, face Y : {1}", faces[i].Value.Rect.X, faces[i].Value.Rect.Y);
                                Cv.DrawRect(camera_img, info.area_X, info.area_Y, info.area_X + rect_Width, info.area_Y + rect_Height, CvColor.Green); // baseline box
                            }
                        }
                    }

                    win.Image = camera_img;

                    // Elapsed capture time for this tick.
                    timecal = DateTime.Now - info.time;

                    //if (timecal.Minutes == info.minutes) // alternative: stop after the configured number of minutes
                    if (timecal.Seconds == 3) // stop the capture after ~3 seconds
                    {
                        check = false;
                    }
                }

                timer1.Stop(); // suspend the timer while the capture loop runs
            }

            // Posture check: flag the reading when the face moved more than
            // 15 px from the baseline in any direction.
            if (info.eye_X < info.area_X - 15 || info.eye_X > info.area_X + 15 || info.eye_Y < info.area_Y - 15 || info.eye_Y > info.area_Y + 15)
            {
                info.wrongCount++;

                if (info.wrongCount == 2) // warn only on the 2nd consecutive bad reading
                {
                    Console.Beep(512, 300);
                    Console.Beep(650, 300);
                    Console.Beep(768, 300);
                    System.Windows.Forms.MessageBox.Show("자세를 바르게 하세요");
                    timer1.Stop();
                    info.wrongCount = 0;
                }
                //else
                //{
                //    timer1.Start();
                //}
            }

            info.cnt++;
            win.Close();
            Cv.ReleaseCapture(camera); // release the camera
            //camera.Dispose(); // release the camera
            Console.WriteLine("메모리 해제");
            Console.WriteLine("저장된 좌표 X : {0}, Y : {1}", info.eye_X, info.eye_Y);
            Console.WriteLine("");

            timer1.Start(); // resume the periodic check

            if (flag == 1) // external stop requested
            {
                timer1.Stop();
            }
        }
Beispiel #19
0
        /// <summary>
        /// One-frame face check: grabs the current video frame, saves it to
        /// "a.bmp", runs a frontal-face Haar detector on the reloaded file,
        /// and counts the frame as front-facing (frontface_check_num++) when a
        /// face with radius >= 50 px is found. Always increments frametest.
        /// </summary>
        void timer2_test(object sender, EventArgs e)/*face recognize (1frame)*/
        {
            decide_frontface_csv = 1;

            // Bitmap is a GDI resource; dispose it once written to disk
            // (the original leaked one bitmap per tick and would throw
            // NullReferenceException when no frame was available yet).
            using (var bmp = videoSourcePlayer1.GetCurrentVideoFrame())
            {
                if (bmp == null)
                {
                    return; // no frame available from the player yet
                }
                bmp.Save("a.bmp");
            }

            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double Scale        = 1.14;
            const double ScaleFactor  = 1.0850;
            const int    MinNeighbors = 2;

            using (IplImage img = new IplImage(@"a.bmp", LoadMode.Color))
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
            {
                // Build a grayscale, downscaled, histogram-equalized detection image.
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }

                using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_frontalface_default.xml"))
                using (CvMemStorage storage = new CvMemStorage())
                {
                    storage.Clear();

                    // Face detection.
                    Stopwatch         watch = Stopwatch.StartNew();
                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(100, 100), new CvSize(1000, 1000));
                    watch.Stop();

                    // Circle each detection; a radius >= 50 counts as front-facing.
                    for (int i = 0; i < faces.Total; i++)
                    {
                        CvRect  r      = faces[i].Value.Rect;
                        CvPoint center = new CvPoint
                        {
                            X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                            Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                        };
                        int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                        img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);

                        if (radius >= 50)
                        {
                            frontface_check_num++;
                            break; // one front-facing hit is enough for this frame
                        }

                        System.Console.WriteLine("radius = {0}!!!!!!!!!!!!\n", radius);
                    }
                }
            }

            frametest++; // one more frame processed
        }
Beispiel #20
0
        /// <summary>
        /// Lane detection demo: reads "road_3.avi" frame by frame, runs Canny
        /// edge detection on the lower half of each frame, and draws the
        /// near-vertical probabilistic Hough segments (lane candidates) in
        /// green. Runs until a key is pressed or the video ends.
        /// </summary>
        static void Main()
        {
            // CvCapture cap = CvCapture.FromFile("video.avi");
            using (CvCapture cap = CvCapture.FromFile("road_3.avi"))
            using (CvWindow w = new CvWindow("Lane Detection"))
            using (CvWindow canny = new CvWindow("Lane Detection_2"))
            using (CvWindow hough = new CvWindow("Lane Detection"))
            using (CvMemStorage storage = new CvMemStorage())
            // Loaded only for the car-detection experiment that was commented
            // out; kept so a missing cascade file still fails at startup.
            using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_cars3.xml"))
            {
                while (CvWindow.WaitKey(10) < 0)
                {
                    IplImage src = cap.QueryFrame();
                    if (src == null)
                    {
                        break; // end of the video stream (the original crashed here)
                    }

                    // Per-frame temporaries, disposed each iteration — the
                    // original code reallocated and leaked them every frame.
                    using (IplImage halfFrame = new IplImage(new CvSize(src.Size.Width / 2, src.Size.Height / 2), BitDepth.U8, 3))
                    using (IplImage gray = new IplImage(src.Size, BitDepth.U8, 1))
                    using (IplImage dstCanny = new IplImage(src.Size, BitDepth.U8, 1))
                    {
                        Cv.PyrDown(src, halfFrame, CvFilter.Gaussian5x5);

                        // Only the lower half of the frame is relevant for lanes.
                        // NOTE(review): the ROI height exceeds the remaining rows;
                        // the original used these exact bounds — verify before changing.
                        int halfHeight = src.Height / 2;
                        src.SetROI(new CvRect(0, halfHeight - 0, src.Width - 1, src.Height - 1));
                        gray.SetROI(src.GetROI());
                        dstCanny.SetROI(src.GetROI());

                        src.CvtColor(gray, ColorConversion.BgrToGray);
                        Cv.Smooth(gray, gray, SmoothType.Gaussian, 5, 5);
                        Cv.Canny(gray, dstCanny, 50, 200, ApertureSize.Size3);

                        storage.Clear();
                        CvSeq lines = dstCanny.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 100);

                        for (int i = 0; i < lines.Total; i++)
                        {
                            CvLineSegmentPoint elem = lines.GetSeqElem <CvLineSegmentPoint>(i).Value;

                            // Keep only steep segments (|dy| > 50): lane markings
                            // appear near-vertical from this camera perspective.
                            if (elem.P1.Y > elem.P2.Y + 50 || elem.P1.Y < elem.P2.Y - 50)
                            {
                                src.Line(elem.P1, elem.P2, CvColor.Green, 9, LineType.Link8, 0);
                            }
                        }

                        src.ResetROI();

                        storage.Clear();
                        w.Image = src;
                        // canny.Image = dstCanny;
                    }
                }
            }
        }
Beispiel #21
0
        /// <summary>
        /// Detects faces on a grayscale image and verifies each with an eye
        /// detector, penalizing faces without detectable eyes.
        /// </summary>
        /// <param name="img">Grayscale source image; its ROI is used during eye verification and reset before returning.</param>
        /// <param name="storage">OpenCV memory storage reused between detector calls.</param>
        /// <returns>Detected faces sorted best-first, truncated to at most MaxFaces.</returns>
        protected override List <Face> DetectFeatures(IplImage img, CvMemStorage storage)
        {
            // Minimum face size: MinSizePercent of the smaller image dimension, at least 12 px.
            var minSize = Math.Max(12, (int)Math.Round((double)MinSizePercent / 100.0 * Math.Min(img.Width, img.Height)));

            // Detect faces (frontal).
            Stopwatch watch = Stopwatch.StartNew();

            CvAvgComp[] faces = BorrowCascade("FaceCascadeAlt", c => Cv.HaarDetectObjects(img, c, storage, 1.0850, MinConfidenceLevel, HaarDetectionType.DoCannyPruning, new CvSize(minSize, minSize), new CvSize(0, 0)).ToArrayAndDispose());

            // Sort by accuracy (neighbor count).
            Array.Sort <CvAvgComp>(faces, CompareByNeighbors);

            // Convert into feature objects list.
            List <Face> features = new List <Face>(faces.Length);

            foreach (CvAvgComp face in faces)
            {
                features.Add(new Face(PolygonMath.ScaleRect(face.Rect.ToRectangleF(), ExpandX, ExpandY), face.Neighbors));
            }

            // Doesn't add much, and would have to be deduplicated.
            //CvAvgComp[] profiles = BorrowCascade("FaceProfile", c => Cv.HaarDetectObjects(img, c, storage, 1.2, MinConfidenceLevel + 2, HaarDetectionType.FindBiggestObject | HaarDetectionType.DoRoughSearch | HaarDetectionType.DoCannyPruning, new CvSize(img.Width / 8, img.Height / 8), new CvSize(0, 0)).ToArrayAndDispose());
            //foreach (CvAvgComp face in profiles) features.Add(new Face(PolygonMath.ScaleRect(face.Rect.ToRectangleF(), ExpandX, ExpandY), face.Neighbors));

            // Verify with an eye detector inside the upper 60% of each face wider than 20 px.
            foreach (var face in features)
            {
                var w = (int)(face.X2 - face.X);
                var h = (int)((face.Y2 - face.Y) * 0.6);
                if (w > 20)
                {
                    img.SetROI((int)face.X, (int)face.Y, w, h);
                    storage.Clear();
                    CvAvgComp[] eyes = BorrowCascade("Eye",
                                                     c => Cv.HaarDetectObjects(img, c, storage, 1.0850, 4, HaarDetectionType.FindBiggestObject | HaarDetectionType.DoRoughSearch,
                                                                               new CvSize(4, 4), new CvSize(img.Width / 2, img.Height / 2))
                                                     .ToArrayAndDispose());
                    if (eyes.Length == 0)
                    {
                        // Halve the estimated accuracy if there are no eyes detected
                        face.Accuracy = face.Accuracy / 2;
                        // We never want to boost accuracy, because the walls have eyes
                    }
                }
            }

            // Restore the full image: the original left the last face's ROI
            // applied to the caller's image (compare src.ResetROI() usage
            // elsewhere in this codebase).
            img.ResetROI();

            // Unless we're below MinFaces, filter out the low confidence matches.
            while (features.Count > MinFaces && features[features.Count - 1].Accuracy < ConfidenceLevelThreshold)
            {
                features.RemoveAt(features.Count - 1);
            }

            watch.Stop();
            totalTime += watch.ElapsedMilliseconds;
            count++;
            Debug.WriteLine($"Face detection time: {watch.ElapsedMilliseconds}ms  (avg {totalTime / count}ms)");

            // Never return more than [MaxFaces].
            return((features.Count > MaxFaces) ? features.GetRange(0, MaxFaces) : features);
        }
Beispiel #22
0
        /// <summary>
        /// Attempts to locate one eye in each half of a detected face region.
        /// A fixed plan of (cascade choice, neighbor threshold, height
        /// fraction) combinations is tried in order until both eyes are found
        /// or the plan is exhausted.
        /// </summary>
        /// <param name="img">Image to scan; its ROI is moved to each candidate sub-rectangle.</param>
        /// <param name="storage">Detector scratch storage, cleared before each scan.</param>
        /// <param name="region">Face rectangle to search within.</param>
        /// <returns>Zero, one or two eye rectangles in image coordinates.</returns>
        private List <ObjRect> DetectEyesInRegion(IplImage img, CvMemStorage storage, CvRect region)
        {
            List <ObjRect> found = new List <ObjRect>();

            // Two overlapping 60%-wide sub-rectangles, one per eye.
            CvRect leftHalf = region;
            leftHalf.Width = (int)(leftHalf.Width * 0.6);

            CvRect rightHalf = region;
            rightHalf.Width = (int)(rightHalf.Width * 0.6);
            rightHalf.X    += (int)(region.Width * 0.4);

            // Small faces/eye pairs get a smaller minimum eye size (3 vs 5 px).
            int    minEdge    = region.Width < 80 ? 3 : 5;
            CvSize minEyeSize = new CvSize(minEdge, minEdge);

            // Search plan, tried in order: which cascade (0 = side-specific,
            // 1 = generic "Eye"), the neighbor threshold, and the fraction of
            // the half's height to scan.
            int[]   useGeneric  = { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1 };
            int[]   neighbors   = { 3, 3, 3, 2, 2, 2, 1, 1, 1, 1, 1, 1 };
            float[] heightScale = { 0.5f, 0.7f, 1.0f, 0.5f, 0.7f, 1.0f, 0.5f, 0.7f, 1.0f, 0.5f, 0.7f, 1.0f };

            bool haveLeft  = false;
            bool haveRight = false;

            for (int attempt = 0; attempt < useGeneric.Length; attempt++)
            {
                float scale = heightScale[attempt];

                // Shrink the scan band vertically and center it lower in the half.
                CvRect left = leftHalf;
                left.Y     += (int)((float)left.Height * scale / 2.0);
                left.Height = (int)((float)left.Height * scale);

                CvRect right = rightHalf;
                right.Height = left.Height;
                right.Y      = left.Y;

                if (!haveLeft)
                {
                    // Scan the left half for an eye.
                    storage.Clear();
                    img.SetROI(left);
                    CvAvgComp[] leftHits = Cv.HaarDetectObjects(img, useGeneric[attempt] == 0 ? Cascades["RightEyeCascade"] : Cascades["Eye"], storage, 1.0850, neighbors[attempt], 0, minEyeSize).ToArrayAndDispose();

                    if (leftHits.Length > 0)
                    {
                        found.Add(new ObjRect(leftHits[0].Rect.Offset(left.Location).ToRectangleF(), FeatureType.Eye));
                        // Tighten the minimum size for the remaining scans.
                        minEyeSize = new CvSize(leftHits[0].Rect.Width / 4, leftHits[0].Rect.Width / 4);
                        haveLeft   = true;
                    }
                }

                if (!haveRight)
                {
                    // Scan the right half for an eye.
                    storage.Clear();
                    img.SetROI(right);
                    CvAvgComp[] rightHits = Cv.HaarDetectObjects(img, useGeneric[attempt] == 0 ? Cascades["LeftEyeCascade"] : Cascades["Eye"], storage, 1.0850, neighbors[attempt], 0, minEyeSize).ToArrayAndDispose();

                    if (rightHits.Length > 0)
                    {
                        found.Add(new ObjRect(rightHits[0].Rect.Offset(right.Location).ToRectangleF(), FeatureType.Eye));
                        minEyeSize = new CvSize(rightHits[0].Rect.Width / 4, rightHits[0].Rect.Width / 4);
                        haveRight  = true;
                    }
                }

                if (haveLeft && haveRight)
                {
                    break;
                }
            }
            return(found);
        }
Beispiel #23
0
    // Update is called once per frame.
    // Acquires an RGB frame from the perceptual-computing pipeline (pp), runs
    // Haar face detection on it, circles each detected face, and shows the
    // result in the "FaceDetect" window.
    void Update()
    {
        if (pp == null)
        {
            return;
        }
        if (!pp.AcquireFrame(false))
        {
            return;
        }

        try
        {
            if (rgbTexture != null)
            {
                Debug.Log("rgb not null");
                if (pp.QueryRGB(rgbTexture))
                {
                    Debug.Log("query rgb done");
                    Debug.Log("de pixelo: " + rgbTexture.GetPixels()[0]);

                    // NOTE(review): assumes Texture2DtoIplImage allocates a new
                    // image we own — confirm; the original leaked it every frame.
                    using (IplImage frame = Texture2DtoIplImage(rgbTexture))
                    using (IplImage img = Cv.CloneImage(frame))
                    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                    {
                        // Grayscale, downscaled, histogram-equalized detection image.
                        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                        {
                            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                            Cv.Resize(gray, smallImg, Interpolation.Linear);
                            Cv.EqualizeHist(smallImg, smallImg);
                        }

                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            storage.Clear();

                            CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(64, 64));

                            // Circle every detected face on the full-size image.
                            for (int i = 0; i < faces.Total; i++)
                            {
                                CvRect  r      = faces[i].Value.Rect;
                                CvPoint center = new CvPoint
                                {
                                    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                                };
                                int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                            }

                            if (faces.Total > 0)
                            {
                                CvRect r = faces[0].Value.Rect;
                                //facepos = new Vector2((r.X + r.Width / 2.0f) / CAPTURE_WIDTH, (r.Y + r.Height / 2.0f) / CAPTURE_HEIGHT);
                            }
                        }
                        Cv.ShowImage("FaceDetect", img);
                    }
                }             // endif queryirasimage
                else
                {
                    Debug.Log("failoo");
                }
            }         // endif rgbTexture !null
            else
            {
                Debug.Log("rgb NULL");
            }
        }
        finally
        {
            // Always release the acquired frame, even if detection throws
            // (the original skipped ReleaseFrame on exceptions).
            pp.ReleaseFrame();
        }
    }