Example #1
File: MainForm.cs Project: mind0n/hive
        private void CaptureCameraCallback()
        {
            const double ScaleFactor  = 2.5;
            const int    MinNeighbors = 1;
            CvSize       MinSize      = new CvSize(30, 30);

            CvCapture cap = CvCapture.FromCamera(1);
            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_eye.xml");

            while (true)
            {
                IplImage img = cap.QueryFrame();
                //IplImage.FromBitmap()
                //CvSeq<CvAvgComp> eyes = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), ScaleFactor, MinNeighbors, HaarDetectionType.DoCannyPruning, MinSize);

                //foreach (CvAvgComp eye in eyes.AsParallel())
                //{
                //    img.DrawRect(eye.Rect, CvColor.Red);

                //    if (eye.Rect.Left > pctCvWindow.Width / 2)
                //    {
                //        try
                //        {
                //            IplImage rightEyeImg1 = img.Clone();
                //            Cv.SetImageROI(rightEyeImg1, eye.Rect);
                //            IplImage rightEyeImg2 = Cv.CreateImage(eye.Rect.Size, rightEyeImg1.Depth, rightEyeImg1.NChannels);
                //            Cv.Copy(rightEyeImg1, rightEyeImg2, null);
                //            Cv.ResetImageROI(rightEyeImg1);


                //            Bitmap rightEyeBm = BitmapConverter.ToBitmap(rightEyeImg2);
                //            pctRightEye.Image = rightEyeBm;
                //        }
                //        catch { }
                //    }
                //    else
                //    {
                //        try
                //        {
                //            IplImage leftEyeImg1 = img.Clone();
                //            Cv.SetImageROI(leftEyeImg1, eye.Rect);
                //            IplImage leftEyeImg2 = Cv.CreateImage(eye.Rect.Size, leftEyeImg1.Depth, leftEyeImg1.NChannels);
                //            Cv.Copy(leftEyeImg1, leftEyeImg2, null);
                //            Cv.ResetImageROI(leftEyeImg1);

                //            Bitmap leftEyeBm = BitmapConverter.ToBitmap(leftEyeImg2);
                //            pctLeftEye.Image = leftEyeBm;
                //        }catch{}
                //    }
                //}

                Bitmap bm = BitmapConverter.ToBitmap(img);
                bm.SetResolution(pctCvWindow.Width, pctCvWindow.Height);
                //pctCvWindow.Image = bm;
                pb.Image = bm;
                img      = null;
                bm       = null;
                Thread.Sleep(100);
            }
        }
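
The loop above only nulls its references, so the Bitmap handed to the PictureBox and each frame's conversion are reclaimed only when the garbage collector gets to them. A minimal sketch of the same capture loop with explicit disposal is shown below; it assumes OpenCvSharp's classic C-style API (where CvCapture and Bitmap are IDisposable) and a PictureBox field named pb, and it is not part of the original project.

        private void CaptureCameraWithDisposal()
        {
            using (CvCapture cap = CvCapture.FromCamera(1))
            {
                while (true)
                {
                    // QueryFrame returns the capture's internal frame buffer, so it is not disposed here.
                    IplImage img = cap.QueryFrame();
                    if (img == null)
                        continue;

                    Bitmap bm = BitmapConverter.ToBitmap(img);
                    Bitmap previous = pb.Image as Bitmap;
                    pb.Image = bm;
                    previous?.Dispose();   // release the bitmap that was displayed before
                    Thread.Sleep(100);
                }
            }
        }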
Example #2
        public FaceDetect()
        {
            CheckMemoryLeak();

            // CvHaarClassifierCascade, cvHaarDetectObjects

            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double Scale        = 1.14;
            const double ScaleFactor  = 1.0850;
            const int    MinNeighbors = 2;

            using (IplImage img = new IplImage(FilePath.Image.Yalta, LoadMode.Color))
                using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                {
                    using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                    {
                        Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                        Cv.Resize(gray, smallImg, Interpolation.Linear);
                        Cv.EqualizeHist(smallImg, smallImg);
                    }

                    using (var cascade = CvHaarClassifierCascade.FromFile(FilePath.Text.HaarCascade))
                        using (var storage = new CvMemStorage())
                        {
                            storage.Clear();

                            // Detect faces
                            Stopwatch         watch = Stopwatch.StartNew();
                            CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                            watch.Stop();
                            Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);

                            // Draw a circle on each detected region
                            for (int i = 0; i < faces.Total; i++)
                            {
                                CvRect  r      = faces[i].Value.Rect;
                                CvPoint center = new CvPoint
                                {
                                    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                                };
                                int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                            }
                        }

                    // Show the result in a window
                    CvWindow.ShowImages(img);
                }
        }
Example #3
        public EyeDetect()
        {
            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double Scale        = 1.25;
            const double ScaleFactor  = 2.5;
            const int    MinNeighbors = 2;

            using (CvCapture cap = CvCapture.FromCamera(1))
                using (CvWindow w = new CvWindow("Eye Tracker"))
                {
                    while (CvWindow.WaitKey(10) < 0)
                    {
                        using (IplImage img = cap.QueryFrame())
                            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                            {
                                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                                {
                                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                                    Cv.EqualizeHist(smallImg, smallImg);
                                }

                                using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\Program Files\\OpenCV\\data\\haarcascades\\haarcascade_eye.xml"))
                                    using (CvMemStorage storage = new CvMemStorage())
                                    {
                                        storage.Clear();

                                        Stopwatch         watch = Stopwatch.StartNew();
                                        CvSeq <CvAvgComp> eyes  = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                                        watch.Stop();
                                        //Console.WriteLine("detection time = {0}msn", watch.ElapsedMilliseconds);

                                        for (int i = 0; i < eyes.Total; i++)
                                        {
                                            CvRect  r      = eyes[i].Value.Rect;
                                            CvPoint center = new CvPoint
                                            {
                                                X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                                Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                                            };
                                            int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                                            img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                                        }
                                    }

                                w.Image = img;
                            }
                    }
                }
        }
Example #4
        public IplImage FaceDetection(IplImage src)
        {
            haarface = new IplImage(src.Size, BitDepth.U8, 3);
            Cv.Copy(src, haarface);

            gray = this.GrayScale(src);
            Cv.EqualizeHist(gray, gray);

            double scaleFactor  = 1.139;
            int    minNeighbors = 1;

            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("../../haarcascade_frontalface_alt.xml");
            CvMemStorage            Storage = new CvMemStorage();

            CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(gray, cascade, Storage, scaleFactor, minNeighbors, HaarDetectionType.ScaleImage, new CvSize(90, 90), new CvSize(0, 0));

            for (int i = 0; i < faces.Total; i++)
            {
                CvRect r = faces[i].Value.Rect;

                int cX     = Cv.Round(r.X + r.Width * 0.5);
                int cY     = Cv.Round(r.Y + r.Height * 0.5);
                int radius = Cv.Round((r.Width + r.Height) * 0.25);

                Cv.DrawCircle(haarface, new CvPoint(cX, cY), radius, CvColor.Black, 3);
            }
            return(haarface);
        }
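
FaceDetection above re-parses the cascade XML and allocates a fresh CvMemStorage on every call without disposing either. A sketch of the same detection with the cascade cached in a static field and the storage wrapped in a using block follows; the field and method names are assumptions and this is not code from the original project.

        private static readonly CvHaarClassifierCascade FrontalFaceCascade =
            CvHaarClassifierCascade.FromFile("../../haarcascade_frontalface_alt.xml");

        public IplImage FaceDetectionCached(IplImage src)
        {
            haarface = new IplImage(src.Size, BitDepth.U8, 3);
            Cv.Copy(src, haarface);

            gray = this.GrayScale(src);
            Cv.EqualizeHist(gray, gray);

            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(gray, FrontalFaceCascade, storage,
                    1.139, 1, HaarDetectionType.ScaleImage, new CvSize(90, 90), new CvSize(0, 0));

                for (int i = 0; i < faces.Total; i++)
                {
                    CvRect r = faces[i].Value.Rect;
                    int cX     = Cv.Round(r.X + r.Width * 0.5);
                    int cY     = Cv.Round(r.Y + r.Height * 0.5);
                    int radius = Cv.Round((r.Width + r.Height) * 0.25);
                    Cv.DrawCircle(haarface, new CvPoint(cX, cY), radius, CvColor.Black, 3);
                }
            }
            return haarface;
        }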
Example #5
        public IplImage VehicleDetect(IplImage src)
        {
            haarvehicle = new IplImage(src.Size, BitDepth.U8, 3);
            Cv.Copy(src, haarvehicle);

            gray = new IplImage(src.Size, BitDepth.U8, 1);
            Cv.CvtColor(src, gray, ColorConversion.BgrToGray);

            Cv.EqualizeHist(gray, gray);

            double scaleFactor  = 1.139;
            int    minNeighbors = 1;

            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("../../../cars.xml");
            CvMemStorage            Storage = new CvMemStorage();

            CvSeq <CvAvgComp> vehicles = Cv.HaarDetectObjects(gray, cascade, Storage, scaleFactor, minNeighbors, HaarDetectionType.ScaleImage, new CvSize(90, 90), new CvSize(0, 0));

            for (int i = 0; i < vehicles.Total; i++)
            {
                CvRect r = vehicles[i].Value.Rect;

                int cX     = Cv.Round(r.X + r.Width * 0.5);
                int cY     = Cv.Round(r.Y + r.Height * 0.5);
                int radius = Cv.Round((r.Width + r.Height) * 0.25);

                //Cv.DrawCircle(haarvehicle, new CvPoint(cX, cY), radius, CvColor.Red, 3);
                Cv.DrawRect(haarvehicle, r, CvColor.Red, 5);
            }

            return(haarvehicle);
        }
Example #6
        public System.Drawing.Bitmap FaceDetect(IplImage src)
        {
            // CvHaarClassifierCascade, cvHaarDetectObjects
            // Use a cascade of Haar classifiers to detect faces

            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double scale        = 1.04;
            const double scaleFactor  = 1.139;
            const int    minNeighbors = 1;

            using (IplImage img = src.Clone())
                using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1))
                {
                    // Create the image used for face detection.
                    using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                    {
                        Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                        Cv.Resize(gray, smallImg, Interpolation.Linear);
                        Cv.EqualizeHist(smallImg, smallImg);
                    }

                    using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Environment.CurrentDirectory + "\\" + "haarcascade_frontalface_alt.xml"))
                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            storage.Clear();

                            // Detect faces.
                            CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(20, 20));

                            // Cover each detected face with a filled black circle.
                            for (int i = 0; i < faces.Total; i++)
                            {
                                CvRect  r      = faces[i].Value.Rect;
                                CvPoint center = new CvPoint
                                {
                                    X = Cv.Round((r.X + r.Width * 0.5) * scale),
                                    Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                                };
                                int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                                img.Circle(center, radius, new CvColor(0, 0, 0), -1, LineType.Link8, 0);
                            }
                        }
                    FindFace = img.Clone();

                    // Convert the resulting IplImage to a Bitmap and return it.
                    return(FindFace.ToBitmap(System.Drawing.Imaging.PixelFormat.Format24bppRgb));
                }
        }
Example #7
        /// <summary>
        /// Initialize OpenCV-related variables
        /// </summary>
        private void InitOpenCV()
        {
            openCVImage = new IplImage(
                kinect.ColorStream.FrameWidth,
                kinect.ColorStream.FrameHeight,
                BitDepth.U8, 3);

            openCVGrayImage = new IplImage(kinect.ColorStream.FrameWidth, kinect.ColorStream.FrameHeight, BitDepth.U8, 1);

            storage = new CvMemStorage();
            cascade = CvHaarClassifierCascade.FromFile("../../haarcascade_frontalface_alt2.xml");
        }
Example #8
        public Extract(string fname)
        {
            try
            {
                cascade = CvHaarClassifierCascade.FromFile(fname);
            }
            catch
            {
            }

            storage = new CvMemStorage();
        }
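
The empty catch above leaves cascade null when the cascade file is missing or unreadable, which defers the failure to the first detection call. A variant that fails fast is sketched below as an illustration; it is not the original constructor.

        public Extract(string fname)
        {
            // Surface a missing cascade file immediately instead of swallowing the exception.
            if (!System.IO.File.Exists(fname))
                throw new System.IO.FileNotFoundException("Haar cascade file not found.", fname);

            cascade = CvHaarClassifierCascade.FromFile(fname);
            storage = new CvMemStorage();
        }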
Example #9
        ///////////////////////
        public static IplImage FaceDetect(IplImage src)
        {
            IplImage FindFace;

            // CvHaarClassifierCascade, cvHaarDetectObjects
            // Use a cascade of Haar classifiers to detect faces
            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };
            const double scale        = 1;
            const double scaleFactor  = 1.139;
            const int    minNeighbors = 2;
            IplImage     img          = src.Clone();
            IplImage     smallImg     = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1); // single channel, so Cv.Resize from the grayscale image below succeeds
            {
                // Create the image used for face detection
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }
                using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\haarcascade_frontalface_default.xml"))
                    using (CvMemStorage storage = new CvMemStorage())
                    {
                        storage.Clear();
                        // Detect faces
                        CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(24, 24));
                        // Draw a circle on each detected face
                        for (int i = 0; i < faces.Total; i++)
                        {
                            CvRect  r      = faces[i].Value.Rect;
                            CvPoint center = new CvPoint
                            {
                                X = Cv.Round((r.X + r.Width * 0.5) * scale),
                                Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                            };
                            int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                            img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                        }
                    }
                FindFace = img.Clone();
                return(FindFace);
            }
        }
Example #10
    // Use this for initialization
    void Start()
    {
        PXCUPipeline.Mode mode = Options.mode & (~PXCUPipeline.Mode.VOICE_RECOGNITION);
        if (mode == 0)
        {
            return;
        }

        pp = new PXCUPipeline();
        if (!pp.Init(mode))
        {
            print("Unable to initialize the PXCUPipeline");
            return;
        }

        plane = GameObject.Find("Plane");

        pp.QueryRGBSize(RGBMapSize);
        if (RGBMapSize[0] > 0)
        {
            Debug.Log("rgb map size: width = " + RGBMapSize[0] + ", height = " + RGBMapSize[1]);
            rgbTexture = new Texture2D(RGBMapSize[0], RGBMapSize[1], TextureFormat.ARGB32, false);
            // use the rgb texture as the rendered texture
            plane.renderer.material.mainTexture = rgbTexture;
            pp.QueryDepthMapSize(depthMapSize);
            if (depthMapSize[0] > 0)
            {
                Debug.Log("depth map size: width = " + depthMapSize[0] + ", height = " + depthMapSize[1]);
                depthStorage = new short[depthMapSize[0] * depthMapSize[1]];
            }
            pp.QueryUVMapSize(uvMapSize);
            if (uvMapSize[0] > 0)
            {
                Debug.Log("uv map size: width = " + uvMapSize[0] + ", height = " + uvMapSize[1]);
                uvStorage = new short[uvMapSize[0] * uvMapSize[1] * 2];
            }
        }

        cascade = CvHaarClassifierCascade.FromFile(@"./Assets/haarcascade_frontalface_alt.xml");

        /*capture = Cv.CreateCameraCapture(0); // conflicts with PXCUPipeline
         * Cv.SetCaptureProperty(capture, CaptureProperty.FrameWidth, CAPTURE_WIDTH);
         * Cv.SetCaptureProperty(capture, CaptureProperty.FrameHeight, CAPTURE_HEIGHT);
         * IplImage frame = Cv.QueryFrame(capture);
         * Debug.Log("width:" + frame.Width + " height:" + frame.Height);*/
        Cv.NamedWindow("FaceDetect");

        CvSVM          svm      = new CvSVM();
        CvTermCriteria criteria = new CvTermCriteria(CriteriaType.Epsilon, 1000, double.Epsilon);
        CvSVMParams    param    = new CvSVMParams(CvSVM.C_SVC, CvSVM.RBF, 10.0, 8.0, 1.0, 10.0, 0.5, 0.1, null, criteria);
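        // Note: svm, criteria and param are local to Start() and are not used again in this snippet;
        // presumably the SVM is configured and trained elsewhere in the project.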
    }
Example #11
        private void CaptureCameraCallback()
        {
            const double ScaleFactor  = 2.5;
            const int    MinNeighbors = 1;
            CvSize       MinSize      = new CvSize(30, 30);

            // Capture data directly from the camera
            CvCapture cap = CvCapture.FromCamera(2);
            // Use a cascade classifier
            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_eye.xml");

            // Eye detection
            while (true)
            {
                IplImage          img  = cap.QueryFrame();
                CvSeq <CvAvgComp> eyes = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), ScaleFactor, MinNeighbors, HaarDetectionType.DoCannyPruning, MinSize);

                foreach (CvAvgComp mata in eyes.AsParallel())
                {
                    img.DrawRect(mata.Rect, CvColor.Red);

                    if (mata.Rect.Left > pctCvWindow.Width / 2)
                    {
                        try
                        {
                            IplImage MataImg1 = img.Clone();
                            Cv.SetImageROI(MataImg1, mata.Rect);
                            IplImage MataImg2 = Cv.CreateImage(mata.Rect.Size, MataImg1.Depth, MataImg1.NChannels);
                            Cv.Copy(MataImg1, MataImg2, null);
                            Cv.ResetImageROI(MataImg1);
                            Bitmap MataBitmap = BitmapConverter.ToBitmap(MataImg2);
                            Console.WriteLine("Mata terdeteksi, HIDUPKAN vibrator!!!");
                        }
                        catch { }
                    }
                }
                // After the loop above: no eye detected.
                Bitmap bm = BitmapConverter.ToBitmap(img);
                bm.SetResolution(pctCvWindow.Width, pctCvWindow.Height);
                pctCvWindow.Image = bm;

                img = null;
                bm  = null;
                Console.WriteLine("Mata TIDAK terdeteksi, MATIKAN vibrator!!!");
            }
        }
Example #12
File: Form1.cs Project: dongseoki/Naul
        public IplImage FaceDetection(IplImage src)
        {                                                      // https://076923.github.io/posts/C-opencv-29/
            haarface = new IplImage(src.Size, BitDepth.U8, 3); // haarface is a copy of the source image
            Cv.Copy(src, haarface);

            const double scale        = 0.9;   // scale is the ratio applied to the detection image
            const double scaleFactor  = 1.139; // constant used during face detection
            const int    minNeighbors = 1;     // constant used during face detection

            using (IplImage Detected_image = new IplImage(new CvSize(Cv.Round(src.Width / scale), Cv.Round(src.Height / scale)), BitDepth.U8, 1))
            {                                                                  // resize the detection image (Detected_image) according to scale
                using (IplImage gray = new IplImage(src.Size, BitDepth.U8, 1)) // adjust the image size
                {
                    Cv.CvtColor(src, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, Detected_image, Interpolation.Linear);
                    Cv.EqualizeHist(Detected_image, Detected_image); // equalize the image histogram (balances dark and bright regions)
                }

                using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("../../haarcascade_frontalface_alt.xml"))
                    using (CvMemStorage storage = new CvMemStorage()) // create in-memory storage
                    {
                        CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(Detected_image, cascade, storage, scaleFactor, minNeighbors, HaarDetectionType.FindBiggestObject, new CvSize(90, 90), new CvSize(0, 0));
                        // Detected_image = image to scan, cascade = classifier, storage = memory storage for results, HaarDetectionType = operation mode
                        if (faces.Total == 1 && checking == false)
                        {
                            checking = true;
                            send_picture();
                        }
                        for (int i = 0; i < faces.Total; i++)
                        {
                            CvRect  r      = faces[i].Value.Rect;
                            CvPoint center = new CvPoint
                            {
                                X = Cv.Round((r.X + r.Width * 0.5) * scale),
                                Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                            };
                            int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                            haarface.Circle(center, radius, CvColor.Black, 3, LineType.AntiAlias, 0);
                        }
                    }

                return(haarface);
            }
        }
Example #13
        static void Main(string[] args)
        {
            CvCapture camera = new CvCapture(0);
            CvWindow  win    = new CvWindow();

            CvHaarClassifierCascade face_classifier = CvHaarClassifierCascade.FromFile("haarcascade_frontalface_alt.xml");
            CvMemStorage            storage         = new CvMemStorage();

            while (CvWindow.WaitKey(10) < 0)
            {
                using (IplImage img = camera.QueryFrame())
                {
                    storage.Clear();
                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(img, face_classifier, storage, 1.5, 1, HaarDetectionType.ScaleImage, new CvSize(50, 50));
                    for (int i = 0; i < faces.Total; i++)
                    {
                        img.Rectangle(faces[i].Value.Rect, CvColor.Red);
                    }

                    win.Image = img;
                }
            }
        }
Example #14
        // When the Confirm button in the selection window is clicked -> move on to video recognition
        private void Confirm_Click(object sender, EventArgs e)
        {
            pictureBox1.Hide();
            pictureBox2.Hide();
            pictureBox4.Hide();
            M_Box.Hide();
            Confirm.Hide();
            Select_Beginner.Hide();
            Select_Master.Hide();
            pictureBox3.Show();
            MessageBox.Show("눈이 제대로 인식되었다면 ESC버튼을 눌러주세요");

            CvCapture camera = new CvCapture(0); // create the camera
            CvWindow  win    = new CvWindow();   // create the window

            CvHaarClassifierCascade face_classifier =
                CvHaarClassifierCascade.FromFile("./haarcascade_frontalface_alt.xml"); // load the Haar face-detection classifier

            CvHaarClassifierCascade eye_classifier =
                CvHaarClassifierCascade.FromFile("./haarcascade_eye.xml"); // load the Haar eye-detection classifier

            CvMemStorage storage_face = new CvMemStorage();                // memory storage for faces
            CvMemStorage storage_eye  = new CvMemStorage();                // memory storage for eyes

            while (CvWindow.WaitKey(10) != 27)                             // < 0: exit on any key; != 27: exit on ESC
            {
                using (IplImage camera_img = camera.QueryFrame())
                {
                    storage_face.Clear();
                    storage_eye.Clear();

                    Cv.Flip(camera_img, camera_img, FlipMode.Y); // flip horizontally

                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(camera_img, face_classifier, storage_face, 1.5, 1,
                                                                   HaarDetectionType.ScaleImage, new CvSize(0, 0), new CvSize(200, 200)); // run face detection

                    for (int i = 0; i < faces.Total; i++)
                    {
                        camera_img.Rectangle(faces[i].Value.Rect, CvColor.Red); // draw a red rectangle on each detected face

                        CvSeq <CvAvgComp> eyes = Cv.HaarDetectObjects(camera_img, eye_classifier, storage_eye, 1.5, 1,
                                                                      HaarDetectionType.ScaleImage, new CvSize(35, 35), new CvSize(50, 50)); // run eye detection

                        for (int j = 0; j < eyes.Total; j++)                                                                                 // eyes.Total is changing continuously
                        {
                            if (eyes[j].Value.Rect.X > faces[i].Value.Rect.X && eyes[j].Value.Rect.Y > faces[i].Value.Rect.Y &&
                                eyes[j].Value.Rect.X + eyes[j].Value.Rect.Width < faces[i].Value.Rect.X + faces[i].Value.Rect.Width &&
                                eyes[j].Value.Rect.Y + eyes[j].Value.Rect.Height < (faces[i].Value.Rect.Y + faces[i].Value.Rect.Height) - 60)
                            {
                                camera_img.Rectangle(eyes[j].Value.Rect, CvColor.Yellow); // draw a yellow rectangle on each detected eye
                                Console.WriteLine("Recognition X, Y");
                                Console.WriteLine(">> eye X : {0}, eye Y : {1}", eyes[j].Value.Rect.X, eyes[j].Value.Rect.Y);

                                // Save the coordinates (left/right eye is picked up at random..) -> the initial eye coordinates are stored here at recognition and later used to decide whether the eyes have left the allowed range
                                //info.area_X = eyes[j].Value.Rect.X;
                                //info.area_Y = eyes[j].Value.Rect.Y;
                                info.area_X = faces[i].Value.Rect.X;
                                info.area_Y = faces[i].Value.Rect.Y;
                                rect_Width  = faces[i].Value.Rect.Width;
                                rect_Height = faces[i].Value.Rect.Height;
                                Console.WriteLine(">> face X : {0}, face Y : {1}", faces[i].Value.Rect.X, faces[i].Value.Rect.Y);
                            }
                        }
                    }

                    win.Image = camera_img;
                }
            }

            win.Close();
            Cv.ReleaseCapture(camera);
            Console.WriteLine("메모리 해제");

            info.total_time = DateTime.Now; // count seconds from when Start is pressed (total time)

            timer1.Enabled = true;          // start the timer

            timer2.Enabled = true;
            timer2.Tick   += new EventHandler(timer2_Tick);
            timer2.Start();

            if (info.cnt == 0) // on the first Start click, run the camera after 0.5 seconds
            {
                timer1.Interval = 500;
            }

            timer1.Tick += new EventHandler(timer1_Tick);
            timer1.Start();
        }
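
The nested if above checks that each detected eye rectangle lies inside the detected face rectangle, excluding roughly the bottom 60 pixels of the face. A small helper expressing the same condition is sketched below; the helper name is an assumption and it is not part of the original form.

        private static bool IsEyeInsideFace(CvRect eye, CvRect face)
        {
            // True when the eye rectangle lies fully inside the face rectangle,
            // excluding the bottom 60 pixels of the face (same condition as the if above).
            return eye.X > face.X &&
                   eye.Y > face.Y &&
                   eye.X + eye.Width < face.X + face.Width &&
                   eye.Y + eye.Height < face.Y + face.Height - 60;
        }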
Example #15
        // Timer handler
        private void timer1_Tick(object sender, EventArgs e)
        {
            if (info.cnt >= 0) // from the second run on, start the camera after 5 seconds
            {
                timer1.Interval = 5000;
            }

            CvCapture camera = new CvCapture(0); // create the camera
            CvWindow  win    = new CvWindow();   // create the window

            //CvHaarClassifierCascade face_classifier =
            //    CvHaarClassifierCascade.FromFile("C:\\김유민\\Visual Studio 2017\\haarcascade_frontalface_alt.xml"); // load the Haar face-detection classifier

            //CvHaarClassifierCascade eye_classifier =
            //    CvHaarClassifierCascade.FromFile("C:\\김유민\\Visual Studio 2017\\haarcascade_eye.xml"); // load the Haar eye-detection classifier

            CvHaarClassifierCascade face_classifier =
                CvHaarClassifierCascade.FromFile("./haarcascade_frontalface_alt.xml"); // load the Haar face-detection classifier

            CvHaarClassifierCascade eye_classifier =
                CvHaarClassifierCascade.FromFile("./haarcascade_eye.xml"); // load the Haar eye-detection classifier

            CvMemStorage storage_face = new CvMemStorage();                // memory storage for faces
            CvMemStorage storage_eye  = new CvMemStorage();                // memory storage for eyes

            bool check = true;

            info.time = DateTime.Now;
            TimeSpan timecal = DateTime.Now - info.time;

            while (CvWindow.WaitKey(10) != 27 && check) // < 0: exit on any key; != 27: exit on ESC
            {
                using (IplImage camera_img = camera.QueryFrame())
                {
                    storage_face.Clear();
                    storage_eye.Clear();

                    Cv.Flip(camera_img, camera_img, FlipMode.Y); // flip horizontally

                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(camera_img, face_classifier, storage_face, 1.5, 1,
                                                                   HaarDetectionType.ScaleImage, new CvSize(0, 0), new CvSize(200, 200)); // run face detection

                    for (int i = 0; i < faces.Total; i++)
                    {
                        camera_img.Rectangle(faces[i].Value.Rect, CvColor.Red); // draw a red rectangle on each detected face

                        CvSeq <CvAvgComp> eyes = Cv.HaarDetectObjects(camera_img, eye_classifier, storage_eye, 1.5, 1,
                                                                      HaarDetectionType.ScaleImage, new CvSize(35, 35), new CvSize(50, 50)); // run eye detection

                        for (int j = 0; j < eyes.Total; j++)                                                                                 // eyes.Total is changing continuously
                        {
                            if (eyes[j].Value.Rect.X > faces[i].Value.Rect.X && eyes[j].Value.Rect.Y > faces[i].Value.Rect.Y &&
                                eyes[j].Value.Rect.X + eyes[j].Value.Rect.Width < faces[i].Value.Rect.X + faces[i].Value.Rect.Width &&
                                eyes[j].Value.Rect.Y + eyes[j].Value.Rect.Height < (faces[i].Value.Rect.Y + faces[i].Value.Rect.Height) - 60)
                            {
                                camera_img.Rectangle(eyes[j].Value.Rect, CvColor.Yellow); // draw a yellow rectangle on each detected eye
                                //Console.WriteLine("eyes : {0}", eyes[j]);
                                Console.WriteLine("Comparing X, Y with Recognition X, Y");
                                Console.WriteLine(">> eye  X : {0}, eye  Y : {1}", eyes[j].Value.Rect.X, eyes[j].Value.Rect.Y);

                                // Save the coordinates (left/right eye is picked up at random..)
                                //info.eye_X = eyes[j].Value.Rect.X;
                                //info.eye_Y = eyes[j].Value.Rect.Y;
                                info.eye_X = faces[i].Value.Rect.X;
                                info.eye_Y = faces[i].Value.Rect.Y;
                                Console.WriteLine(">> face X : {0}, face Y : {1}", faces[i].Value.Rect.X, faces[i].Value.Rect.Y);
                                Cv.DrawRect(camera_img, info.area_X, info.area_Y, info.area_X + rect_Width, info.area_Y + rect_Height, CvColor.Green);
                            }
                        }
                    }

                    win.Image = camera_img;

                    // Video running time
                    timecal = DateTime.Now - info.time;

                    //if (timecal.Minutes == info.minutes) // after info.minutes (the configured minutes), check becomes false and the camera shuts off automatically
                    if (timecal.Seconds == 3)
                    {
                        check = false;
                    }
                }

                timer1.Stop();
            }

            // Eye-range check -> show/hide the label when the eyes move out of range
            if (info.eye_X < info.area_X - 15 || info.eye_X > info.area_X + 15 || info.eye_Y < info.area_Y - 15 || info.eye_Y > info.area_Y + 15)
            {
                info.wrongCount++;

                if (info.wrongCount == 2)
                {
                    Console.Beep(512, 300);
                    Console.Beep(650, 300);
                    Console.Beep(768, 300);
                    System.Windows.Forms.MessageBox.Show("자세를 바르게 하세요");
                    timer1.Stop();
                    info.wrongCount = 0;
                }
                //else
                //{
                //    timer1.Start();
                //}
            }

            info.cnt++;
            win.Close();
            Cv.ReleaseCapture(camera); // release memory
            //camera.Dispose(); // release memory
            Console.WriteLine("메모리 해제");
            Console.WriteLine("저장된 좌표 X : {0}, Y : {1}", info.eye_X, info.eye_Y);
            Console.WriteLine("");

            timer1.Start();

            if (flag == 1)
            {
                timer1.Stop();
            }
        }
Example #16
        private void CaptureMotion()
        {
            try
            {
                float wFactor = (float)this.Width / (float)CaptureBox.Width;
                float hFactor = (float)this.Height / (float)CaptureBox.Height;

                CvArr     array = null;
                CvCapture cap   = CvCapture.FromCamera(CaptureDevice.Any);

                this.Invoke(new Action(() =>
                {
                    lblLoading.Visible   = false;
                    radioButton1.Visible = true;
                    radioButton2.Visible = true;
                }));

                while (true)
                {
                    IplImage img = cap.QueryFrame();

                    if (img == null)
                    {
                        continue;
                    }

                    img.Flip(array, FlipMode.Y);

                    if (mode == 1)
                    {
                        string filepath = "haarcascade_frontalface_alt2.xml";

                        CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(filepath);
                        CvSeq <CvAvgComp>       faces   = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), 3.0, 1,
                                                                               HaarDetectionType.Zero, new CvSize(70, 70),
                                                                               new CvSize(500, 500));

                        foreach (CvAvgComp face in faces)
                        {
                            //IplImage ClonedImage = img.Clone();
                            //Cv.SetImageROI(ClonedImage, face.Rect);
                            //IplImage ThisFace = Cv.CreateImage(face.Rect.Size, ClonedImage.Depth, ClonedImage.NChannels);
                            //Cv.Copy(ClonedImage, ThisFace, null);
                            //Cv.ResetImageROI(ClonedImage);

                            //Bitmap FaceImage = BitmapConverter.ToBitmap(ThisFace);
                            //FaceImage.SetResolution(240, 180);
                            //CaptureBox.Image = FaceImage;

                            img.DrawRect(face.Rect, CvColor.Red, 3);

                            Bitmap FaceImage = BitmapConverter.ToBitmap(img);
                            FaceImage.SetResolution(240, 180);
                            CaptureBox.Image = FaceImage;

                            this.Invoke(new Action(() =>
                            {
                                LifeBox.Left = (int)(face.Rect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                                LifeBox.Top  = (int)(face.Rect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));

                                if (LifeBox.Left > (this.Width - LifeBox.Width - 12))
                                {
                                    LifeBox.Left = (this.Width - LifeBox.Width - 24);
                                }

                                if (LifeBox.Top > (this.Height - LifeBox.Height - 48))
                                {
                                    LifeBox.Top = (this.Height - LifeBox.Height - 48);
                                }

                                if (LifeBox.Left < 12)
                                {
                                    LifeBox.Left = 12;
                                }

                                if (LifeBox.Top < 12)
                                {
                                    LifeBox.Top = 12;
                                }

                                Thread.Sleep(30);
                            }));

                            break;
                        }
                    }
                    else
                    {
                        int AllBlobs = 0;

                        CvBlobs  blobs     = null;
                        IplImage imgHSVsrc = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                        IplImage imgHSVdst = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 1);

                        Cv.CvtColor(img, imgHSVsrc, ColorConversion.BgrToHsv);
                        Cv.InRangeS(imgHSVsrc, new CvScalar(86, 80, 30), new CvScalar(115, 250, 250), imgHSVdst);
                        Cv.ReleaseImage(imgHSVsrc);

                        blobs = new CvBlobs(imgHSVdst);
                        blobs.FilterByArea(7000, 40000);

                        AllBlobs = blobs.Count;

                        foreach (KeyValuePair <int, CvBlob> blob in blobs)
                        {
                            CvBlob  CurrentBlob = blob.Value;
                            CvRect  BlobRect = CurrentBlob.Rect;
                            CvPoint Point1, Point2;

                            Point1.X = BlobRect.X;
                            Point1.Y = BlobRect.Y;
                            Point2.X = BlobRect.X + BlobRect.Width;
                            Point2.Y = BlobRect.Y + BlobRect.Height;

                            img.DrawRect(Point1, Point2, CvColor.LightGreen, 3, LineType.AntiAlias);

                            this.Invoke(new Action(() =>
                            {
                                LifeBox.Left = (int)(BlobRect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                                LifeBox.Top  = (int)(BlobRect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));

                                if (LifeBox.Left > (this.Width - LifeBox.Width - 12))
                                {
                                    LifeBox.Left = (this.Width - LifeBox.Width - 24);
                                }

                                if (LifeBox.Top > (this.Height - LifeBox.Height - 48))
                                {
                                    LifeBox.Top = (this.Height - LifeBox.Height - 48);
                                }

                                if (LifeBox.Left < 12)
                                {
                                    LifeBox.Left = 12;
                                }

                                if (LifeBox.Top < 12)
                                {
                                    LifeBox.Top = 12;
                                }

                                Thread.Sleep(30);
                            }));

                            break;
                        }

                        Bitmap Item = BitmapConverter.ToBitmap(img);
                        Item.SetResolution(240, 180);
                        CaptureBox.Image = Item;

                        Bitmap HSVItem = BitmapConverter.ToBitmap(imgHSVdst);
                        HSVItem.SetResolution(240, 180);
                        HSVCaptureBox.Image = HSVItem;

                        Cv.ReleaseImage(imgHSVdst);
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("ERROR: " + e.Message + "DETAILS: " + e.StackTrace);
            }
        }
Example #17
        private void CaptureCameraCallback()
        {
            const double ScaleFactor  = 2.5;
            const int    MinNeighbors = 1;
            CvSize       MinSize      = new CvSize(30, 30);

            CvCapture cap = CvCapture.FromCamera(1);
            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_eye.xml");

            while (true)
            {
                IplImage img = cap.QueryFrame();
                //IplImage src = new IplImage(new CvSize(600, 400),BitDepth.U8,1);
                //Cv.Resize(img, src, Interpolation.Linear);
                CvSeq <CvAvgComp> eyes = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), ScaleFactor, MinNeighbors, HaarDetectionType.DoCannyPruning, MinSize);

                foreach (CvAvgComp eye in eyes.AsParallel())
                {
                    img.DrawRect(eye.Rect, CvColor.Red);

                    if (eye.Rect.Left > pctCvWindow.Width / 2)
                    {
                        try
                        {
                            IplImage rightEyeImg1 = img.Clone();
                            Cv.SetImageROI(rightEyeImg1, eye.Rect);
                            IplImage rightEyeImg2 = Cv.CreateImage(eye.Rect.Size, rightEyeImg1.Depth, rightEyeImg1.NChannels);
                            Cv.Copy(rightEyeImg1, rightEyeImg2, null);
                            Cv.ResetImageROI(rightEyeImg1);


                            Bitmap rightEyeBm = BitmapConverter.ToBitmap(rightEyeImg2);
                            pctRightEye.Image = rightEyeBm;
                        }
                        catch { }
                    }
                    else
                    {
                        try
                        {
                            IplImage leftEyeImg1 = img.Clone();
                            Cv.SetImageROI(leftEyeImg1, eye.Rect);
                            IplImage leftEyeImg2 = Cv.CreateImage(eye.Rect.Size, leftEyeImg1.Depth, leftEyeImg1.NChannels);
                            Cv.Copy(leftEyeImg1, leftEyeImg2, null);
                            Cv.ResetImageROI(leftEyeImg1);

                            Bitmap leftEyeBm = BitmapConverter.ToBitmap(leftEyeImg2);
                            //pctLeftEye.Image = leftEyeBm;
                            //pctLeftEye.Visible = false;
                        }catch {}
                    }
                }

                Bitmap bm = BitmapConverter.ToBitmap(img);
                bm.SetResolution(pctCvWindow.Width, pctCvWindow.Height);
                pctCvWindow.Image = bm;

                img = null;
                bm  = null;
            }
        }
Example #18
        private FaceImage FaceDetect(FaceImage faceImage)
        {
            if (faceImage.FullName == null || File.Exists(faceImage.FullName) == false || faceImage.FullName == string.Empty)
            {
                throw new ArgumentException(string.Format("Image's path is not valid: {0}.", faceImage.FullName));
            }

            var facesDir = CreateDirByShortName(Strings.Face_Database_Folder_Name);

            var fullName = CreateFaceFileFullName(faceImage, facesDir);

            faceImage.FullName = fullName;

            if (File.Exists(fullName))
            {
                return(faceImage);
            }

            try
            {
                using (IplImage img = new IplImage(faceImage.FullName, LoadMode.AnyColor))
                {
                    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                    {
                        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                        {
                            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                            Cv.Resize(gray, smallImg, Interpolation.Linear);
                            Cv.EqualizeHist(smallImg, smallImg);
                        }

                        //using (CvHaarClassifierCascade cascade = Cv.Load<CvHaarClassifierCascade>(Const.XmlHaarcascade))
                        using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Constants.FaceCascadeDefFileName))
                            using (CvMemStorage storage = new CvMemStorage())
                            {
                                storage.Clear();

                                Stopwatch         watch = Stopwatch.StartNew();
                                CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, /*1.2*/ ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));//(30,30)
                                watch.Stop();

                                DetectionTime = watch.ElapsedMilliseconds; //detection time (ms)

                                for (int i = 0; i < faces.Total; i++)
                                {
                                    CvRect r = faces[i].Value.Rect;
                                    //CvPoint center = new CvPoint
                                    //{
                                    //    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                    //    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                                    //};
                                    //int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                                    //img.Circle(center, radius, new CvColor(0, 0, 255), 3, LineType.AntiAlias, 0);

                                    smallImg.ROI = new CvRect(r.X, r.Y, r.Width, r.Height);

                                    IplImage subImage = smallImg.GetSubImage(smallImg.ROI);

                                    _face = BitmapConverter.ToBitmap(subImage);

                                    FaceImage newFaceImage = SaveFace(faceImage);
                                    return(newFaceImage);
                                }
                            }
                    }
                }
            }
            catch (Exception ex)
            {
                bool rethrow = ExceptionPolicy.HandleException(ex, "BusinessLogin Policy");
                if (rethrow)
                {
                    throw;
                }

                MessageBox.Show(string.Format("Failed to separate face from image"));
            }
            return(null);
        }
Example #19
        static void Main()
        {
            // CvCapture cap = CvCapture.FromFile("video.avi");
            CvCapture cap = CvCapture.FromFile("road_3.avi");

            CvWindow w     = new CvWindow("Lane Detection");
            CvWindow canny = new CvWindow("Lane Detection_2");
            CvWindow hough = new CvWindow("Lane Detection");
            //   CvWindow smoothing = new CvWindow("Lane Detection_3");


            IplImage                src, gray, dstCanny, halfFrame, smallImg;
            CvMemStorage            storage = new CvMemStorage();
            CvSeq                   lines;
            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_cars3.xml");

            const double Scale        = 2.0;
            const double ScaleFactor  = 1.05;
            const int    MinNeighbors = 3;
            double       min_range    = 70;
            double       max_range    = 120;


            CvSeq <CvAvgComp> cars;

            while (CvWindow.WaitKey(10) < 0)
            {
                src       = cap.QueryFrame();
                halfFrame = new IplImage(new CvSize(src.Size.Width / 2, src.Size.Height / 2), BitDepth.U8, 3);
                Cv.PyrDown(src, halfFrame, CvFilter.Gaussian5x5);


                gray = new IplImage(src.Size, BitDepth.U8, 1);

                dstCanny = new IplImage(src.Size, BitDepth.U8, 1);

                /*
                 *
                 * smallImg = new IplImage(new CvSize(Cv.Round(src.Width / Scale), Cv.Round(src.Height / Scale)), BitDepth.U8, 1);
                 * using (IplImage grey = new IplImage(src.Size, BitDepth.U8, 1))
                 * {
                 * Cv.CvtColor(src, grey, ColorConversion.BgrToGray);
                 * Cv.Resize(grey, smallImg, Interpolation.Linear);
                 * Cv.EqualizeHist(smallImg, smallImg);
                 * }
                 *
                 * cars = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, HaarDetectionType.DoCannyPruning, new CvSize(30, 30));
                 *
                 * for (int i = 0; i < cars.Total; i++)
                 * {
                 * CvRect r = cars[i].Value.Rect;
                 * CvPoint center = new CvPoint
                 * {
                 *     X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                 *     Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                 * };
                 * int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                 * src.Circle(center, radius, CvColor.Blue, 2, LineType.AntiAlias, 0);
                 * } */

                // Crop off top half of image since we're only interested in the lower portion of the video
                int halfWidth  = src.Width / 2;
                int halfHeight = src.Height / 2;
                int startX     = halfWidth - (halfWidth / 2);
                src.SetROI(new CvRect(0, halfHeight - 0, src.Width - 1, src.Height - 1));

                gray.SetROI(src.GetROI());
                dstCanny.SetROI(src.GetROI());

                src.CvtColor(gray, ColorConversion.BgrToGray);
                Cv.Smooth(gray, gray, SmoothType.Gaussian, 5, 5);
                Cv.Canny(gray, dstCanny, 50, 200, ApertureSize.Size3);

                storage.Clear();
                lines = dstCanny.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 100);

                for (int i = 0; i < lines.Total; i++)
                {
                    CvLineSegmentPoint elem = lines.GetSeqElem <CvLineSegmentPoint>(i).Value;

                    int    dx    = elem.P2.X - elem.P1.X;
                    int    dy    = elem.P2.Y - elem.P1.Y;
                    double angle = Math.Atan2(dy, dx) * 180 / Math.PI;

                    //   if (Math.Abs(angle) <= 10)
                    //     continue;

                    if (elem.P1.Y > elem.P2.Y + 50 || elem.P1.Y < elem.P2.Y - 50)
                    {
                        src.Line(elem.P1, elem.P2, CvColor.Green, 9, LineType.Link8, 0);
                    }
                }

                src.ResetROI();


                storage.Clear();
                w.Image = src;
                // canny.Image = dstCanny;
                // smoothing.Image = gray;
                //    w.Image = dstCanny;
                //  w.Image = dstCanny;
            }
        }
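
The segment filter above keeps lines whose endpoints differ by more than 50 pixels vertically, while the angle it computes goes unused (the commented-out check suggests an angle filter was intended). A sketch of such an angle-based filter is shown below; the helper name and the 10-degree threshold are assumptions, not values from the original program.

        static bool IsLaneCandidate(CvLineSegmentPoint seg, double minAngleDeg = 10.0)
        {
            // Keep only segments steeper than minAngleDeg, i.e. reject near-horizontal lines.
            double angle = Math.Atan2(seg.P2.Y - seg.P1.Y, seg.P2.X - seg.P1.X) * 180.0 / Math.PI;
            return Math.Abs(angle) > minAngleDeg;
        }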