Example #1
        private Image <Gray, Byte> SkinDetect(Image <Bgr, Byte> Input, IColor min, IColor max, int a)
        {
            Image <Gray, byte> skin = new Image <Gray, byte>(Input.Width, Input.Height);

            if (a == 1)
            {
                Image <Ycc, Byte> YCrCbInput = Input.Convert <Ycc, Byte>();
                skin = YCrCbInput.InRange((Ycc)min, (Ycc)max);
            }
            else
            {
                Image <Hsv, Byte> HsvInput = Input.Convert <Hsv, Byte>();
                skin = HsvInput.InRange((Hsv)min, (Hsv)max);
            }

            Image <Gray, Byte> skin2 = skin.Convert <Gray, Byte>();

            imageBox3.Image = skin2;

            StructuringElementEx Rect12 = new StructuringElementEx(12, 12, 6, 6, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_CROSS);
            StructuringElementEx Rect6  = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_CROSS);

            CvInvoke.cvErode(skin, skin, Rect6, 1);
            CvInvoke.cvDilate(skin, skin, Rect12, 1);

            return(skin);
        }
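The snippet above targets the legacy Emgu CV 2.x API (StructuringElementEx, CvInvoke.cvErode/cvDilate). For readers on Emgu CV 3 or later, a minimal sketch of the same mask-then-clean step is shown below; the helper name SkinMaskV3, the YCrCb-only path and the assumed using directives (Emgu.CV, Emgu.CV.CvEnum, Emgu.CV.Structure, Emgu.CV.Util, System.Drawing) are part of this sketch, not of the original example.

        // Hypothetical Emgu CV 3+ equivalent of the erode/dilate step above (a sketch, not the author's code).
        private Image <Gray, byte> SkinMaskV3(Image <Bgr, byte> input, Ycc min, Ycc max)
        {
            Image <Gray, byte> skin = input.Convert <Ycc, byte>().InRange(min, max);

            // GetStructuringElement replaces StructuringElementEx; Erode/Dilate replace cvErode/cvDilate.
            Mat cross6  = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(6, 6), new Point(3, 3));
            Mat cross12 = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(12, 12), new Point(6, 6));

            CvInvoke.Erode(skin, skin, cross6, new Point(-1, -1), 1, BorderType.Default, CvInvoke.MorphologyDefaultBorderValue);
            CvInvoke.Dilate(skin, skin, cross12, new Point(-1, -1), 1, BorderType.Default, CvInvoke.MorphologyDefaultBorderValue);
            return(skin);
        }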
Example #2
        public Image <Bgr, Byte> FindContourSperatly(Image <Bgr, Byte> image)//the image in which the contours are to be found is passed in as the input
        {
            Gray cannyThreshold           = new Gray(180);
            Gray cannyThresholdLinking    = new Gray(120);
            Image <Gray, Byte> grayImage  = image.Convert <Gray, Byte>();
            Image <Gray, Byte> cannyImage = new Image <Gray, Byte>(grayImage.Size);

            CvInvoke.cvCanny(grayImage, cannyImage, 100, 360, 3);//thresholds for Canny

            Image <Bgr, Byte> BoundryImage = image.CopyBlank();

            StructuringElementEx kernel = new StructuringElementEx(3, 3, 1, 1, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_ELLIPSE);

            CvInvoke.cvDilate(cannyImage, cannyImage, kernel, 1);

            IntPtr cont = IntPtr.Zero;

            Point[] pts;
            Point   p = new Point(0, 0);

            using (MemStorage storage = new MemStorage()) //allocate storage for contour approximation
                for (Contour <Point> contours = cannyImage.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                                                                        Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_EXTERNAL); contours != null; contours = contours.HNext)
                {
                    pts = contours.ToArray();

                    //******** draw a polyline around the contour ******************/
                    BoundryImage.DrawPolyline(pts, true, new Bgr(255, 0, 255), 3);

                    /*************** extract the contour region of the image ****************/
                }


            return(BoundryImage);
        }
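The same Canny → dilate → contour pipeline can also be expressed against the Emgu CV 3+ API, where FindContours fills a VectorOfVectorOfPoint instead of returning a Contour<Point> chain. The sketch below assumes that API version (plus Emgu.CV.Util for the vector types); the method name DrawContourOutlines is invented here for illustration.

        // Hypothetical Emgu CV 3+ version of the contour pipeline above (sketch only).
        public Image <Bgr, Byte> DrawContourOutlines(Image <Bgr, Byte> image)
        {
            Image <Gray, Byte> grayImage  = image.Convert <Gray, Byte>();
            Image <Gray, Byte> cannyImage = new Image <Gray, Byte>(grayImage.Size);
            CvInvoke.Canny(grayImage, cannyImage, 100, 360);

            Mat kernel = CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(3, 3), new Point(1, 1));
            CvInvoke.Dilate(cannyImage, cannyImage, kernel, new Point(-1, -1), 1, BorderType.Default, CvInvoke.MorphologyDefaultBorderValue);

            Image <Bgr, Byte> BoundryImage = image.CopyBlank();
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                // External contours only, simple chain approximation, as in the original.
                CvInvoke.FindContours(cannyImage, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
                for (int i = 0; i < contours.Size; i++)
                {
                    CvInvoke.DrawContours(BoundryImage, contours, i, new MCvScalar(255, 0, 255), 3);
                }
            }
            return(BoundryImage);
        }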
Example #3
        // Finds the skin contours and bounding boxes in a supplied image
        // This part was developed by ourselves
        // It detects and separates the skin regions of the image supplied to the class, based on HSV colors
        // The remaining pixels are then eroded and dilated to obtain a "cleaner" image and to reduce noise and imperfections
        // All contours in the resulting image are then searched and the largest one is set aside so it can be processed later
        public void FindSkinContours()
        {
            // Convert the image colors to HSV
            Image <Hsv, Byte> hsvImg = imgOrg.Convert <Hsv, Byte>();

            // Filter the image pixels so that only those close to skin color are kept
            // We found these values on the Internet and adapted them slightly for our use
            imgSkin = hsvImg.InRange(new Hsv(0, 48, 80), new Hsv(20, 255, 255));

            imgSkin = imgSkin.ThresholdBinary(new Gray(200), new Gray(255));

            // Erode and dilate to remove imperfections
            // We found these values by trial and error, aiming for the cleanest possible hand contour
            StructuringElementEx erodeStrctEl = new StructuringElementEx(4, 4, 2, 2, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(imgSkin, imgSkin, erodeStrctEl, 1);
            StructuringElementEx dilateStrctEl = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(imgSkin, imgSkin, dilateStrctEl, 3);

            // Loop over all the contours found in the image
            for (Contour <Point> contours = imgSkin.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_TREE, contoursStorage); contours != null; contours = contours.HNext)
            {
                // If a contour is larger than 20x20 pixels, process it
                if (contours.BoundingRectangle.Width > 20 && contours.BoundingRectangle.Height > 20)
                {
                    // Get the bounding box around the contour
                    Rectangle rect = contours.BoundingRectangle;

                    // Enlarge the bounding box and make sure it does not go outside the image
                    rect.X = rect.X - 10;
                    rect.X = (rect.X < 0) ? 0 : rect.X;
                    rect.Y = rect.Y - 10;
                    rect.Y = (rect.Y < 0) ? 0 : rect.Y;

                    rect.Height = (rect.Height + 20);
                    rect.Width  = (rect.Width + 20);

                    // Add the bounding box and the contour to the lists
                    boundingBoxes.Add(rect);
                    contoursList.Add(contours);
                }
            }

            // If any contours were found, look for the largest one
            if (boundingBoxes.Count > 0)
            {
                FindBiggestBoundingBox();
                FindBiggestContour();
                isHandDetected = true;
            }
            else
            {
                // Otherwise nothing was detected
                isHandDetected = false;
                fingerNum      = 0;
            }
        }
Example #4
        public FeatureExtractor(IPerspectiveCorrector perspectiveCorrector, int N, int M, int frameWidth, int frameHeight, Image <Bgr, byte> backgroundImage)
        {
            this.perspectiveCorrector = perspectiveCorrector;
            this.background           = backgroundImage.Resize(frameWidth, frameHeight, INTER.CV_INTER_AREA).Convert <Gray, byte>();
            this.N           = N;
            this.M           = M;
            this.frameWidth  = frameWidth;
            this.frameHeight = frameHeight;

            structuringElement = new StructuringElementEx(3, 3, 1, 1, CV_ELEMENT_SHAPE.CV_SHAPE_ELLIPSE);
        }
Example #5
        /// <summary>
        /// WORKS OK! USE IT!
        /// </summary>
        /// <param name="currentFrame"></param>
        /// <param name="draw"></param>
        /// <returns></returns>
        public static Rectangle FindBiggestBlob(this Image <Bgr, Byte> currentFrame, bool draw = false)
        {
            using (Image <Gray, Byte> img1 = currentFrame.Convert <Gray, Byte>())
                using (Image <Gray, Byte> img2 = img1.PyrDown())
                    using (Image <Gray, Byte> grayImage = img2.PyrUp())
                        using (Image <Gray, Byte> cannyImage = new Image <Gray, Byte>(grayImage.Size))
                        {
                            CvInvoke.cvCanny(grayImage, cannyImage, 10, 60, 3);

                            using (StructuringElementEx kernel = new StructuringElementEx(3, 3, 1, 1, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT))
                                CvInvoke.cvDilate(cannyImage, cannyImage, kernel, 1);

                            IntPtr cont = IntPtr.Zero;

                            int top = int.MaxValue, left = int.MaxValue, bottom = 0, right = 0;

                            //allocate storage for contour approximation
                            using (MemStorage storage = new MemStorage())
                            {
                                for (Contour <Point> contours = cannyImage.FindContours(CHAIN_APPROX_METHOD.CV_LINK_RUNS, RETR_TYPE.CV_RETR_LIST);
                                     contours != null; contours = contours.HNext)
                                {
#if KNOW_HOW
                                    IntPtr seq     = CvInvoke.cvConvexHull2(contours, storage.Ptr, Emgu.CV.CvEnum.ORIENTATION.CV_CLOCKWISE, 0);
                                    IntPtr defects = CvInvoke.cvConvexityDefects(contours, seq, storage);
#endif
                                    Seq <Point> tr = contours.GetConvexHull(Emgu.CV.CvEnum.ORIENTATION.CV_CLOCKWISE);

#if KNOW_HOW
                                    Seq <Emgu.CV.Structure.MCvConvexityDefect> te = contours.GetConvexityDefacts(storage, Emgu.CV.CvEnum.ORIENTATION.CV_CLOCKWISE);
#endif
                                    if (draw)
                                    {
                                        currentFrame.Draw(tr.BoundingRectangle, new Bgr(Color.Red), 3);
                                    }

                                    top    = Math.Min(top, tr.BoundingRectangle.Top);
                                    left   = Math.Min(left, tr.BoundingRectangle.Left);
                                    bottom = Math.Max(bottom, tr.BoundingRectangle.Bottom);
                                    right  = Math.Max(right, tr.BoundingRectangle.Right);
                                }
                            }

                            Rectangle result = Rectangle.FromLTRB(left, top, right, bottom);
                            if (draw)
                            {
                                currentFrame.Draw(result, new Bgr(Color.Blue), 3);
                            }
                            return(result);
                        }
        }
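A possible call site for the extension method above; the capture loop sketched here (legacy Emgu CV 2.x Capture class, camera index 0) is illustrative and not part of the original project.

        // Illustrative usage of FindBiggestBlob inside a frame-grabbing method.
        using (Capture capture = new Capture(0))
        using (Image <Bgr, Byte> frame = capture.QueryFrame())
        {
            Rectangle biggest = frame.FindBiggestBlob(draw: true); // draws per-contour hull boxes in red and the merged result in blue
            Console.WriteLine("Biggest blob: " + biggest);
        }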
Example #6
        private Image <Gray, byte> GetSkin(Image <Bgr, byte> Img, IColor min, IColor Max)
        {
            Image <Ycc, byte>  Img_Ycbcr = Img.Convert <Ycc, byte>();
            Image <Gray, byte> skins     = new Image <Gray, byte>(Img.Width, Img.Height);

            skins = Img_Ycbcr.InRange((Ycc)min, (Ycc)Max);
            StructuringElementEx rect_for_erode = new StructuringElementEx(12, 12, 6, 6, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(skins, skins, rect_for_erode, 1);
            StructuringElementEx rect_for_Dilate = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(skins, skins, rect_for_Dilate, 2);
            return(skins);
        }
        public override Image <Gray, byte> DetectSkin(Image <Bgr, byte> Img, IColor min, IColor max)
        {
            Image <Ycc, Byte>  currentYCrCbFrame = Img.Convert <Ycc, Byte>();
            Image <Gray, byte> skin = new Image <Gray, byte>(Img.Width, Img.Height);

            skin = currentYCrCbFrame.InRange((Ycc)min, (Ycc)max);
            StructuringElementEx rect_12 = new StructuringElementEx(12, 12, 6, 6, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(skin, skin, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(skin, skin, rect_6, 2);
            return(skin);
        }
        public override Image <Gray, byte> DetectSkin(Image <Bgr, byte> Img, IColor min, IColor max)
        {
            //Code adapted from here
            // http://blog.csdn.net/scyscyao/archive/2010/04/09/5468577.aspx
            // Look at this paper for reference (Chinese!!!!!)
            // http://www.chinamca.com/UploadFile/200642991948257.pdf
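            // The pixel loop below implements that paper's elliptical skin test in the Cr-Cb plane:
            // the chroma values are shifted towards the ellipse centre (Cb - 109, Cr - 152), rotated and
            // scaled with fixed-point integer arithmetic, and a pixel is marked as skin (255) when the
            // resulting point falls inside the ellipse - threshold 700 for dark pixels (Y < 100), 850 otherwise.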

            Image <Ycc, Byte>  currentYCrCbFrame = Img.Convert <Ycc, Byte>();
            Image <Gray, Byte> skin = new Image <Gray, Byte>(Img.Width, Img.Height);

            int y, cr, cb, l, x1, y1, value;

            int rows = Img.Rows;
            int cols = Img.Cols;

            Byte[,,] YCrCbData = currentYCrCbFrame.Data;
            Byte[,,] skinData  = skin.Data;

            for (int i = 0; i < rows; i++)
            {
                for (int j = 0; j < cols; j++)
                {
                    y  = YCrCbData[i, j, 0];
                    cr = YCrCbData[i, j, 1];
                    cb = YCrCbData[i, j, 2];

                    cb   -= 109;
                    cr   -= 152;
                    x1    = (819 * cr - 614 * cb) / 32 + 51;
                    y1    = (819 * cr + 614 * cb) / 32 + 77;
                    x1    = x1 * 41 / 1024;
                    y1    = y1 * 73 / 1024;
                    value = x1 * x1 + y1 * y1;
                    if (y < 100)
                    {
                        skinData[i, j, 0] = (value < 700) ? (byte)255 : (byte)0;
                    }
                    else
                    {
                        skinData[i, j, 0] = (value < 850) ? (byte)255 : (byte)0;
                    }
                }
            }
            var rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(skin, skin, rect_6, 1);
            CvInvoke.cvDilate(skin, skin, rect_6, 2);
            return(skin);
        }
Example #9
        private Math::Matrix <double> CalculateInputMatrix(IList <string> frameImageFilePaths)
        {
            Math::Matrix <double> X      = new DenseMatrix(frameImageFilePaths.Count, featureExtractor.Length);
            StructuringElementEx  kernel = new StructuringElementEx(3, 3, 1, 1, CV_ELEMENT_SHAPE.CV_SHAPE_ELLIPSE);

            ParallelOptions options = new ParallelOptions();

            options.MaxDegreeOfParallelism = 1;
            Parallel.ForEach(Enumerable.Range(0, frameImageFilePaths.Count), options, i =>
            {
                Math::Vector <double> featureVector = featureExtractor.ExtractFeatures(new Image <Gray, Byte>(frameImageFilePaths[i]));
                X.SetSubMatrix(i, 1, 0, featureExtractor.Length, featureVector.ToRowMatrix());
            });

            Console.Write(X.Row(1));
            return(X);
        }
        public override Image <Gray, byte> DetectSkin(Image <Bgr, byte> Img, IColor min, IColor max)
        {
            Image <Ycc, Byte>  currentYCrCbFrame = Img.Convert <Ycc, Byte>();
            Image <Gray, Byte> skin = new Image <Gray, Byte>(Img.Width, Img.Height);

            int y, cr, cb, l, x1, y1, value;

            int rows = Img.Rows;
            int cols = Img.Cols;

            Byte[, ,] YCrCbData = currentYCrCbFrame.Data;
            Byte[, ,] skinData  = skin.Data;

            for (int i = 0; i < rows; i++)
            {
                for (int j = 0; j < cols; j++)
                {
                    y  = YCrCbData[i, j, 0];
                    cr = YCrCbData[i, j, 1];
                    cb = YCrCbData[i, j, 2];

                    cb   -= 109;
                    cr   -= 152;
                    x1    = (819 * cr - 614 * cb) / 32 + 51;
                    y1    = (819 * cr + 614 * cb) / 32 + 77;
                    x1    = x1 * 41 / 1024;
                    y1    = y1 * 73 / 1024;
                    value = x1 * x1 + y1 * y1;
                    if (y < 100)
                    {
                        skinData[i, j, 0] = (value < 700) ? (byte)255 : (byte)0;
                    }
                    else
                    {
                        skinData[i, j, 0] = (value < 850) ? (byte)255 : (byte)0;
                    }
                }
            }
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(skin, skin, rect_6, 1);
            CvInvoke.cvDilate(skin, skin, rect_6, 2);
            return(skin);
        }
        ///////////////////////////////////////////////////////////////////////////////////////////
        void processFrameAndUpdateGUI(object sender, EventArgs arg)
        {
            imgOriginal = capWebcam.QueryFrame();               // get next frame from the webcam

            if (imgOriginal == null)                            // if we did not get a frame
            {                                                   // show error via message box
                MessageBox.Show("unable to read from webcam" + Environment.NewLine + Environment.NewLine +
                                "exiting program");
                Environment.Exit(0);                            // and exit program
            }

            imgBlurredBGR = imgOriginal.SmoothGaussian(5);                                                                      // blur

            imgProcessed = imgBlurredBGR.InRange(new Bgr(0, 0, 175), new Bgr(100, 100, 256));                                   // filter on color

            imgProcessed = imgProcessed.SmoothGaussian(5);                                                                      // blur again

            StructuringElementEx structuringElementEx = new StructuringElementEx(5, 5, 1, 1, CV_ELEMENT_SHAPE.CV_SHAPE_RECT);   // declare structuring element to use in dilate and erode

            CvInvoke.cvDilate(imgProcessed, imgProcessed, structuringElementEx, 1);                                             // close image (dilate, then erode)
            CvInvoke.cvErode(imgProcessed, imgProcessed, structuringElementEx, 1);                                              // closing "closes" (i.e. fills in) foreground gaps
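            // HoughCircles arguments below (legacy Emgu 2.x signature): Canny threshold 100, accumulator
            // threshold 50, dp = 2, minimum distance between centres = height / 4, radius range 10..400.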

            CircleF[] circles = imgProcessed.HoughCircles(new Gray(100), new Gray(50), 2, imgProcessed.Height / 4, 10, 400)[0]; // fill variable circles with all circles in the processed image

            foreach (CircleF circle in circles)                                                                                 // for each circle
            {
                if (txtXYRadius.Text != "")
                {
                    txtXYRadius.AppendText(Environment.NewLine);                                      // if we are not on the first line in the text box then insert a new line char
                }
                txtXYRadius.AppendText("ball position = x " + circle.Center.X.ToString().PadLeft(4) + // print ball position and radius
                                       ", y = " + circle.Center.Y.ToString().PadLeft(4) +             //
                                       ", radius = " + circle.Radius.ToString("###.000").PadLeft(7)); //

                txtXYRadius.ScrollToCaret();                                                          // scroll down in text box so most recent line added (at the bottom) will be shown

                // draw a small green circle at the center of the detected object
                CvInvoke.cvCircle(imgOriginal, new Point((int)circle.Center.X, (int)circle.Center.Y), 3, new MCvScalar(0, 255, 0), -1, LINE_TYPE.CV_AA, 0);

                imgOriginal.Draw(circle, new Bgr(Color.Red), 3); // draw a red circle around the detected object
            }
            ibOriginal.Image  = imgOriginal;                     // update image boxes on form
            ibProcessed.Image = imgProcessed;                    //
        }
Example #12
        // Determine whether the obstacle color matches
        public int obstacle_YccColorCheck(Image <Bgr, Byte> iamge, int totalPicxel, int pos_x, int pos_y, int img_width, int img_height, int min1, int min2, int min3, int max1, int max2, int max3)
        {
            int pixCount = 0;

            Image <Ycc, Byte>  YCrCbFrame   = iamge.Convert <Ycc, Byte>();                                 // convert to YCrCb
            Image <Gray, byte> colorSetting = new Image <Gray, byte>(YCrCbFrame.Width, YCrCbFrame.Height); // gray copy of the Ycc-range mask so it can be eroded and dilated

            Ycc YCrCb_min = new Ycc(min1, min2, min3);
            Ycc YCrCb_max = new Ycc(max1, max2, max3);                         // blue color range

            colorSetting = YCrCbFrame.InRange((Ycc)YCrCb_min, (Ycc)YCrCb_max); // set the color range

            StructuringElementEx rect_12 = new StructuringElementEx(12, 12, 6, 6, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(colorSetting, colorSetting, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(colorSetting, colorSetting, rect_6, 2);          // erode and dilate

            Image <Bgr, Byte> colorCount = colorSetting.Convert <Bgr, Byte>(); // to count the pixels

            for (int x = pos_x; x < pos_x + img_width; x++)
            {
                for (int y = pos_y; y < pos_y + img_height; y++)
                {
                    if (colorCount[y, x].Equals(new Bgr(255, 255, 255)))
                    {
                        pixCount++;
                        if (totalPicxel / 10 <= pixCount) // once the pixel count passes the threshold, change the color array and exit
                        {
                            return(1);
                        }

                        if (x > pos_x / 5 + x && y > pos_y / 5 + y) // if it goes beyond 1/5 of the coordinates, treat it as not present
                        {
                            return(-1);
                        }
                    }
                }
            }

            return(-1);
        }
Example #13
        public static double GetMinkowskiDimension(Image <Gray, byte> image)
        {
            Image <Gray, byte> dilated = new Image <Gray, byte>(image.Width, image.Height);

            int nMaxDisk   = 15;
            int diskStep   = 1;
            int nDiskSteps = ((nMaxDisk - 2) / diskStep) + 1;

            MathN::Matrix <double> X = new DenseMatrix(nDiskSteps, 2);
            MathN::Vector <double> Y = new DenseVector(nDiskSteps);

            X[0, 0] = 1;
            X[0, 1] = Math.Log(image.CountNonzero()[0]);

            int row = 1;

            for (int diskSize = 1 + diskStep; diskSize < nMaxDisk; diskSize += diskStep)
            {
                StructuringElementEx kernel = new StructuringElementEx(diskSize, diskSize, (diskSize - 1) / 2, (diskSize - 1) / 2, CV_ELEMENT_SHAPE.CV_SHAPE_ELLIPSE);
                CvInvoke.cvDilate(image, dilated, kernel, 1);

                X[row, 0] = 1;
                X[row, 1] = Math.Log(diskSize);
                Y[row]    = Math.Log(dilated.CountNonzero()[0]);

                if (Double.IsNegativeInfinity(Y[row]))
                {
                    Y[row] = 0;
                }
                ++row;
            }

            MathN::Vector <double> W = X.QR().Solve(Y);

            double slope = W[1];

            return(2 - slope);
        }
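For reference, the regression above is the standard dilation ("Minkowski sausage") estimate of the box-counting dimension: writing A(r) for the non-zero pixel count after dilating the set with a disk of size r, a set of dimension D satisfies approximately

    \log A(r) \approx (2 - D)\,\log r + c \quad\Longrightarrow\quad D \approx 2 - \text{slope},

which is why the least-squares slope of log A(r) against log r is subtracted from 2 in the return statement.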
Example #14
        void IdleProcess(object sender, EventArgs e)
        {
            Emgu.CV.Image <Bgr, Byte> CameraImage = camera.QueryFrame();
            CameraImage = new Image <Bgr, Byte>(CropImage(ImageRect));


            if (iscroping)
            {
                CameraImage.Draw(new Rectangle(cropx, cropy, cropwidth, cropheight), new Bgr(Color.Green), 3);
            }

            camimagebox.Image = CameraImage.Bitmap;
            ProcessedImage    = CameraImage.InRange(new Bgr(Bmin.Value, Gmin.Value, Rmin.Value), new Bgr(Bmax.Value, Gmax.Value, Rmax.Value));

            ProcessedImage = ProcessedImage.SmoothGaussian(9);   // SmoothGaussian returns a new image; assign the result so the blur takes effect
            StructuringElementEx rect_12 = new StructuringElementEx(12, 12, 6, 6, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(ProcessedImage, ProcessedImage, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(ProcessedImage, ProcessedImage, rect_6, 2);

            ExtractShapes(ProcessedImage, CameraImage);
            processedimagebox.Image = ProcessedImage.Bitmap;

            /*  if (portconnected)
             * {
             *    if (tosend == 1)
             *        serialPort1.Write("To send Cuboid");
             *    else if (tosend == 2)
             *        serialPort1.Write("To Send Cylinder");
             * }
             */

            label1.Text = rectangle_x + "& Y:" + rectangle_y;
        }
Example #15
        public bool detectAlone(Image <Gray, byte> img, Image <Gray, byte> prev, Rectangle eyeROI, Rectangle prevEyeROI, ref Boolean isShook)
        {
            if (eyeROI.Width == 0 || prevEyeROI.Width == 0)
            {
                return(false);
            }

            // when the difference between two eyeROI is huge, then it is probably caused by shaking
            int axisGap = 10;

            if (System.Math.Abs(eyeROI.X - prevEyeROI.X) > axisGap || System.Math.Abs(eyeROI.Y - prevEyeROI.Y) > axisGap)
            {
                isShook = true;
                return(false);
            }

            int widthGap = 10;

            if (System.Math.Abs(eyeROI.Width - prevEyeROI.Width) > widthGap) // compare widths (the original compared width with height)
            {
                isShook = true;
                return(false);
            }

            // Enlarge the ROI to make the algorithm more stable
            float widthFactor = 1.2F;
            int   newX        = (int)(eyeROI.X - (widthFactor - 1) * eyeROI.Width / 2);
            int   newY        = (int)(eyeROI.Y - (widthFactor - 1) * eyeROI.Height / 2);
            int   newWidth    = (int)(widthFactor * eyeROI.Width);
            int   newHeight   = (int)(widthFactor * eyeROI.Height);

            // Boundary judgment
            if (newX < 0)
            {
                newX = 0;
            }
            if (newY < 0)
            {
                newY = 0;
            }
            if (newX + newWidth > img.Width)
            {
                newX = img.Width - newWidth;
            }
            if (newY + newHeight > img.Height)
            {
                newY = img.Height - newHeight;
            }

            Rectangle newROI = new Rectangle(new Point(newX, newY), new Size(newWidth, newHeight));

            Image <Gray, byte> diff = new Image <Gray, byte>(img.Width, img.Height);

            CvInvoke.cvSetImageROI(img, newROI);
            CvInvoke.cvSetImageROI(prev, newROI);
            CvInvoke.cvSetImageROI(diff, newROI);

            CvInvoke.cvSub(img, prev, diff, IntPtr.Zero);

            CvInvoke.cvThreshold(diff, diff, 100, 255, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY);

            StructuringElementEx element = new StructuringElementEx(5, 5, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_ELLIPSE);

            CvInvoke.cvMorphologyEx(diff, diff, IntPtr.Zero, element, Emgu.CV.CvEnum.CV_MORPH_OP.CV_MOP_CLOSE, 1);
            CvInvoke.cvMorphologyEx(diff, diff, IntPtr.Zero, element, Emgu.CV.CvEnum.CV_MORPH_OP.CV_MOP_OPEN, 3);

            IntPtr     comp    = new IntPtr(); // pointer to the first detected connected component
            MemStorage storage = new MemStorage();
            int        nc      = CvInvoke.cvFindContours(diff, storage, ref comp, StructSize.MCvContour,
                                                         Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_CCOMP, Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, new Point(0, 0));

            // save the eye region and the previous eye region for later use
            Image <Gray, byte> eye     = img.Copy();
            Image <Gray, byte> prevEye = prev.Copy();

            CvInvoke.cvResetImageROI(img);
            CvInvoke.cvResetImageROI(prev);
            CvInvoke.cvResetImageROI(diff);


            if (nc != 1)
            {
                return(false);
            }

            Rectangle rect = CvInvoke.cvBoundingRect(comp, 1);

            rect.X = rect.X + newROI.X;
            rect.Y = rect.Y + newROI.Y;

            if (rect.X < eyeROI.X || rect.Y < eyeROI.Y)
            {
                return(false);
            }
            if (rect.X + rect.Width > eyeROI.X + eyeROI.Width || rect.Y + rect.Height > eyeROI.Y + eyeROI.Height)
            {
                return(false);
            }

            // reduce the effect of shaking
            byte pixelThreshold  = 100;
            int  sumThresholdLow = 100;
            int  sumThresholdUp  = 600;

            int sumPixelEye = 0;

            // Image.Data is indexed [row, column, channel], so iterate rows by Height and columns by Width
            for (int i = 0; i < eye.Height; i++)
            {
                for (int j = 0; j < eye.Width; j++)
                {
                    if (eye.Data[i, j, 0] < pixelThreshold)
                    {
                        sumPixelEye++;
                    }
                }
            }

            int sumPixelPrevEye = 0;

            for (int i = 0; i < prevEye.Height; i++)
            {
                for (int j = 0; j < prevEye.Width; j++)
                {
                    if (prevEye.Data[i, j, 0] < pixelThreshold)
                    {
                        sumPixelPrevEye++;
                    }
                }
            }


            if (sumPixelEye > sumThresholdLow && sumPixelEye < sumThresholdUp && sumPixelPrevEye > sumThresholdLow && sumPixelPrevEye < sumThresholdUp)
            {
                isShook = true;
                return(false);
            }
            ///

            return(true);
        }
        void CountingFingersAndUpdateGUI(object Sender, EventArgs agr)
        {
            int Num_Fingers = 0;

            Double FirstResult  = 0;
            Double SecondResult = 0;

            //querying image

            Orignal_img = WebCam.QueryFrame().Resize(621, 446, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            if (Orignal_img == null)
            {
                return;
            }
            //Applying YCrCb filter
            //Image<Ycc, Byte> currentYCrCbFrame = imgOrignal.Convert<Ycc, byte>();
            //Image<Gray, byte> GloveColor = new Image<Gray, byte>(imgOrignal.Width, imgOrignal.Height);

            //skin = currentYCrCbFrame.InRange(new Ycc(0,139,0), new Ycc(193, 255, 193));

            gloveDetector = new YCrCbGloveDetector();

            Image <Gray, Byte> GloveColor = gloveDetector.DetectGlove(Orignal_img, YCrCb_min, YCrCb_max);

            OrignalCopy.Image = GloveColor.Resize(190, 167, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            iborignal.Image   = Orignal_img;

            StructuringElementEx Shape_rect_12 = new StructuringElementEx(10, 10, 5, 5, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            //Eroding the source image using the specified structuring element
            CvInvoke.cvErode(GloveColor, GloveColor, Shape_rect_12, 1);

            StructuringElementEx Shape_rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            //dilating the source image using the specified structuring element
            CvInvoke.cvDilate(GloveColor, GloveColor, Shape_rect_6, 1);

            GloveColor = GloveColor.Flip(FLIP.HORIZONTAL);   // comment this line out to stop flipping the image

            GloveColor = GloveColor.SmoothGaussian(9);       // smooth the filtered, eroded and dilated image

            Orignal_img = Orignal_img.Flip(FLIP.HORIZONTAL); // comment this line out to stop flipping the image

            //extract the contours and apply the convexity defect algorithm to count the fingers

            Contour <Point> Detectedcontours = GloveColor.FindContours();
            Contour <Point> LargestContour   = null;

            //extracting the biggest contour.


            while (Detectedcontours != null)
            {
                FirstResult = Detectedcontours.Area;
                if (FirstResult > SecondResult)
                {
                    SecondResult   = FirstResult;
                    LargestContour = Detectedcontours;
                }
                Detectedcontours = Detectedcontours.HNext;
            }

            //apply the convexity defect algorithm to count the fingers

            if (LargestContour != null)
            {
                Num_Fingers = 0; //number of fingers

                LargestContour = LargestContour.ApproxPoly((0.000025));
                Orignal_img.Draw(LargestContour, new Bgr(Color.LimeGreen), 2);

                Hull = LargestContour.GetConvexHull(ORIENTATION.CV_CLOCKWISE);

                Found_defects = LargestContour.GetConvexityDefacts(storage, ORIENTATION.CV_CLOCKWISE);
                Orignal_img.DrawPolyline(Hull.ToArray(), true, new Bgr(0, 0, 256), 3);

                OutLine_box = LargestContour.GetMinAreaRect();   // compute the min-area box before using its centre
                Orignal_img.Draw(new CircleF(new PointF(OutLine_box.center.X, OutLine_box.center.Y), 5), new Bgr(200, 125, 75), 3);

                filteredHull = new Seq <Point>(storage);
                for (int i = 0; i < Hull.Total; i++)
                {
                    if (Math.Sqrt(Math.Pow(Hull[i].X - Hull[i + 1].X, 2) + Math.Pow(Hull[i].Y - Hull[i + 1].Y, 2)) > OutLine_box.size.Width / 10)
                    {
                        filteredHull.Push(Hull[i]);
                    }
                }

                ArrayOfDefects = Found_defects.ToArray();

                for (int i = 0; i < Found_defects.Total; i++)
                {
                    PointF PointOfStart = new PointF((float)ArrayOfDefects[i].StartPoint.X,
                                                     (float)ArrayOfDefects[i].StartPoint.Y);

                    PointF PointOfdepth = new PointF((float)ArrayOfDefects[i].DepthPoint.X,
                                                     (float)ArrayOfDefects[i].DepthPoint.Y);

                    PointF PointOfend = new PointF((float)ArrayOfDefects[i].EndPoint.X,
                                                   (float)ArrayOfDefects[i].EndPoint.Y);


                    CircleF CircleOfStart = new CircleF(PointOfStart, 10f);
                    CircleF CircleOfDepth = new CircleF(PointOfdepth, 10f);
                    CircleF CircleOfEnd   = new CircleF(PointOfend, 10f);
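                    // Finger-counting heuristic used below: a defect counts as a raised finger when its
                    // start (fingertip) or depth (valley) point lies above the hand's centre, the fingertip
                    // is above the valley, and the fingertip-to-valley distance exceeds a fraction of the
                    // bounding-box height, which filters out small noise defects.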


                    if ((CircleOfStart.Center.Y < OutLine_box.center.Y || CircleOfDepth.Center.Y < OutLine_box.center.Y) &&
                        (CircleOfStart.Center.Y < CircleOfDepth.Center.Y) &&
                        (Math.Sqrt(Math.Pow(CircleOfStart.Center.X - CircleOfDepth.Center.X, 2) +
                                   Math.Pow(CircleOfStart.Center.Y - CircleOfDepth.Center.Y, 2)) >
                         OutLine_box.size.Height / 6.5))
                    {
                        Num_Fingers++;
                    }

                    Orignal_img.Draw(CircleOfStart, new Bgr(Color.SkyBlue), 2);
                    Orignal_img.Draw(CircleOfDepth, new Bgr(Color.Yellow), 2);
                }

                label2.Text = Num_Fingers.ToString();            // updating finger count
            }//End of convexity defect algorithm

            // Finding the center of contour

            MCvMoments handMovements = new MCvMoments();               // a new MCvMoments object

            try
            {
                handMovements = LargestContour.GetMoments();           // Moments of biggestContour
            }

            catch (NullReferenceException except)
            {
                //label2.Text = except.Message;
                return;
            }

            CvInvoke.cvMoments(LargestContour, ref handMovements, 0);

            double m_00 = CvInvoke.cvGetSpatialMoment(ref handMovements, 0, 0);
            double m_10 = CvInvoke.cvGetSpatialMoment(ref handMovements, 1, 0);
            double m_01 = CvInvoke.cvGetSpatialMoment(ref handMovements, 0, 1);

            int current_X = Convert.ToInt32(m_10 / m_00) / 10;      // X location of centre of contour
            int current_Y = Convert.ToInt32(m_01 / m_00) / 10;      // Y location of center of contour

            // transfer control to webcam only if button has already been clicked

            if (PressedButtonState)
            {
                // move cursor to center of contour only if Finger count is 1 or 0 (finger_num<=1)
                // i.e. palm is closed

                if (Num_Fingers <= 1)
                {
                    Cursor.Position = new Point(current_X * 20, current_Y * 20);
                }

                // Leave the cursor where it was and Do mouse click, if finger count >= 4

                if (Num_Fingers == 5)
                {
                    DoMouseRightClick();                     //  mouse right-click function
                }

                if (Num_Fingers == 4)
                {
                    DoMouseDoubleClick();                     //  mouse double-click function
                }
            }

            iborignal.Image = Orignal_img;               // display the original image
        }
Example #17
File: Form1.cs Project: AirNav/AirNav
        public void ProcessFrameAndUpdateGUI(object sender, EventArgs e)
        {
            int    fingerCount = 0;
            Double contourArea = 0;
            Double maxArea     = 0;

            imgFrame = WebCamFeed.QueryFrame();

            sensitivity = sensitivitySlider.Value;      //Defines scope for navigation ; changed from the SETTINGS page.

            if (imgFrame == null)
            {
                return;
            }

            if (gloveMode.Checked)          // Changes 'minRange' & 'maxRange' for Contrasting Backgrounds
            {
                gloveColor.Enabled = true;
            }
            else
            {
                gloveColor.Enabled = false;
            }

            Image <Ycc, Byte> currentYCrCbFrame = imgFrame.Convert <Ycc, byte>();
            //Converts the original frame to a YCrCb frame for filtering.
            Image <Gray, byte> skin = new Image <Gray, byte>(imgFrame.Width, imgFrame.Height);

            //Stores Gray image of areas of interest
            skin = currentYCrCbFrame.InRange(minRange, maxRange);
            //Extracts areas within range for skin color

            //Noise Filtering
            StructuringElementEx rect_12 = new StructuringElementEx(10, 10, 5, 5, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(skin, skin, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(skin, skin, rect_6, 2);
            skin = skin.Flip(FLIP.HORIZONTAL);
            skin = skin.SmoothGaussian(9);

            imgFrame = imgFrame.Flip(FLIP.HORIZONTAL);

            Contour <Point> contours       = skin.FindContours();
            Contour <Point> biggestContour = null;

            while (contours != null)
            {
                contourArea = contours.Area;
                if (contourArea > maxArea)
                {
                    maxArea        = contourArea;
                    biggestContour = contours;
                }
                contours = contours.HNext;          //checks for the next contour
            }

            #region Convexity Defects Algorithm
            if (biggestContour != null)
            {
                fingerCount = 0;

                biggestContour = biggestContour.ApproxPoly(0.00025);
                imgFrame.Draw(biggestContour, new Bgr(Color.LimeGreen), 2);  //For debugging

                Hull    = biggestContour.GetConvexHull(ORIENTATION.CV_CLOCKWISE);
                defects = biggestContour.GetConvexityDefacts(storage, ORIENTATION.CV_CLOCKWISE);
                imgFrame.DrawPolyline(Hull.ToArray(), true, new Bgr(0, 0, 256), 2);     //For debugging

                box = biggestContour.GetMinAreaRect();

                defectArray = defects.ToArray();
                for (int i = 0; i < defects.Total; i++)
                {
                    PointF startPoint = new PointF((float)defectArray[i].StartPoint.X,
                                                   (float)defectArray[i].StartPoint.Y);

                    PointF depthPoint = new PointF((float)defectArray[i].DepthPoint.X,
                                                   (float)defectArray[i].DepthPoint.Y);

                    PointF endPoint = new PointF((float)defectArray[i].EndPoint.X,
                                                 (float)defectArray[i].EndPoint.Y);


                    CircleF startCircle = new CircleF(startPoint, 5f);
                    CircleF depthCircle = new CircleF(depthPoint, 5f);
                    CircleF endCircle   = new CircleF(endPoint, 5f);


                    if ((startCircle.Center.Y < box.center.Y || depthCircle.Center.Y < box.center.Y) &&
                        (startCircle.Center.Y < depthCircle.Center.Y) &&
                        (Math.Sqrt(Math.Pow(startCircle.Center.X - depthCircle.Center.X, 2) +
                                   Math.Pow(startCircle.Center.Y - depthCircle.Center.Y, 2)) >
                         box.size.Height / 6.5))
                    {
                        fingerCount++;
                    }
                }
            }
            #endregion

            MCvMoments moment = new MCvMoments();

            try
            {
                moment = biggestContour.GetMoments();
            }
            catch (NullReferenceException except)
            {
                AirNav.BalloonTipTitle = "AirNav";
                AirNav.BalloonTipText  = "Contour Not Found: " + except.Message;
                AirNav.ShowBalloonTip(1000);
                return;
            }

            try
            {
                CvInvoke.cvMoments(biggestContour, ref moment, 0);
            }
            catch (Exception)
            {
                throw;
            }

            double m00 = CvInvoke.cvGetSpatialMoment(ref moment, 0, 0);
            double m10 = CvInvoke.cvGetSpatialMoment(ref moment, 1, 0);
            double m01 = CvInvoke.cvGetSpatialMoment(ref moment, 0, 1);
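            // The centre of the palm is the contour centroid (m10/m00, m01/m00); the division by 10 below
            // maps it onto a coarser grid before it is handed to Navigate.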

            int currentX = Convert.ToInt32(m10 / m00) / 10;
            int currentY = Convert.ToInt32(m01 / m00) / 10;
            // Current coordinates of the center of the palm

            if (showCoordinates)
            {
                coordinatesTextBox.Text = "X: " + currentX.ToString() + "     Y: " + currentY.ToString();
            }

            if (fingerCount >= 4 && !started && startPressed)
            {
                started = true;
            }
            if (started)
            {
                Navigate(fingerCount, currentX, currentY);
            }

            display.SetZoomScale(0.5, new Point(100, 100));
            if (displayToggle)
            {
                display.Image = skin;
            }
            else
            {
                display.Image = imgFrame;
            }
        }
Example #18
        public double MatchIteration(Matrix X, Matrix Y, Matrix V1, Matrix V2, Matrix t1, Matrix t2)
        {
            timer.Clear();
            double timeused = 0;

            timer.Restart();
            var tk   = t1.Clone(); // tk=t1;
            int w    = 4;
            int ndum = (int)(nsamp * ndum_frac);

            nsamp1 = nsamp2 = nsamp;

            #region demo2 iteration
            Matrix Xk = X.Clone();
            int    N1 = V1.Rows, N2 = V2.Columns;

            if (display_flag)
            {
                Draw(X, Y, V1, V2, @"D:\Play Data\Iteration\原图.bmp", "原始图像和采样");
                //DrawGradient(X, Y, t1, t2, N2, N1, @"D:\Play Data\Iteration\原图梯度.bmp", "原始图像切向量");
            }

            int k         = 0;
            var out_vec_1 = Utils.InitArray <bool>(nsamp1, false); //out_vec_1=zeros(1,nsamp1);
            var out_vec_2 = Utils.InitArray <bool>(nsamp2, false); //out_vec_2=zeros(1,nsamp2);

            double ori_weight = 0.1;
            double tan_eps = 1.0;
            bool   affine_start_flag = true;
            bool   polarity_flag = true;
            double matchcost = double.MaxValue;
            double sc_cost = 0, aff_cost = 0, E = 0;

            Matrix cx = null, cy = null; // cx, cy are the two solution columns of the interpolation linear system
            Matrix axt = null, wxt = null, ayt = null, wyt = null, d2 = null, U = null,
                   X2 = null, Y2 = null, X2b = null, X3b = null, Y3 = null;
            double mean_dist_1 = 0, mean_dist_2 = 0;
            var    min1 = new[] { new { Val = 0.0, Idx = 0 } };
            var    min2 = min1;

            #region Coordinates used to draw the grid
            Matrix coordX = null, coordY = null;
            int    coordMargin = (int)(N1 * coordMarginRate);
            MatrixUtils.CreateGrid(N1 + coordMargin * 2, N2 + coordMargin * 2, out coordX, out coordY);
            coordX = coordX.Each(v => v - coordMargin * 2);
            coordY = coordY.Each(v => v - coordMargin * 2);
            //int MM = N1 * N2 / 25; // M=length(x);
            #endregion

            timeused += timer.StopAndSay("初始化");

            while (k < n_iter)
            {
                Debug("Iter={0}", k);

                #region Compute the two shape contexts
                timer.Restart();
                // [BH1,mean_dist_1]=sc_compute(Xk',zeros(1,nsamp),mean_dist_global,nbins_theta,nbins_r,r_inner,r_outer,out_vec_1);
                var BH1 = ComputeSC(Xk.Transpose(), Zeros(1, nsamp), mean_dist_global, out mean_dist_1, out_vec_1);
                //var BH1 = ComputeSC(Xk.Transpose(), t1.Transpose(), mean_dist_global, out mean_dist_1, out_vec_1);

                // [BH2,mean_dist_2]=sc_compute(Y',zeros(1,nsamp),mean_dist_global,nbins_theta,nbins_r,r_inner,r_outer,out_vec_2);
                var BH2 = ComputeSC(Y.Transpose(), Zeros(1, nsamp), mean_dist_global, out mean_dist_2, out_vec_2);
                //var BH2 = ComputeSC(Y.Transpose(), t2.Transpose(), mean_dist_global, out mean_dist_2, out_vec_2);

                timeused += timer.StopAndSay("计算两个形状上下文");

                Debug("Mean_dist_1:{0:F4}", mean_dist_1);
                Debug("Mean_dist_2:{0:F4}", mean_dist_2);
                #endregion

                #region Compute lambda_o and beta_k
                double lambda_o;
                if (affine_start_flag)
                {
                    if (k == 0)
                    {
                        lambda_o = 1000;
                    }
                    else
                    {
                        lambda_o = beta_init * Math.Pow(r, k - 1); // lambda_o=beta_init*r^(k-2);
                    }
                }
                else
                {
                    lambda_o = beta_init * Math.Pow(r, k); // lambda_o=beta_init*r^(k-1);
                }
                double beta_k = mean_dist_2 * mean_dist_2 * lambda_o;
                #endregion

                #region Compute the cost matrix
                timer.Restart();
                var costmat_shape = HistCost(BH1, BH2); // costmat_shape = hist_cost_2(BH1, BH2);

                // theta_diff=repmat(tk,1,nsamp)-repmat(t2',nsamp,1);
                var theta_diff = tk.RepMat(1, nsamp) - t2.Transpose().RepMat(nsamp, 1);

                Matrix costmat_theta;
                if (polarity_flag)
                {
                    // costmat_theta=0.5*(1-cos(theta_diff));
                    //costmat_theta = 0.5 * (Ones(costmat_shape.Rows, costmat_shape.Columns) - theta_diff.Each(v => Math.Cos(v)));
                    costmat_theta = theta_diff.Each(v => 0.5 * (1 - Math.Cos(v)));
                }
                else
                {
                    // costmat_theta=0.5*(1-cos(2*theta_diff));
                    //costmat_theta = 0.5 * (Ones(costmat_shape.Rows, costmat_shape.Columns) - theta_diff.Each(v => Math.Cos(2 * v)));
                    costmat_theta = theta_diff.Each(v => 0.5 * (1 - Math.Cos(2 * v)));
                }
                // costmat=(1-ori_weight)*costmat_shape+ori_weight*costmat_theta;
                var costmat = (1 - ori_weight) * costmat_shape + ori_weight * costmat_theta;

                int nptsd    = nsamp + ndum;                           // nptsd=nsamp+ndum;
                var costmat2 = new DenseMatrix(nptsd, nptsd, eps_dum); // costmat2=eps_dum*ones(nptsd,nptsd);
                costmat2.SetSubMatrix(0, nsamp, 0, nsamp, costmat);    // costmat2(1:nsamp,1:nsamp)=costmat;
                timeused += timer.StopAndSay("计算代价矩阵");
                #endregion

                #region Hungarian algorithm
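                // The KM (Kuhn-Munkres / Hungarian) solver used here works on integer weights, so the
                // real-valued cost matrix is scaled by 10000 before matching and the optimal cost is
                // scaled back down afterwards to recover matchcost.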
                timer.Restart();
                var costmat_int = new int[nptsd, nptsd];
                for (int i = 0; i < nptsd; ++i)
                {
                    for (int j = 0; j < nptsd; ++j)
                    {
                        costmat_int[i, j] = (int)(costmat2[i, j] * 10000);
                    }
                }
                var km = new KM(nptsd, costmat_int);
                km.Match(false);
                matchcost = km.MatchResult / 10000.0;
                int[] cvec = km.MatchPair; // cvec=hungarian(costmat2);
                timeused += timer.StopAndSay("匈牙利算法");
                #endregion

                #region Compute outlier flags and reorder the matched points
                timer.Restart();
                int[] cvec2 = cvec.Select((v, i) => new { Val = v, Idx = i })
                              .OrderBy(v => v.Val)
                              .Select(v => v.Idx)
                              .ToArray();                                         // [a,cvec2]=sort(cvec);
                out_vec_1 = cvec2.Take(nsamp1).Select(v => v > nsamp2).ToArray(); // out_vec_1=cvec2(1:nsamp1)>nsamp2;
                out_vec_2 = cvec.Take(nsamp2).Select(v => v > nsamp1).ToArray();  // out_vec_2=cvec(1:nsamp2)>nsamp1;

                //X2 = NaNs(nptsd, 2); // X2=NaN*ones(nptsd,2);
                //X2.SetSubMatrix(0, nsamp1, 0, X2.Columns, Xk); // X2(1:nsamp1,:)=Xk;
                //X2 = X2.SortRowsBy(cvec); // X2=X2(cvec,:);

                X2b = NaNs(nptsd, 2);                           // X2b=NaN*ones(nptsd,2);
                X2b.SetSubMatrix(0, nsamp1, 0, X2b.Columns, X); // X2b(1:nsamp1,:)=X;
                X2b = X2b.SortRowsBy(cvec);                     // X2b=X2b(cvec,:);

                Y2 = NaNs(nptsd, 2);                            // Y2=NaN*ones(nptsd,2);
                Y2.SetSubMatrix(0, nsamp2, 0, Y2.Columns, Y);   // Y2(1:nsamp2,:)=Y;


                var ind_good = X2b.GetColumn(1).Take(nsamp).FindIdxBy(v => !double.IsNaN(v)); // ind_good=find(~isnan(X2b(1:nsamp,1)));
                int n_good   = ind_good.Length;                                               // n_good=length(ind_good);

                X3b       = X2b.FilterRowsBy(ind_good);                                       //  X3b=X2b(ind_good,:);
                Y3        = Y2.FilterRowsBy(ind_good);                                        // Y3=Y2(ind_good,:);
                timeused += timer.StopAndSay("计算野点标记向量,重排匹配点");
                #endregion

                #region figure 2
                if (display_flag)
                {
                    //figure(2)
                    //plot(X2(:,1),X2(:,2),'b+',Y2(:,1),Y2(:,2),'ro')
                    //hold on
                    //h=plot([X2(:,1) Y2(:,1)]',[X2(:,2) Y2(:,2)]','k-');

                    //if display_flag
                    //%	 set(h,'linewidth',1)
                    //quiver(Xk(:,1),Xk(:,2),cos(tk),sin(tk),0.5,'b') // draw arrows
                    //quiver(Y(:,1),Y(:,2),cos(t2),sin(t2),0.5,'r')
                    DrawGradient(Xk, Y, tk, t2, N2, N1,
                                 String.Format(@"D:\Play Data\Iteration\梯度\{0}.bmp", k),
                                 String.Format("Iter={0}梯度方向\n匹配点数{1}", k, n_good));
                    //end
                    //hold off
                    //axis('ij')
                    //title([int2str(n_good) ' correspondences (warped X)'])
                    //axis([1 N2 1 N1])
                    //drawnow
                }
                #endregion

                #region figure 3: show correspondences between the untransformed images
                if (display_flag)
                {
                    //% show the correspondences between the untransformed images
                    //figure(3)
                    //plot(X(:,1),X(:,2),'b+',Y(:,1),Y(:,2),'ro')
                    //ind=cvec(ind_good);
                    //hold on
                    //plot([X2b(:,1) Y2(:,1)]',[X2b(:,2) Y2(:,2)]','k-')
                    //hold off
                    //axis('ij')
                    //title([int2str(n_good) ' correspondences (unwarped X)'])
                    //axis([1 N2 1 N1])
                    //drawnow
                    Draw(X, Y, cvec, null, N2, N1, String.Format(@"D:\Play Data\Iteration\匹配\{0}.bmp", k),
                         String.Format("Iter={0}\n匹配代价{1},匹配数{2}", k, matchcost, n_good));
                }
                #endregion

                #region Solve for the transformation
                timer.Restart();
                Bookstein(X3b, Y3, beta_k, ref cx, ref cy, ref E); // [cx,cy,E]=bookstein(X3b,Y3,beta_k);
                timeused += timer.StopAndSay("求解变换矩阵");
                #endregion

                #region Apply the solved transformation to the points and gradients
                timer.Restart();
                // % calculate affine cost
                var A = MatrixUtils.RankHorizon(
                    cx.GetSubMatrix(n_good + 1, 2, 0, 1), cy.GetSubMatrix(n_good + 1, 2, 0, 1)
                    );                            //A=[cx(n_good+2:n_good+3,:) cy(n_good+2:n_good+3,:)];
                var s = new Svd(A, true).S();     // s=svd(A);
                aff_cost = Math.Log(s[0] / s[1]); // aff_cost=log(s(1)/s(2));

                // % calculate shape context cost
                min1 = costmat.GetColumns().Select(col => {
                    int minwhere = 0;
                    for (int i = 1; i < col.Count; ++i)
                    {
                        if (col[i] < col[minwhere])
                        {
                            minwhere = i;
                        }
                    }
                    return(new { Val = col[minwhere], Idx = minwhere });
                }).ToArray();// [a1,b1]=min(costmat,[],1);
                min2 = costmat.GetRows().Select(row => {
                    int minwhere = 0;
                    for (int i = 1; i < row.Count; ++i)
                    {
                        if (row[i] < row[minwhere])
                        {
                            minwhere = i;
                        }
                    }
                    return(new { Val = row[minwhere], Idx = minwhere });
                }).ToArray();                                                           // [a2,b2]=min(costmat,[],2);}
                sc_cost = Math.Max(min1.Average(a => a.Val), min2.Average(a => a.Val)); // sc_cost=max(mean(a1),mean(a2));

                // % warp each coordinate
                axt = cx.GetSubMatrix(n_good, 3, 0, 1).Transpose(); // axt: the last three entries of cx, i.e. the x component of the affine part a
                wxt = cx.GetSubMatrix(0, n_good, 0, 1).Transpose(); // wxt: the first n entries of cx, i.e. the x component of the warp coefficients w
                ayt = cy.GetSubMatrix(n_good, 3, 0, 1).Transpose();
                wyt = cy.GetSubMatrix(0, n_good, 0, 1).Transpose();

                d2 = Dist2(X3b, X).Each(v => v > 0 ? v : 0);                // d2=max(dist2(X3b,X),0);
                U  = d2.PointMultiply(d2.Each(v => Math.Log(v + Epsilon))); // U=d2.*log(d2+eps);

                Debug("MatchCost:{0:F4}\taff_cost:{1:F4}\tsc_cost:{2:F4}\tE:{3:F8}", matchcost, aff_cost, sc_cost, E);

                var Z = Transformation(X, U, axt, wxt, ayt, wyt);

                //% apply the warp to the tangent vectors to get the new angles
                var Xtan = X + tan_eps * MatrixUtils.RankHorizon(t1.Each(Math.Cos), t1.Each(Math.Sin)); // Xtan=X+tan_eps*[cos(t1) sin(t1)];
                d2 = Dist2(X3b, Xtan).Each(v => v > 0 ? v : 0);                                         // d2=max(dist2(X3b,Xtan),0);
                U  = d2.PointMultiply(d2.Each(v => Math.Log(v + Epsilon)));                             // U=d2.*log(d2+eps);
                //Transformation(Xtan, U, axt, wxt, ayt, wyt, out fx, out fy);
                //var Ztan = MatrixUtils.RankVertical(fx, fy).Transpose(); // Ztan=[fx; fy]';
                var Ztan = Transformation(Xtan, U, axt, wxt, ayt, wyt);
                for (int i = 0; i < nsamp; ++i)
                {
                    tk[i, 0] = Math.Atan2(Ztan[i, 1] - Z[i, 1], Ztan[i, 0] - Z[i, 0]);
                }//tk=atan2(Ztan(:,2)-Z(:,2),Ztan(:,1)-Z(:,1));
                timeused += timer.StopAndSay("通过解出来的变换对点和梯度进行变换");
                #endregion

                #region figure 4: show the warped point set
                if (display_flag)
                {
                    //figure(4)
                    //plot(Z(:,1),Z(:,2),'b+',Y(:,1),Y(:,2),'ro');
                    //axis('ij')
                    //title(['k=' int2str(k) ', \lambda_o=' num2str(lambda_o) ', I_f=' num2str(E) ', aff.cost=' num2str(aff_cost) ', SC cost=' num2str(sc_cost)])
                    //axis([1 N2 1 N1])
                    //% show warped coordinate grid
                    //fx_aff=cx(n_good+1:n_good+3)'*[ones(1,M); x'; y'];
                    //d2=dist2(X3b,[x y]);
                    //fx_wrp=cx(1:n_good)'*(d2.*log(d2+eps));
                    //fx=fx_aff+fx_wrp;
                    //fy_aff=cy(n_good+1:n_good+3)'*[ones(1,M); x'; y'];
                    //fy_wrp=cy(1:n_good)'*(d2.*log(d2+eps));
                    //fy=fy_aff+fy_wrp;
                    //hold on
                    //plot(fx,fy,'k.','markersize',1)
                    //hold off
                    //drawnow
                    d2 = Dist2(X3b, MatrixUtils.RankHorizon(coordX, coordY));//d2=dist2(X3b,[x y]);
                    U  = d2.PointMultiply(d2.Each(v => Math.Log(v + Epsilon)));
                    //Transformation(MatrixUtils.RankHorizon(coordX, coordY), U, axt, wxt, ayt, wyt, out fx, out fy);
                    //var coordsT = MatrixUtils.RankVertical(fx, fy).Transpose();
                    var coordsT = Transformation(MatrixUtils.RankHorizon(coordX, coordY), U, axt, wxt, ayt, wyt);

                    Draw(Z, Y, null, coordsT, N2, N1, String.Format(@"D:\Play Data\Iteration\变换\{0}.bmp", k),
                         String.Format("Iter={0}\nλo={1:F4},If={2:F4},aff_cost{3:F4},sc_cost{4:F4}", k, lambda_o, E, aff_cost, sc_cost));
                }
                #endregion

                Xk = Z.Clone();
                ++k;
            }

            #endregion

            #region cost computation after the iterations finish

            #region attempt to compute Dsc
            // Xk is the warped result of Q, while Y is the template shape P

            /*
             * Dsc = mean over p in P of min over q in Q of C(p, T(q))
             *     + mean over q in Q of min over p in P of C(p, T(q))
             */
            double mean_dist_final_1;
            var    scQ = ComputeSC(Xk.Transpose(), Zeros(1, nsamp), mean_dist_global, out mean_dist_final_1, out_vec_1);
            //var scQ = ComputeSC(Xk.Transpose(), Zeros(1, nsamp), mean_dist_1, out mean_dist_final_1, out_vec_1);
            //var scQ = ComputeSC(Xk.Transpose(), t1.Transpose(), mean_dist_1, out mean_dist_final_1, out_vec_1);

            double mean_dist_final_2;
            var    scP = ComputeSC(Y.Transpose(), Zeros(1, nsamp), mean_dist_global, out mean_dist_final_2, out_vec_2);
            //var scP = ComputeSC(Y.Transpose(), Zeros(1, nsamp), mean_dist_2, out mean_dist_final_2, out_vec_2);
            //var scP = ComputeSC(Y.Transpose(), t2.Transpose(), mean_dist_2, out mean_dist_final_2, out_vec_2);

            var    costmat_final = HistCost(scQ, scP);
            double distance_sc   = costmat_final.GetRows().Select(row => row.Min()).Average()
                                   + costmat_final.GetColumns().Select(col => col.Min()).Average();
            Debug("distance_sc:{0}", distance_sc);

            #endregion

            #region image warping and interpolation
            timer.Restart();
            //[x,y]=meshgrid(1:N2,1:N1);
            //x=x(:);y=y(:);
            Matrix x = null, y = null;
            MatrixUtils.CreateGrid(N1, N2, out x, out y);
            //int M = N1 * N2; // M=length(x);
            d2 = Dist2(X3b, MatrixUtils.RankHorizon(x, y));//d2=dist2(X3b,[x y]);
            U  = d2.PointMultiply(d2.Each(v => Math.Log(v + Epsilon)));
            //Transformation(MatrixUtils.RankHorizon(x, y), U, axt, wxt, ayt, wyt, out fx, out fy);
            var fxy = Transformation(MatrixUtils.RankHorizon(x, y), U, axt, wxt, ayt, wyt);

            //disp('computing warped image...')
            //V1w=griddata(reshape(fx,N1,N2),reshape(fy,N1,N2),V1,reshape(x,N1,N2),reshape(y,N1,N2));
            Matrix V1w = Interpolation(
                fxy.GetSubMatrix(0, fxy.Rows, 0, 1).Reshape(N1, N2),
                fxy.GetSubMatrix(0, fxy.Rows, 1, 1).Reshape(N1, N2),
                V1
                );

            #region this crude interpolation leaves cracks in the image; try to patch them with a morphological closing
            Image <Gray, Byte> img = new Image <Gray, byte>(N2, N1);
            // iterate rows over N1 and columns over N2 (the image was created as width N2 x height N1)
            for (int i = 0; i < N1; ++i)
            {
                for (int j = 0; j < N2; ++j)
                {
                    img[i, j] = new Gray(V1w[i, j] * 255);
                }
            }
            var see = new StructuringElementEx(new int[, ] {
                { 1, 1, 1 }, { 1, 1, 1 }, { 1, 1, 1 }
            }, 1, 1);
            //img = img.MorphologyEx(see, Emgu.CV.CvEnum.CV_MORPH_OP.CV_MOP_CLOSE, 1);
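            // Dilate followed by Erode with the default 3x3 kernel amounts to a morphological
            // closing, which fills the thin cracks left by the crude interpolation above.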
            img = img.Dilate(1).Erode(1);
            for (int i = 0; i < N1; ++i)
            {
                for (int j = 0; j < N2; ++j)
                {
                    V1w[i, j] = img[i, j].Intensity / 255;
                }
            }
            img.Dispose();
            #endregion
            timeused += timer.StopAndSay("image warping and interpolation");
            #endregion

            //fz=find(isnan(V1w));
            //V1w(fz)=0;
            var ssd        = (V2 - V1w).Each(v => v * v); //ssd=(V2-V1w).^2;  %%%%%% the SSD is computed here
            var ssd_global = ssd.SumAll();                //ssd_global=sum(ssd(:));
            Debug("ssd_global:{0}", ssd_global);

            #region figure 5
            if (display_flag)
            {
                //   figure(5)
                //   subplot(2,2,1)
                //   im(V1)
                //   subplot(2,2,2)
                //   im(V2)
                //   subplot(2,2,4)
                //   im(V1w)
                //   title('V1 after warping')
                //   subplot(2,2,3)
                //   im(V2-V1w)
                //   h=title(['SSD=' num2str(ssd_global)]);
                //   colormap(cmap)
            }
            #endregion

            #region windowed SSD comparison

            timer.Restart();
            //%%%
            //%%% windowed SSD comparison
            //%%%
            var wd      = 2 * w + 1;                      //wd=2*w+1;
            var win_fun = MatrixUtils.GaussianKernal(wd); //win_fun=gaussker(wd);
            //% extract sets of blocks around each coordinate
            //% first do 1st shape; need to use transformed coords.
            var win_list_1 = Zeros(nsamp, wd * wd);//win_list_1=zeros(nsamp,wd^2);
            for (int qq = 0; qq < nsamp; ++qq)
            {
                int row_qq = (int)Xk[qq, 1],                            //   row_qq=round(Xk(qq,2));
                    col_qq = (int)Xk[qq, 0];                            //   col_qq=round(Xk(qq,1));
                row_qq = Math.Max(w + 1, Math.Min(N1 - w - 1, row_qq)); //   row_qq=max(w+1,min(N1-w,row_qq));
                col_qq = Math.Max(w + 1, Math.Min(N2 - w - 1, col_qq)); //   col_qq=max(w+1,min(N2-w,col_qq));
                //   tmp=V1w(row_qq-w:row_qq+w,col_qq-w:col_qq+w);
                var tmp = V1w.GetSubMatrix(row_qq - w, w * 2 + 1, col_qq - w, w * 2 + 1);
                tmp = win_fun.PointMultiply(tmp);//   tmp=win_fun.*tmp;
                //   win_list_1(qq,:)=tmp(:)';
                win_list_1.SetSubMatrix(qq, 1, 0, win_list_1.Columns, tmp.Reshape(1, tmp.Rows * tmp.Columns));
            }

            //% now do 2nd shape
            var win_list_2 = Zeros(nsamp, wd * wd);//win_list_2=zeros(nsamp,wd^2);
            for (int qq = 0; qq < nsamp; ++qq)
            {
                int row_qq = (int)Y[qq, 1],                             //   row_qq = round(Y(qq, 2));
                    col_qq = (int)Y[qq, 0];                             //   col_qq = round(Y(qq, 1));
                row_qq = Math.Max(w + 1, Math.Min(N1 - w - 1, row_qq)); //   row_qq=max(w+1,min(N1-w,row_qq));
                col_qq = Math.Max(w + 1, Math.Min(N2 - w - 1, col_qq)); //   col_qq=max(w+1,min(N2-w,col_qq));
                //   tmp=V2(row_qq-w:row_qq+w,col_qq-w:col_qq+w)
                var tmp = V2.GetSubMatrix(row_qq - w, w * 2 + 1, col_qq - w, w * 2 + 1);
                tmp = win_fun.PointMultiply(tmp);                                                              //   tmp=win_fun.*tmp;
                win_list_2.SetSubMatrix(qq, 1, 0, win_list_2.Columns, tmp.Reshape(1, tmp.Rows * tmp.Columns)); // win_list_2(qq,:)=tmp(:)';
            }

            var ssd_all = Dist2(win_list_1, win_list_2).Each(Math.Sqrt);//ssd_all=sqrt(dist2(win_list_1,win_list_2));
            timeused += timer.StopAndSay("windowed SSD comparison");
            #endregion

            #region final KNN step (its purpose is unclear in the original)

            timer.Restart();
            //%%%%%%% the KNN step happens here
            //% loop over nearest neighbors in both directions, project in
            //% both directions, take maximum
            double cost_1 = 0, cost_2 = 0;
            //List<double> cost_1 = new List<double>(), cost_2 = new List<double>();
            for (int qq = 0; qq < nsamp; ++qq)
            {
                cost_1 += ssd_all[qq, min2[qq].Idx]; //   cost_1=cost_1+ssd_all(qq,b2(qq));
                cost_2 += ssd_all[min1[qq].Idx, qq]; //   cost_2=cost_2+ssd_all(b1(qq),qq);
                //cost_1.Add(ssd_all[qq, min2[qq].Idx]);
                //cost_2.Add(ssd_all[min1[qq].Idx, qq]);
            }
            var ssd_local     = (1.0 / nsamp) * Math.Max(cost_1, cost_2);
            var ssd_local_avg = (1.0 / nsamp) * 0.5 * (cost_1 + cost_2);
            //var ssd_local = (1.0 / nsamp) * Math.Max(cost_1.Average(), cost_2.Average());//ssd_local=(1/nsamp)*max(mean(cost_1),mean(cost_2));
            //var ssd_local_avg = (1.0 / nsamp) * 0.5 * (cost_1.Average() + cost_2.Average());//ssd_local_avg=(1/nsamp)*0.5*(mean(cost_1)+mean(cost_2));
            Debug("ssd_local:{0}", ssd_local);
            Debug("ssd_local_avg:{0}", ssd_local_avg);
            timeused += timer.StopAndSay("compute ssd_local");
            #region final composite figure
            //if display_flag
            //%   set(h,'string',['local SSD=' num2str(ssd_local) ', avg. local SSD=' num2str(ssd_local_avg)])
            //   set(h,'string',['local SSD=' num2str(ssd_local)])
            //end
            if (display_flag)
            {
                Draw(V1, @"D:\Play Data\Iteration\结果\0-V1.bmp", "V1");
                Draw(V2, @"D:\Play Data\Iteration\结果\1-V2.bmp", "V2");
                Draw(V1w, @"D:\Play Data\Iteration\结果\2-V1w.bmp", "V1 after warping");
                Draw(V2 - V1w, @"D:\Play Data\Iteration\结果\3-V2-V1w.bmp",
                     String.Format("local SSD={0:F8}", ssd_local_avg));
            }
            #endregion
            #endregion

            #endregion

            var distance = 1.6 * ssd_local_avg + distance_sc + 0.3 * E; // the exact final distance formula is unclear; these weights appear to be empirical
            Debug("distance={0:F8}", distance);
            return(distance);
        }
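        // A hedged usage sketch (not part of the original sample): the combined distance returned
        // above can drive a simple nearest-template classification. The names ClassifyByShapeDistance,
        // templates and shapeDistance below are assumptions introduced only for illustration.
        public string ClassifyByShapeDistance(Matrix query, IDictionary <string, Matrix> templates,
                                              Func <Matrix, Matrix, double> shapeDistance)
        {
            string best         = null;
            double bestDistance = double.MaxValue;

            foreach (var kv in templates)
            {
                double d = shapeDistance(query, kv.Value); // e.g. the matching method above
                if (d < bestDistance)
                {
                    bestDistance = d;
                    best         = kv.Key;
                }
            }
            return(best);
        }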
예제 #19
0
        void ProcessFramAndUpdateGUI(object Sender, EventArgs agr)
        {
            int    Finger_num = 0;
            Double Result1    = 0;
            Double Result2    = 0;

            //querying image
            imgOrignal = CapWebCam.QueryFrame();

            if (imgOrignal == null)
            {
                return;
            }
            //Applying YCrCb filter
            Image <Ycc, Byte>  currentYCrCbFrame = imgOrignal.Convert <Ycc, byte>();
            Image <Gray, byte> skin = new Image <Gray, byte>(imgOrignal.Width, imgOrignal.Height);

            skin = currentYCrCbFrame.InRange(new Ycc(0, 131, 80), new Ycc(255, 185, 135));

            StructuringElementEx rect_12 = new StructuringElementEx(10, 10, 5, 5, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            //Eroding the source image using the specified structuring element
            CvInvoke.cvErode(skin, skin, rect_12, 1);

            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            //dilating the source image using the specified structuring element
            CvInvoke.cvDilate(skin, skin, rect_6, 2);

            skin = skin.Flip(FLIP.HORIZONTAL);
            //smoothing the filtered, eroded and dilated image
            skin = skin.SmoothGaussian(9);

            imgOrignal = imgOrignal.Flip(FLIP.HORIZONTAL);
            //extracting contours.
            Contour <Point> contours = skin.FindContours();

            Contour <Point> biggestContour = null;

            //extracting the biggest contour.
            while (contours != null)
            {
                Result1 = contours.Area;
                if (Result1 > Result2)
                {
                    Result2        = Result1;
                    biggestContour = contours;
                }
                contours = contours.HNext;
            }
            //applying the convexity defect algorithm to find the finger count
            if (biggestContour != null)
            {
                Finger_num = 0;

                biggestContour = biggestContour.ApproxPoly((0.00025));
                imgOrignal.Draw(biggestContour, new Bgr(Color.LimeGreen), 2);

                Hull    = biggestContour.GetConvexHull(ORIENTATION.CV_CLOCKWISE);
                defects = biggestContour.GetConvexityDefacts(storage, ORIENTATION.CV_CLOCKWISE);
                imgOrignal.DrawPolyline(Hull.ToArray(), true, new Bgr(0, 0, 256), 2);

                box = biggestContour.GetMinAreaRect();

                defectArray = defects.ToArray();

                for (int i = 0; i < defects.Total; i++)
                {
                    PointF startPoint = new PointF((float)defectArray[i].StartPoint.X,
                                                   (float)defectArray[i].StartPoint.Y);

                    PointF depthPoint = new PointF((float)defectArray[i].DepthPoint.X,
                                                   (float)defectArray[i].DepthPoint.Y);

                    PointF endPoint = new PointF((float)defectArray[i].EndPoint.X,
                                                 (float)defectArray[i].EndPoint.Y);


                    CircleF startCircle = new CircleF(startPoint, 5f);
                    CircleF depthCircle = new CircleF(depthPoint, 5f);
                    CircleF endCircle   = new CircleF(endPoint, 5f);
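                    // Count a finger when the defect's start (fingertip) or depth point lies above
                    // the palm's bounding-box centre, the start point lies above the depth point, and
                    // the fingertip is far enough from the valley (more than box height / 6.5).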


                    if ((startCircle.Center.Y < box.center.Y || depthCircle.Center.Y < box.center.Y) &&
                        (startCircle.Center.Y < depthCircle.Center.Y) &&
                        (Math.Sqrt(Math.Pow(startCircle.Center.X - depthCircle.Center.X, 2) +
                                   Math.Pow(startCircle.Center.Y - depthCircle.Center.Y, 2)) >
                         box.size.Height / 6.5))
                    {
                        Finger_num++;
                    }
                }

                label2.Text = Finger_num.ToString();            // updating finger count
            }

            // Finding the center of contour

            MCvMoments moment = new MCvMoments();               // a new MCvMoments object

            try
            {
                moment = biggestContour.GetMoments();           // Moments of biggestContour
            }
            catch (NullReferenceException except)
            {
                //label3.Text = except.Message;
                return;
            }

            CvInvoke.cvMoments(biggestContour, ref moment, 0);

            double m_00 = CvInvoke.cvGetSpatialMoment(ref moment, 0, 0);
            double m_10 = CvInvoke.cvGetSpatialMoment(ref moment, 1, 0);
            double m_01 = CvInvoke.cvGetSpatialMoment(ref moment, 0, 1);

            int current_X = Convert.ToInt32(m_10 / m_00) / 10;      // X location of centre of contour
            int current_Y = Convert.ToInt32(m_01 / m_00) / 10;      // Y location of center of contour

            // transfer control to webcam only if button has already been clicked

            if (button_pressed)
            {
                // move cursor to center of contour only if Finger count is 1 or 0
                // i.e. palm is closed

                if (Finger_num == 0 || Finger_num == 1)
                {
                    Cursor.Position = new Point(current_X * 20, current_Y * 20);
                }

                // Leave the cursor where it was and Do mouse click, if finger count >= 4

                if (Finger_num >= 4)
                {
                    DoMouseClick();                     // function clicks mouse left button
                }
            }

            iborignal.Image = imgOrignal;               // display the original image
        }
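        // DoMouseClick() is referenced above but not included in this snippet. A minimal sketch of
        // such a helper, assuming the common user32.dll mouse_event P/Invoke approach, could be:
        [System.Runtime.InteropServices.DllImport("user32.dll")]
        private static extern void mouse_event(uint dwFlags, uint dx, uint dy, uint dwData, int dwExtraInfo);

        private const uint MOUSEEVENTF_LEFTDOWN = 0x0002;
        private const uint MOUSEEVENTF_LEFTUP   = 0x0004;

        private void DoMouseClick()
        {
            // press and release the left mouse button at the current cursor position
            mouse_event(MOUSEEVENTF_LEFTDOWN | MOUSEEVENTF_LEFTUP, 0, 0, 0, 0);
        }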
예제 #20
0
        //tracks each color and builds its bounding rect
        public void color_traking(int index, int min1, int min2, int min3, int max1, int max2, int max3, Image <Bgr, Byte> iamge, Rectangle[] rect)
        {
            int pixCount = 0, small_pixCount = 0;

            Image <Ycc, Byte>  YCrCbFrame = iamge.Convert <Ycc, Byte>(); //convert to YCrCb
            Image <Gray, byte> colorSetting = new Image <Gray, byte>(YCrCbFrame.Width, YCrCbFrame.Height);

            Ycc YCrCb_min = new Ycc(min1, min2, min3);
            Ycc YCrCb_max = new Ycc(max1, max2, max3);                         //blue color range

            colorSetting = YCrCbFrame.InRange((Ycc)YCrCb_min, (Ycc)YCrCb_max); //apply the color range

            StructuringElementEx rect_12 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(colorSetting, colorSetting, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(colorSetting, colorSetting, rect_6, 2);          //erode then dilate

            Image <Bgr, Byte> colorCount = colorSetting.Convert <Bgr, Byte>(); //to count the pixels

            //find the small circle
            //YCrCb_min = new Ycc(0, 0, 0);
            //YCrCb_max = new Ycc(255, 146, 100);   //yellow range for the large circle

            YCrCb_min = new Ycc(0, 0, 0);
            //YCrCb_max = new Ycc(255, 150, 114);   //yellow range for the small circle
            YCrCb_max = new Ycc(255, 150, 120);   //yellow range for the small circle


            colorSetting = YCrCbFrame.InRange((Ycc)YCrCb_min, (Ycc)YCrCb_max); //apply the color range

            rect_12 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);
            CvInvoke.cvErode(colorSetting, colorSetting, rect_12, 1);
            rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);
            CvInvoke.cvDilate(colorSetting, colorSetting, rect_6, 2);                //erode then dilate

            Image <Bgr, Byte> small_colorCount = colorSetting.Convert <Bgr, Byte>(); //to count the pixels

            int x_p = 0, y_p = 0;                                                    // accumulators for the large circle pixels
            int small_x = 0, small_y = 0;                                            // accumulators for the small circle pixels

            //handle the case where the region falls outside the image
            if (color_ROI[index].X < 0)
            {
                color_ROI[index].X = 0;
            }
            if (color_ROI[index].Y < 0)
            {
                color_ROI[index].Y = 0;
            }

            if (color_ROI[index].X + img_width > iamge.Width)
            {
                color_ROI[index].X = iamge.Width - img_width;
            }
            if (color_ROI[index].Y + img_height > iamge.Height)
            {
                color_ROI[index].Y = iamge.Height - img_height;
            }

            //count the pixels
            for (int x = color_ROI[index].X; x < color_ROI[index].X + img_width; x++)
            {
                for (int y = color_ROI[index].Y; y < color_ROI[index].Y + img_height; y++)
                {
                    if (!colorCount[y, x].Equals(new Bgr(0, 0, 0)))
                    {
                        pixCount++;
                        x_p += x;
                        y_p += y;
                    }
                }
            }

            //act on the pixel count
            if (pixCount >= 10) //when enough pixels were found, update the ROI
            {
                int big_center_x = x_p / pixCount;
                int big_center_y = y_p / pixCount; //centre of the large circle

                //to detect disappearance: the coordinates can come out negative, so negative coordinates are deliberately clamped to 0, and the coordinates are set to -1 once the marker is gone
                int tmp_x = big_center_x - glo.TemplateWidth / 2;
                int tmp_y = big_center_y - glo.TemplateHeight / 2;

                int tmp_width  = glo.TemplateWidth;
                int tmp_height = glo.TemplateHeight;

                if (tmp_x < 0)
                {
                    tmp_width += tmp_x;
                    tmp_x      = 0;
                }
                if (tmp_y < 0)
                {
                    tmp_height += tmp_y;
                    tmp_y       = 0;
                }

                if (tmp_x + img_width > glo.rect_width)
                {
                    tmp_width = img_width - (tmp_x + img_width - glo.rect_width);
                }

                if (tmp_y + img_height > glo.rect_height)
                {
                    tmp_height = img_height - (tmp_y + img_height - glo.rect_height);
                }

                int x_end = tmp_x + img_width;
                int y_end = tmp_y + img_height;

                if (x_end > glo.rect_width)
                {
                    x_end = glo.rect_width;
                }
                if (y_end > glo.rect_height)
                {
                    y_end = glo.rect_height;
                }


                for (int x = tmp_x; x < x_end; x++)
                {
                    for (int y = tmp_y; y < y_end; y++)
                    {
                        if (!small_colorCount[y, x].Equals(new Bgr(0, 0, 0)))
                        {
                            small_pixCount++;
                            small_x += x;
                            small_y += y;
                        }
                    }
                }

                rect[index]        = new Rectangle(tmp_x, tmp_y, tmp_width, tmp_height); //top-left coordinates of the rectangle
                color_ROI[index].X = big_center_x - glo.TemplateWidth / 2;
                color_ROI[index].Y = big_center_y - glo.TemplateHeight / 2;

                if (small_pixCount != 0)
                {
                    int small_center_x = small_x / small_pixCount;
                    int small_center_y = small_y / small_pixCount;

                    int C = big_center_x - small_center_x;
                    int D = big_center_y - small_center_y;
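                    // angle of the vector pointing from the small marker centre to the large marker
                    // centre, converted from radians to degrees and wrapped into [0, 360) below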

                    double E      = Math.Atan2(D, C);
                    double result = E * (180 / Math.PI);

                    if (result < 0)
                    {
                        result = 360 + result;
                    }

                    // glo.mapObstacleLock.EnterWriteLock(); //critical section start

                    double ref_angle = 45;
                    double margin    = 13;


                    if (ref_angle * 2 - margin <= result && result <= ref_angle * 2 + margin)
                    {
                        glo.direction[index] = 0;
                    }
                    else if (ref_angle * 3 - margin <= result && result <= ref_angle * 3 + margin)
                    {
                        glo.direction[index] = 1;
                    }
                    else if (ref_angle * 4 - margin <= result && result <= ref_angle * 4 + margin)
                    {
                        glo.direction[index] = 2;
                    }
                    else if (ref_angle * 5 - margin <= result && result <= ref_angle * 5 + margin)
                    {
                        glo.direction[index] = 3;
                    }
                    else if (ref_angle * 6 - margin <= result && result <= ref_angle * 6 + margin)
                    {
                        glo.direction[index] = 4;
                    }
                    else if (ref_angle * 7 - margin <= result && result <= ref_angle * 7 + margin)
                    {
                        glo.direction[index] = 5;
                    }
                    else if (ref_angle * 0 <= result && result <= ref_angle * 0 + margin || ref_angle * 8 - margin <= result && result <= ref_angle * 8)
                    {
                        glo.direction[index] = 6;
                    }
                    else if (ref_angle * 1 - margin <= result && result <= ref_angle * 1 + margin)
                    {
                        glo.direction[index] = 7;
                    }
                    else
                    {
                        glo.direction[index] = -1;
                    }

                    /*
                     * if (index == 0)
                     * {
                     *   if (glo.direction[index] != -1)
                     *   {
                     *       Console.WriteLine("result = " + result);
                     *       Console.WriteLine("globals.direction[i] = " + glo.direction[index]);
                     *   }
                     *
                     * }
                     */

                    //glo.mapObstacleLock.ExitWriteLock(); //critical section end

                    /*
                     * if (index == 0)
                     * {
                     *  if (glo.direction[index] == -1)
                     *      Console.WriteLine("i = " + index + " direction[index] = " + glo.direction[index] + "unknown angle" + " result = " + result);
                     *  else
                     *      Console.WriteLine("i = " + index + " direction[index] = " + glo.direction[index] + " result = " + result);
                     * }
                     */
                    // if (index == 3)
                    //   Console.WriteLine("");

                    /*
                     * if (glo.direction[index] == -1)
                     * Console.WriteLine("i = " + index + " direction[index] = " + glo.direction[index] + " result = " + result);
                     */
                }
                else
                {
                    rect[index]         = new Rectangle(0, 0, 0, 0); //when the marker is lost, set the position to (0,0) with zero width and height
                    color[index]        = false;
                    change_check[index] = true;
                    color_count--;

                    color_ROI[index].X = 0;
                    color_ROI[index].Y = 0;
                    Console.WriteLine("yellow marker lost");
                }
            }
            else
            {
                rect[index]         = new Rectangle(0, 0, 0, 0); //when the marker is lost, set the position to (0,0) with zero width and height
                color[index]        = false;
                change_check[index] = true;
                color_count--;
                color_ROI[index].X = 0;
                color_ROI[index].Y = 0;
                Console.WriteLine("vehicle lost");
            }
        }
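        // The long if/else chain above quantizes the marker angle into eight 45-degree sectors with a
        // +/-13-degree dead band. An equivalent arithmetic formulation (a sketch only, not part of the
        // original sample; the name AngleToDirection is an assumption) is:
        private static int AngleToDirection(double resultDegrees, double refAngle = 45, double margin = 13)
        {
            int k = (int)Math.Round(resultDegrees / refAngle);  // nearest multiple of 45 degrees
            if (Math.Abs(resultDegrees - k * refAngle) > margin)
            {
                return(-1);                                     // outside the dead band: unknown direction
            }
            return((k + 6) % 8);                                // k = 2..7 -> 0..5, k = 0 or 8 -> 6, k = 1 -> 7
        }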
예제 #21
0
        private void timer1_Tick(object sender, EventArgs e)
        {
            using (Image <Bgr, byte> nextFrame = cap.QueryFrame())
            {
                if (nextFrame != null)
                {
                    Image <Gray, byte> grayframe = nextFrame.Convert <Gray, byte>();
                    Image <Bgr, Byte>  img       = nextFrame;



                    //convert to grayscale

                    Image <Gray, Byte> gray = img.Convert <Gray, Byte>();



                    //convert to binary image using the threshold

                    gray = gray.ThresholdBinary(new Gray(128), new Gray(128));



                    // copy pixels from the original image where pixels in

                    // mask image is nonzero

                    Image <Bgr, Byte> newimg = img.Copy(gray);



                    // display result
                    pictureBox2.Image = newimg.ToBitmap();
                    pictureBox1.Image = nextFrame.ToBitmap();



                    /////////////////////



                    //Image<Gray, Byte> grayscale = nextFrame.Convert<Gray, Byte>();
                    //grayscale = grayscale.Canny(new Gray(0), new Gray(255)).Not(); //invert with Not()
                    //img = nextFrame.And(grayscale.Convert<Bgr, Byte>(), grayscale); //And function in action

                    //pictureBox3.Image = img.ToBitmap();

///////////////////////////////

                    Image <Bgr, Byte>  ori       = nextFrame;
                    Image <Gray, Byte> grayscale = ori.Convert <Gray, Byte>();

                    Image <Gray, Byte> thresh = grayscale.ThresholdToZero(new Gray(128));//(new Gray(50), new Gray(255));


                    StructuringElementEx ex = new StructuringElementEx(8, 8, 1, 1, CV_ELEMENT_SHAPE.CV_SHAPE_ELLIPSE);

                    thresh._MorphologyEx(ex, CV_MORPH_OP.CV_MOP_OPEN, 1);

                    pictureBox3.Image = thresh.ToBitmap();
                }
            }
        }
예제 #22
0
        //extracts color information using the Ycc color model
        public void YccColorCheck(int index, int min1, int min2, int min3, int max1, int max2, int max3)
        {
            int pixCount = 0;

            Image <Ycc, Byte>  YCrCbFrame   = colorCheckImage.Convert <Ycc, Byte>();                       //convert to YCrCb
            Image <Gray, byte> colorSetting = new Image <Gray, byte>(YCrCbFrame.Width, YCrCbFrame.Height); //gray mask from the Ycc range check, so it can be eroded and dilated

            Ycc YCrCb_min = new Ycc(min1, min2, min3);
            Ycc YCrCb_max = new Ycc(max1, max2, max3);                         // color range

            colorSetting = YCrCbFrame.InRange((Ycc)YCrCb_min, (Ycc)YCrCb_max); //apply the color range

            StructuringElementEx rect_12 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvErode(colorSetting, colorSetting, rect_12, 1);
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            CvInvoke.cvDilate(colorSetting, colorSetting, rect_6, 2);          //erode then dilate

            Image <Bgr, Byte> colorCount = colorSetting.Convert <Bgr, Byte>(); //to count the pixels

            //handle the case where the region falls outside the image
            if (pos_x < 0)
            {
                pos_x = 0;
            }
            if (pos_y < 0)
            {
                pos_y = 0;
            }

            if (pos_x + img_width > colorCheckImage.Width)
            {
                pos_x = colorCheckImage.Width - img_width;
            }
            if (pos_y + img_height > colorCheckImage.Height)
            {
                pos_y = colorCheckImage.Height - img_height;
            }

            for (int x = pos_x; x < pos_x + img_width; x++)
            {
                for (int y = pos_y; y < pos_y + img_height; y++)
                {
                    if (!colorCount[y, x].Equals(new Bgr(0, 0, 0)))
                    {
                        pixCount++;

                        if (totalPicxel / 3 <= pixCount) //once enough pixels are found, update the color arrays and stop
                        {
                            color[index]       = true;
                            color_ROI[index].X = x;
                            color_ROI[index].Y = y;
                            color_count++;
                            change_check[index] = false;

                            int margin = 0;
                            ugvList.Add(new UGV("A" + index, glo.TemplateWidth - margin, glo.TemplateHeight - margin, x + 30, y + 30, colorStr[index]));
                            return;
                        }
                    }
                }
            }
        }
예제 #23
0
        public FingerprintFeatureVector extractFeatureVector(EmguGrayImageInputData input)
        {
            var input_img = input.Image.Clone();

            Image <Gray, byte> grayImg = input_img.Convert <Gray, byte>();
            Image <Gray, byte> binImg  = new Image <Gray, byte>(grayImg.Size);

            // OTSU threshold
            CvInvoke.cvThreshold(grayImg, binImg, 100, 255, Emgu.CV.CvEnum.THRESH.CV_THRESH_OTSU | Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY);

            // INVERT
            binImg = binImg.Not();

            // SKELETONIZATION
            Image <Gray, byte> skel = new Image <Gray, byte>(binImg.Size);

            for (int y = 0; y < skel.Height; y++)
            {
                for (int x = 0; x < skel.Width; x++)
                {
                    skel.Data[y, x, 0] = 0;
                }
            }

            Image <Gray, byte> img = skel.Copy();

            for (int y = 0; y < skel.Height; y++)
            {
                for (int x = 0; x < skel.Width; x++)
                {
                    img.Data[y, x, 0] = binImg.Data[y, x, 0];
                }
            }

            StructuringElementEx element;

            element = new StructuringElementEx(3, 3, 1, 1, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_CROSS);
            Image <Gray, byte> temp;

            bool done = false;
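            // Iterative morphological skeletonization: on each pass, keep the pixels that a morphological
            // opening would remove (img AND NOT open(img)), OR them into the skeleton, then erode the
            // working image; stop once the working image is completely black.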

            do
            {
                temp = img.MorphologyEx(element, Emgu.CV.CvEnum.CV_MORPH_OP.CV_MOP_OPEN, 1);
                temp = temp.Not();
                temp = temp.And(img);
                skel = skel.Or(temp);
                img  = img.Erode(1);
                double[] min, max;
                Point[]  pmin, pmax;
                img.MinMax(out min, out max, out pmin, out pmax);
                done = (max[0] == 0);
            } while (!done);

            // RIDGE COUNT
            FingerprintFeatureVector ridgeCounts = new FingerprintFeatureVector();

            ridgeCounts.Data = new int[6];

            for (int i = 1; i <= 5; i++)
            {
                int ridge_count = this.getRidgeCount(i, skel);

                ridgeCounts.Data[i] = ridge_count;
            }

            return(ridgeCounts);
        }
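        // getRidgeCount(i, skel) is referenced above but not included in this snippet. One plausible
        // sketch (an assumption, counting the white ridge runs crossed by a horizontal scan line placed
        // at i/6 of the image height) could look like this:
        private int getRidgeCount(int i, Image <Gray, byte> skel)
        {
            int  row     = skel.Height * i / 6; // scan-line position is an assumption
            int  count   = 0;
            bool onRidge = false;

            for (int x = 0; x < skel.Width; x++)
            {
                bool white = skel.Data[row, x, 0] > 0;
                if (white && !onRidge)
                {
                    count++;                    // entered a new ridge run
                }
                onRidge = white;
            }
            return(count);
        }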
예제 #24
0
        private void ptnProcess_Click(object sender, EventArgs e)
        {
            vehicleType = textBoxvehicleType.Text;
            if (vehicleType == "")
            {
                MessageBox.Show("Please Enter Vehicle Type");
            }
            else
            {
                var filePath = @"data.csv";

                using (var wr = new StreamWriter(filePath, true, System.Text.Encoding.UTF8))
                {
                    var sb = new System.Text.StringBuilder();

                    sb.Append(vehicleType);
                    sb.Append(",");

                    frameGrayOriginal = frame.Convert <Gray, Byte>().PyrDown().PyrUp();
                    Image <Gray, Byte> frameGray = frame.Convert <Gray, Byte>().PyrDown().PyrUp();
                    Image <Gray, Byte> BGGray    = BG.Convert <Gray, Byte>().PyrDown().PyrUp();

                    CvInvoke.cvAbsDiff(BGGray, frameGray, frameGray);

                    OriginalImageBox.Image = frameGray;

                    CvInvoke.cvSmooth(frameGray, frameGray, SMOOTH_TYPE.CV_GAUSSIAN, 15, 15, 3, 1);

                    frameGray = frameGray.ThresholdBinary(new Gray(60), new Gray(255));

                    //OriginalImageBox.Image = frameGrayOriginal;


                    StructuringElementEx rect_6 = new StructuringElementEx(8, 8, 5, 5, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

                    //dilating the source image using the specified structuring element
                    CvInvoke.cvDilate(frameGray, frameGray, rect_6, 6);

                    StructuringElementEx rect_12 = new StructuringElementEx(4, 4, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

                    //Eroding the source image using the specified structuring element
                    CvInvoke.cvErode(frameGray, frameGray, rect_12, 4);
                    resultImage = frameGray.And(frameGrayOriginal);

                    FindLargestObject();

                    TextBox.AppendText("Height : " + r.Height.ToString() + "\n");
                    sb.Append(r.Height);
                    sb.Append(",");
                    TextBox.AppendText("Width : " + r.Width.ToString() + "\n");
                    sb.Append(r.Width);
                    sb.Append(",");
                    TextBox.AppendText("Ratio : " + ((float)r.Width / (float)r.Height).ToString() + "\n");
                    sb.Append((float)r.Width / (float)r.Height);
                    sb.Append(",");


                    Image <Gray, Byte> cannyEdges = resultImage.Canny(150, 80);



                    StructuringElementEx rect_2 = new StructuringElementEx(4, 4, 1, 1, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);
                    CvInvoke.cvDilate(cannyEdges, cannyEdges, rect_2, 1);

                    //OriginalImageBox.Image = cannyEdges;

                    CvInvoke.cvErode(cannyEdges, cannyEdges, rect_2, 1);

                    //OriginalImageBox.Image = cannyEdges;

                    Image <Gray, float> cornerStrength = new Image <Gray, float>(cannyEdges.Size);
                    CvInvoke.cvCornerHarris(
                        cannyEdges,     //source image
                        cornerStrength, //result image
                        3,              //neighborhood size
                        3,              //aperture size
                        0.2);           //Harris parameter
                                        //threshold the corner strengths
                    cornerStrength._ThresholdBinary(new Gray(0.01), new Gray(255));
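                    // count the pixels that survive the corner-strength threshold; this crude
                    // Harris corner count becomes one of the CSV features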

                    int contCorners = 0;
                    for (int x = 0; x < cornerStrength.Width; x++)
                    {
                        for (int y = 0; y < cornerStrength.Height; y++)
                        {
                            Gray imagenP = cornerStrength[y, x];
                            if (imagenP.Intensity == 255)
                            {
                                contCorners++;
                            }
                        }
                    }

                    sb.Append(contCorners);
                    sb.Append(",");

                    // ProcessImageBox.Image = cornerStrength;

                    LineSegment2D[] lines = cannyEdges.HoughLinesBinary(
                        3,           //Distance resolution in pixel-related units
                        Math.PI / 2, //Angle resolution measured in radians.
                        150,         //threshold
                        80,          //min Line width
                        10           //gap between lines
                        )[0];        //Get the lines from the first channel

                    foreach (LineSegment2D line in lines)
                    {
                        frame.Draw(line, new Bgr(Color.Red), 2);
                        count++;
                    }

                    //TextBox.AppendText("Horizontal Lines : " + count.ToString() + "\n");

                    sb.Append(count);
                    sb.Append(",");

                    count = 0;

                    lines = cannyEdges.HoughLinesBinary(
                        1,       //Distance resolution in pixel-related units
                        Math.PI, //Angle resolution measured in radians.
                        80,      //threshold
                        70,      //min Line width
                        5        //gap between lines
                        )[0];    //Get the lines from the first channel

                    foreach (LineSegment2D line in lines)
                    {
                        frame.Draw(line, new Bgr(Color.White), 2);
                        count++;
                    }
                    // TextBox.AppendText("vertical Lines : " + count.ToString() + "\n");

                    sb.Append(count);
                    sb.Append(",");

                    count = 0;

                    lines = cannyEdges.HoughLinesBinary(
                        3,           //Distance resolution in pixel-related units
                        Math.PI / 4, //Angle resolution measured in radians.
                        150,         //threshold
                        100,         //min Line width
                        8            //gap between lines
                        )[0];        //Get the lines from the first channel

                    foreach (LineSegment2D line in lines)
                    {
                        frame.Draw(line, new Bgr(Color.LightPink), 2);
                        count++;
                    }
                    // TextBox.AppendText("30 Lines : " + count.ToString() + "\n");

                    sb.Append(count);
                    sb.Append(",");

                    count = 0;

                    lines = cannyEdges.HoughLinesBinary(
                        3,           //Distance resolution in pixel-related units
                        Math.PI / 3, //Angle resolution measured in radians.
                        50,          //threshold
                        100,         //min Line width
                        8            //gap between lines
                        )[0];        //Get the lines from the first channel

                    foreach (LineSegment2D line in lines)
                    {
                        frame.Draw(line, new Bgr(Color.LightPink), 2);
                        count++;
                    }
                    // TextBox.AppendText("60 Lines : " + count.ToString() + "\n");

                    sb.Append(count);
                    sb.Append(",");

                    count = 0;

                    lines = cannyEdges.HoughLinesBinary(
                        3,           //Distance resolution in pixel-related units
                        Math.PI / 6, //Angle resolution measured in radians.
                        50,          //threshold
                        100,         //min Line width
                        8            //gap between lines
                        )[0];        //Get the lines from the first channel

                    foreach (LineSegment2D line in lines)
                    {
                        frame.Draw(line, new Bgr(Color.LightPink), 2);
                        count++;
                    }
                    // TextBox.AppendText("45 Lines : " + count.ToString() + "\n");

                    sb.Append(count);
                    sb.Append(",");

                    List <MCvBox2D> boxList1 = new List <MCvBox2D>();
                    List <MCvBox2D> boxList2 = new List <MCvBox2D>();
                    List <MCvBox2D> boxList3 = new List <MCvBox2D>();
                    List <MCvBox2D> boxList4 = new List <MCvBox2D>();
                    List <MCvBox2D> boxList5 = new List <MCvBox2D>();
                    List <MCvBox2D> boxList6 = new List <MCvBox2D>();

                    List <Triangle2DF> triangleList1 = new List <Triangle2DF>();
                    List <Triangle2DF> triangleList2 = new List <Triangle2DF>();
                    List <Triangle2DF> triangleList3 = new List <Triangle2DF>();
                    List <Triangle2DF> triangleList4 = new List <Triangle2DF>();
                    List <Triangle2DF> triangleList5 = new List <Triangle2DF>();

                    List <Contour <Point> > pentagonList1 = new List <Contour <Point> >();
                    List <Contour <Point> > pentagonList2 = new List <Contour <Point> >();
                    List <Contour <Point> > pentagonList3 = new List <Contour <Point> >();
                    List <Contour <Point> > pentagonList4 = new List <Contour <Point> >();
                    List <Contour <Point> > pentagonList5 = new List <Contour <Point> >();
                    List <Contour <Point> > pentagonList6 = new List <Contour <Point> >();

                    using (MemStorage storage = new MemStorage())
                        for (Contour <Point> contours = cannyEdges.FindContours(); contours != null; contours = contours.HNext)
                        {
                            Contour <Point> currentContour = contours.ApproxPoly(contours.Perimeter * 0.05, storage);

                            if (currentContour.Total == 3) //The contour has 3 vertices, it is a triangle
                            {
                                if (contours.Area > 30 && contours.Area <= 100)
                                {
                                    Point[] pts_1 = currentContour.ToArray();
                                    triangleList1.Add(new Triangle2DF(pts_1[0], pts_1[1], pts_1[2]));
                                }
                                else if (contours.Area <= 200)
                                {
                                    Point[] pts_2 = currentContour.ToArray();
                                    triangleList2.Add(new Triangle2DF(pts_2[0], pts_2[1], pts_2[2]));
                                }
                                else if (contours.Area <= 300)
                                {
                                    Point[] pts_3 = currentContour.ToArray();
                                    triangleList3.Add(new Triangle2DF(pts_3[0], pts_3[1], pts_3[2]));
                                }
                                else if (contours.Area <= 400)
                                {
                                    Point[] pts_4 = currentContour.ToArray();
                                    triangleList4.Add(new Triangle2DF(pts_4[0], pts_4[1], pts_4[2]));
                                }
                                else
                                {
                                    Point[] pts_5 = currentContour.ToArray();
                                    triangleList5.Add(new Triangle2DF(pts_5[0], pts_5[1], pts_5[2]));
                                }
                            }
                            else if (currentContour.Total == 4)
                            {
                                if (contours.Area > 30 && contours.Area < 50)
                                {
                                    bool            isRectangle = true;
                                    Point[]         pts1        = currentContour.ToArray();
                                    LineSegment2D[] edges1      = PointCollection.PolyLine(pts1, true);
                                    for (int i = 0; i < edges1.Length; i++)
                                    {
                                        double angle = Math.Abs(edges1[(i + 1) % edges1.Length].GetExteriorAngleDegree(edges1[i]));

                                        if (angle < 88 || angle > 92)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    // add the box only once, after all the interior angles have been checked
                                    if (isRectangle)
                                    {
                                        boxList1.Add(currentContour.GetMinAreaRect());
                                    }
                                }
                                else if (contours.Area <= 100)
                                {
                                    bool            isRectangle = true;
                                    Point[]         pts2        = currentContour.ToArray();
                                    LineSegment2D[] edges2      = PointCollection.PolyLine(pts2, true);
                                    for (int i = 0; i < edges2.Length; i++)
                                    {
                                        double angle = Math.Abs(edges2[(i + 1) % edges2.Length].GetExteriorAngleDegree(edges2[i]));

                                        if (angle < 88 || angle > 92)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    // add the box only once, after all the interior angles have been checked
                                    if (isRectangle)
                                    {
                                        boxList2.Add(currentContour.GetMinAreaRect());
                                    }
                                }
                                else if (contours.Area <= 200)
                                {
                                    bool            isRectangle = true;
                                    Point[]         pts3        = currentContour.ToArray();
                                    LineSegment2D[] edges3      = PointCollection.PolyLine(pts3, true);
                                    //check the interior angles between consecutive edges
                                    for (int i = 0; i < edges3.Length; i++)
                                    {
                                        double angle = Math.Abs(edges3[(i + 1) % edges3.Length].GetExteriorAngleDegree(edges3[i]));

                                        if (angle < 88 || angle > 92)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    // add the box only once, after all the interior angles have been checked
                                    if (isRectangle)
                                    {
                                        boxList3.Add(currentContour.GetMinAreaRect());
                                    }
                                }
                                else if (contours.Area <= 300)
                                {
                                    bool            isRectangle = true;
                                    Point[]         pts4        = currentContour.ToArray();
                                    LineSegment2D[] edges4      = PointCollection.PolyLine(pts4, true);
                                    //check the interior angles between consecutive edges
                                    for (int i = 0; i < edges4.Length; i++)
                                    {
                                        double angle = Math.Abs(edges4[(i + 1) % edges4.Length].GetExteriorAngleDegree(edges4[i]));

                                        if (angle < 88 || angle > 92)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    // add the box only once, after all the interior angles have been checked
                                    if (isRectangle)
                                    {
                                        boxList4.Add(currentContour.GetMinAreaRect());
                                    }
                                }
                                else if (contours.Area <= 400)
                                {
                                    bool            isRectangle = true;
                                    Point[]         pts5        = currentContour.ToArray();
                                    LineSegment2D[] edges5      = PointCollection.PolyLine(pts5, true);
                                    //check the interior angles between consecutive edges
                                    for (int i = 0; i < edges5.Length; i++)
                                    {
                                        double angle = Math.Abs(edges5[(i + 1) % edges5.Length].GetExteriorAngleDegree(edges5[i]));

                                        if (angle < 88 || angle > 92)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    // add the box only once, after all the interior angles have been checked
                                    if (isRectangle)
                                    {
                                        boxList5.Add(currentContour.GetMinAreaRect());
                                    }
                                }
                                else
                                {
                                    bool            isRectangle = true;
                                    Point[]         pts6        = currentContour.ToArray();
                                    LineSegment2D[] edges6      = PointCollection.PolyLine(pts6, true);
                                    //check the interior angles between consecutive edges
                                    for (int i = 0; i < edges6.Length; i++)
                                    {
                                        double angle = Math.Abs(edges6[(i + 1) % edges6.Length].GetExteriorAngleDegree(edges6[i]));

                                        if (angle < 88 || angle > 92)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    // add the box only once, after all the interior angles have been checked
                                    if (isRectangle)
                                    {
                                        boxList6.Add(currentContour.GetMinAreaRect());
                                    }
                                }
                            }

                            else if (currentContour.Total == 5)
                            {
                                if (contours.Area > 20 && contours.Area <= 50)
                                {
                                    pentagonList1.Add(currentContour);      // Add to list of pentagons

                                    Point[] ptPoints1 = contours.ToArray(); // Get contour points
                                    frame.Draw(contours, new Bgr(Color.Purple), 2);
                                    int     dotX = (ptPoints1[0].X + ptPoints1[1].X + ptPoints1[2].X + ptPoints1[3].X + ptPoints1[4].X) / 5;
                                    int     dotY = (ptPoints1[0].Y + ptPoints1[1].Y + ptPoints1[2].Y + ptPoints1[3].Y + ptPoints1[4].Y) / 5;
                                    CircleF dot  = new CircleF(new PointF(dotX, dotY), 3);
                                    frame.Draw(dot, new Bgr(Color.Purple), -1);
                                }
                                else if (contours.Area <= 100)
                                {
                                    pentagonList2.Add(currentContour);

                                    Point[] ptPoints2 = contours.ToArray(); // Get contour points
                                    frame.Draw(contours, new Bgr(Color.Purple), 2);
                                    int     dotX = (ptPoints2[0].X + ptPoints2[1].X + ptPoints2[2].X + ptPoints2[3].X + ptPoints2[4].X) / 5;
                                    int     dotY = (ptPoints2[0].Y + ptPoints2[1].Y + ptPoints2[2].Y + ptPoints2[3].Y + ptPoints2[4].Y) / 5;
                                    CircleF dot  = new CircleF(new PointF(dotX, dotY), 3);
                                    frame.Draw(dot, new Bgr(Color.Purple), -1);
                                }
                                else if (contours.Area <= 200)
                                {
                                    pentagonList3.Add(currentContour);

                                    Point[] ptPoints3 = contours.ToArray(); // Get contour points
                                    frame.Draw(contours, new Bgr(Color.Purple), 2);
                                    int     dotX = (ptPoints3[0].X + ptPoints3[1].X + ptPoints3[2].X + ptPoints3[3].X + ptPoints3[4].X) / 5;
                                    int     dotY = (ptPoints3[0].Y + ptPoints3[1].Y + ptPoints3[2].Y + ptPoints3[3].Y + ptPoints3[4].Y) / 5;
                                    CircleF dot  = new CircleF(new PointF(dotX, dotY), 3);
                                    frame.Draw(dot, new Bgr(Color.Purple), -1);
                                }
                                else if (contours.Area <= 300)
                                {
                                    pentagonList4.Add(currentContour);

                                    Point[] ptPoints4 = contours.ToArray(); // Get contour points
                                    frame.Draw(contours, new Bgr(Color.Purple), 2);
                                    int     dotX = (ptPoints4[0].X + ptPoints4[1].X + ptPoints4[2].X + ptPoints4[3].X + ptPoints4[4].X) / 5;
                                    int     dotY = (ptPoints4[0].Y + ptPoints4[1].Y + ptPoints4[2].Y + ptPoints4[3].Y + ptPoints4[4].Y) / 5;
                                    CircleF dot  = new CircleF(new PointF(dotX, dotY), 3);
                                    frame.Draw(dot, new Bgr(Color.Purple), -1);
                                }
                                else if (contours.Area <= 400)
                                {
                                    pentagonList5.Add(currentContour);

                                    Point[] ptPoints5 = contours.ToArray(); // Get contour points
                                    frame.Draw(contours, new Bgr(Color.Purple), 2);
                                    int     dotX = (ptPoints5[0].X + ptPoints5[1].X + ptPoints5[2].X + ptPoints5[3].X + ptPoints5[4].X) / 5;
                                    int     dotY = (ptPoints5[0].Y + ptPoints5[1].Y + ptPoints5[2].Y + ptPoints5[3].Y + ptPoints5[4].Y) / 5;
                                    CircleF dot  = new CircleF(new PointF(dotX, dotY), 3);
                                    frame.Draw(dot, new Bgr(Color.Purple), -1);
                                }
                                else
                                {
                                    pentagonList6.Add(currentContour);

                                    Point[] ptPoints6 = contours.ToArray(); // Get contour points
                                    frame.Draw(contours, new Bgr(Color.Purple), 2);
                                    int     dotX = (ptPoints6[0].X + ptPoints6[1].X + ptPoints6[2].X + ptPoints6[3].X + ptPoints6[4].X) / 5;
                                    int     dotY = (ptPoints6[0].Y + ptPoints6[1].Y + ptPoints6[2].Y + ptPoints6[3].Y + ptPoints6[4].Y) / 5;
                                    CircleF dot  = new CircleF(new PointF(dotX, dotY), 3);
                                    frame.Draw(dot, new Bgr(Color.Purple), -1);
                                }
                            }
                        }
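                    // NOTE: each area bucket above repeats the same draw-and-centroid logic;
                    // see the hedged DrawPentagonCentroid sketch after this method for one way to collapse it.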

                    count = 0;
                    foreach (Contour <Point> pentagon in pentagonList1)
                    {
                        frame.Draw(pentagon, new Bgr(Color.Purple), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Contour <Point> pentagon in pentagonList2)
                    {
                        frame.Draw(pentagon, new Bgr(Color.Purple), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Contour <Point> pentagon in pentagonList3)
                    {
                        frame.Draw(pentagon, new Bgr(Color.Purple), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Contour <Point> pentagon in pentagonList4)
                    {
                        frame.Draw(pentagon, new Bgr(Color.Purple), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Contour <Point> pentagon in pentagonList5)
                    {
                        frame.Draw(pentagon, new Bgr(Color.Purple), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Contour <Point> pentagon in pentagonList6)
                    {
                        frame.Draw(pentagon, new Bgr(Color.Purple), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (MCvBox2D box in boxList1)
                    {
                        frame.Draw(box, new Bgr(Color.Purple), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (MCvBox2D box in boxList2)
                    {
                        frame.Draw(box, new Bgr(Color.LightBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (MCvBox2D box in boxList3)
                    {
                        frame.Draw(box, new Bgr(Color.LightBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (MCvBox2D box in boxList4)
                    {
                        frame.Draw(box, new Bgr(Color.LightBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (MCvBox2D box in boxList5)
                    {
                        frame.Draw(box, new Bgr(Color.LightBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Triangle2DF triangle in triangleList1)
                    {
                        frame.Draw(triangle, new Bgr(Color.DarkBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Triangle2DF triangle in triangleList2)
                    {
                        frame.Draw(triangle, new Bgr(Color.DarkBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Triangle2DF triangle in triangleList3)
                    {
                        frame.Draw(triangle, new Bgr(Color.DarkBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Triangle2DF triangle in triangleList4)
                    {
                        frame.Draw(triangle, new Bgr(Color.DarkBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;
                    foreach (Triangle2DF triangle in triangleList5)
                    {
                        frame.Draw(triangle, new Bgr(Color.DarkBlue), 2);
                        count++;
                    }
                    sb.Append(count);
                    sb.Append(",");

                    count = 0;

                    Gray cannyThreshold             = new Gray(100);
                    Gray circleAccumulatorThreshold = new Gray(70);

                    CircleF[] circles = frameGrayOriginal.HoughCircles(
                        cannyThreshold,
                        circleAccumulatorThreshold,
                        2.0,                   //Resolution of the accumulator used to detect centers of the circles
                        160.0,                 //min distance
                        20,                    //min radius
                        50                     //max radius
                        )[0];                  //Get the circles from the first channel

                    float rad = 0;
                    foreach (CircleF circle in circles)
                    {
                        //frame.Draw(circle, new Bgr(Color.Brown), 2);
                        TextBox.AppendText("Radius : " + circle.Radius.ToString() + "\n");
                        rad = rad + circle.Radius;
                    }

                    // Guard against frames where fewer than two circles are detected,
                    // otherwise indexing circles[0]/circles[1] would throw.
                    if (circles.Length >= 2)
                    {
                        sb.Append(circles[0].Radius);
                        sb.Append(",");
                        sb.Append(circles[1].Radius);
                        sb.Append(",");
                        double dist = Math.Sqrt(Math.Pow(circles[0].Center.X - circles[1].Center.X, 2) + Math.Pow(circles[0].Center.Y - circles[1].Center.Y, 2));
                        sb.Append(dist);
                        sb.Append(",");
                    }
                    else
                    {
                        sb.Append("0,0,0,");   // keep the column layout stable when circles are missing
                    }
                    //TextBox.AppendText("circles : " + circles.Length.ToString() + "\n");
                    sb.Append(circles.Length);
                    sb.Append(",");
                    //TextBox.AppendText("Avg radius : " + (rad / circles.Length).ToString() + "\n");
                    sb.Append(circles.Length > 0 ? rad / circles.Length : 0f);   // avoid NaN when no circles were found
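                    // Columns appended in this section, in order: six pentagon counts (bucketed by area),
                    // five box counts, five triangle counts, two circle radii, the distance between their
                    // centres, the total circle count, and the average radius.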

                    ProcessImageBox.Image = frame;

                    wr.WriteLine(sb.ToString());
                }
            }
        }
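
        // Hedged sketch (not part of the original sample): the shape-reporting code above repeats two
        // patterns many times -- "draw a pentagon and its centroid" inside the area buckets, and
        // "draw every shape in a list, count it, append the count" for each list. The helpers below
        // show one way those copies could be collapsed; the names DrawPentagonCentroid and
        // AppendShapeCount, and the assumed namespaces (System, System.Collections.Generic,
        // System.Drawing, System.Text, Emgu.CV, Emgu.CV.Structure), are illustrative assumptions.
        private static void DrawPentagonCentroid(Image<Bgr, Byte> frame, Contour<Point> pentagon)
        {
            Point[] pts = pentagon.ToArray();                       // approximated polygon vertices
            frame.Draw(pentagon, new Bgr(Color.Purple), 2);         // outline the pentagon

            int dotX = 0, dotY = 0;
            foreach (Point p in pts) { dotX += p.X; dotY += p.Y; }
            dotX /= pts.Length;                                     // vertex average = centroid
            dotY /= pts.Length;

            frame.Draw(new CircleF(new PointF(dotX, dotY), 3), new Bgr(Color.Purple), -1);
        }

        private static void AppendShapeCount<T>(StringBuilder sb, IEnumerable<T> shapes, Action<T> draw)
        {
            int n = 0;
            foreach (T shape in shapes)
            {
                draw(shape);                                        // e.g. s => frame.Draw(s, new Bgr(Color.Purple), 2)
                n++;
            }
            sb.Append(n);
            sb.Append(",");
        }
        // Example usage (assumed context):
        //   AppendShapeCount(sb, pentagonList1, p => frame.Draw(p, new Bgr(Color.Purple), 2));
        //   AppendShapeCount(sb, boxList2, b => frame.Draw(b, new Bgr(Color.LightBlue), 2));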
        void ProcessFramAndUpdateGUI(object sender, EventArgs args)
        {
            string[] filePaths = Directory.GetFiles(filepath);

            int    Finger_num = 0;
            Double Result1    = 0;
            Double Result2    = 0;

            imgOrignal = CapWebCam.QueryFrame();

            if (imgOrignal == null)
            {
                return;
            }
            //Applying YCrCb filter
            Image <Ycc, Byte>  currentYCrCbFrame = imgOrignal.Convert <Ycc, byte>();
            Image <Gray, byte> skin = new Image <Gray, byte>(imgOrignal.Width, imgOrignal.Height);

            // keep pixels whose Cr lies in 131..185 and Cb in 80..135 (Y unrestricted) - a typical YCrCb skin-tone range
            skin = currentYCrCbFrame.InRange(new Ycc(0, 131, 80), new Ycc(255, 185, 135));

            StructuringElementEx rect_12 = new StructuringElementEx(10, 10, 5, 5, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            //Eroding the source image using the specified structuring element
            CvInvoke.cvErode(skin, skin, rect_12, 1);

            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);

            //dilating the source image using the specified structuring element
            CvInvoke.cvDilate(skin, skin, rect_6, 2);

            skin = skin.Flip(FLIP.HORIZONTAL);
            //Smoothing the filtered, eroded and dilated image.
            skin = skin.SmoothGaussian(9);

            imgOrignal = imgOrignal.Flip(FLIP.HORIZONTAL);
            //extracting contours.
            Contour <Point> contours = skin.FindContours();

            Contour <Point> biggestContour = null;

            //extracting the biggest contour.
            while (contours != null)
            {
                Result1 = contours.Area;
                if (Result1 > Result2)
                {
                    Result2        = Result1;
                    biggestContour = contours;
                }
                contours = contours.HNext;
            }
            //Applying the convexity defect algorithm to count extended fingers
            if (biggestContour != null)
            {
                Finger_num = 0;

                biggestContour = biggestContour.ApproxPoly(0.00025);
                imgOrignal.Draw(biggestContour, new Bgr(Color.LimeGreen), 2);

                Hull    = biggestContour.GetConvexHull(ORIENTATION.CV_CLOCKWISE);
                defects = biggestContour.GetConvexityDefacts(storage, ORIENTATION.CV_CLOCKWISE);
                imgOrignal.DrawPolyline(Hull.ToArray(), true, new Bgr(0, 0, 255), 2);   // red hull outline (BGR components capped at 255)

                box = biggestContour.GetMinAreaRect();

                defectArray = defects.ToArray();

                for (int i = 0; i < defects.Total; i++)
                {
                    PointF startPoint = new PointF((float)defectArray[i].StartPoint.X,
                                                   (float)defectArray[i].StartPoint.Y);

                    PointF depthPoint = new PointF((float)defectArray[i].DepthPoint.X,
                                                   (float)defectArray[i].DepthPoint.Y);

                    PointF endPoint = new PointF((float)defectArray[i].EndPoint.X,
                                                 (float)defectArray[i].EndPoint.Y);


                    CircleF startCircle = new CircleF(startPoint, 5f);
                    CircleF depthCircle = new CircleF(depthPoint, 5f);
                    CircleF endCircle   = new CircleF(endPoint, 5f);


                    if ((startCircle.Center.Y < box.center.Y || depthCircle.Center.Y < box.center.Y) &&
                        (startCircle.Center.Y < depthCircle.Center.Y) &&
                        (Math.Sqrt(Math.Pow(startCircle.Center.X - depthCircle.Center.X, 2) +
                                   Math.Pow(startCircle.Center.Y - depthCircle.Center.Y, 2)) >
                         box.size.Height / 6.5))
                    {
                        Finger_num++;
                    }
                }

                label2.Text = Finger_num.ToString();            // updating finger count
            }

            // Finding the center of contour

            MCvMoments moment = new MCvMoments();               // a new MCvMoments object

            try
            {
                moment = biggestContour.GetMoments();           // Moments of biggestContour
            }
            catch (NullReferenceException except)
            {
                //label3.Text = except.Message;
                return;
            }

            CvInvoke.cvMoments(biggestContour, ref moment, 0);  // redundant: GetMoments above already populated 'moment'

            double m_00 = CvInvoke.cvGetSpatialMoment(ref moment, 0, 0);
            double m_10 = CvInvoke.cvGetSpatialMoment(ref moment, 1, 0);
            double m_01 = CvInvoke.cvGetSpatialMoment(ref moment, 0, 1);

            int current_X = Convert.ToInt32(m_10 / m_00) / 10;      // X of the contour centroid, scaled down by a factor of 10
            int current_Y = Convert.ToInt32(m_01 / m_00) / 10;      // Y of the contour centroid, scaled down by a factor of 10

            // transfer control to webcam only if button has already been clicked

            if (button_pressed)
            {
                if (Finger_num == 0 || Finger_num == 1)
                {
                    // with at most one finger raised, the hand centroid drives the mouse cursor
                    Cursor.Position = new Point(current_X * 20, current_Y * 20);
                }

                if (Finger_num >= 4)
                {
                    // no action is mapped to an open hand yet
                }
            }

            iborignal.Image = imgOrignal;


            Image <Bgr, Byte>  currentFrame;
            Image <Gray, Byte> sourceImage = null;

            for (int tu = 0; tu < filePaths.Length; tu++)
            {
                if (filePaths[tu].Contains(".bmp"))
                {
                    getfilename(System.IO.Path.GetFileName(filePaths[tu]));

                    sourceImage = CapWebCam.QueryFrame().Convert <Gray, Byte>();
                    Image <Gray, Byte> templateImage = new Image <Gray, Byte>(filePaths[tu]);

                    Image <Gray, float> result = sourceImage.MatchTemplate(templateImage, Emgu.CV.CvEnum.TM_TYPE.CV_TM_CCOEFF_NORMED);
                    //  iborignal.Image = result;
                    double[] min, max;
                    Point[]  pointMin, pointMax;
                    templateImage.MinMax(out min, out max, out pointMin, out pointMax);
                    float[, ,] matches = result.Data;
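                    // Note: result.Data is indexed [row, column, channel], so x below walks rows and y walks
                    // columns of the normalized cross-correlation map; matches[x, y, 0] is the score at that offset.
                    // The template's own MinMax values computed above are not used below; taking MinMax of
                    // 'result' instead would give the best-match location directly.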


                    for (int x = 0; x < matches.GetLength(0); x++)
                    {
                        for (int y = 0; y < matches.GetLength(1); y++)
                        {
                            double matchScore = matches[x, y, 0];
                            Console.WriteLine(matchScore);

                            label1.Text = matchScore.ToString();
                            // label4.Text = filePaths[tu];

                            if (matchScore > 0.40)
                            {
                                CodeClass db = new CodeClass();
                                db.ConnectToDatabase();
                                DataTable dt = db.GetTable("Select * from tbldata where FilePath='" + filePaths[tu] + "'");
                                if (dt.Rows.Count > 0)
                                {
                                    richTextBox1.Text = dt.Rows[0][0].ToString();
                                    richTextBox2.Text = dt.Rows[0][1].ToString();

                                    Console.WriteLine(matchScore);
                                    reader = new SpeechSynthesizer();
                                    reader.SpeakAsync(richTextBox2.Text);
                                    Rectangle rect = new Rectangle(new Point(x, y), new Size(1, 1));

                                    imgOrignal.Draw(rect, new Bgr(Color.Blue), 1);
                                }
                            }
                            else
                            {
                                richTextBox1.Text = "";
                                richTextBox2.Text = "";
                            }
                        }
                    }
                }
            }
        }
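
        // Hedged sketch (assumption, not part of the original sample): the fingertip rule used in the
        // convexity-defect loop above, factored into a standalone helper. The name CountFingers and the
        // box.size.Height / 6.5 heuristic mirror that loop; assumed namespaces: System, System.Drawing,
        // Emgu.CV.Structure.
        private static int CountFingers(MCvConvexityDefect[] defects, MCvBox2D box)
        {
            int fingers = 0;
            foreach (MCvConvexityDefect d in defects)
            {
                PointF start = new PointF(d.StartPoint.X, d.StartPoint.Y);
                PointF depth = new PointF(d.DepthPoint.X, d.DepthPoint.Y);

                double gap = Math.Sqrt(Math.Pow(start.X - depth.X, 2) +
                                       Math.Pow(start.Y - depth.Y, 2));

                // A defect counts as a finger when its start point or valley lies above the palm centre,
                // the start point is above the valley, and the start-to-valley distance is long enough
                // relative to the hand's bounding box.
                if ((start.Y < box.center.Y || depth.Y < box.center.Y) &&
                    start.Y < depth.Y &&
                    gap > box.size.Height / 6.5)
                {
                    fingers++;
                }
            }
            return fingers;
        }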
예제 #26
0
        public bool detectOverall(Image <Gray, byte> img, Image <Gray, byte> prev)
        {
            Rectangle imgROI = new Rectangle(0, 0, 0, 0);

            if (img.IsROISet)
            {
                imgROI = img.ROI;
                CvInvoke.cvResetImageROI(img);
            }

            Rectangle prevROI = new Rectangle(0, 0, 0, 0);

            if (prev.IsROISet)
            {
                prevROI = prev.ROI;
                CvInvoke.cvResetImageROI(prev);
            }

            // step 1: compute diff = img - prev
            Image <Gray, byte> diff = new Image <Gray, byte>(img.Width, img.Height);

            CvInvoke.cvSub(img, prev, diff, IntPtr.Zero);

            // restore the saved ROIs (IsROISet is false here because the ROIs were just reset,
            // so the saved rectangles are checked instead)
            if (imgROI.Width > 0 && imgROI.Height > 0)
            {
                CvInvoke.cvSetImageROI(img, imgROI);
            }
            if (prevROI.Width > 0 && prevROI.Height > 0)
            {
                CvInvoke.cvSetImageROI(prev, prevROI);
            }

            // step 2: threshold
            CvInvoke.cvThreshold(diff, diff, 100, 255, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY);

            // step 3: close to fill small holes, then open to remove isolated noise
            StructuringElementEx element = new StructuringElementEx(5, 5, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_ELLIPSE);

            CvInvoke.cvMorphologyEx(diff, diff, IntPtr.Zero, element, Emgu.CV.CvEnum.CV_MORPH_OP.CV_MOP_CLOSE, 1);
            CvInvoke.cvMorphologyEx(diff, diff, IntPtr.Zero, element, Emgu.CV.CvEnum.CV_MORPH_OP.CV_MOP_OPEN, 3);

            // step 4: find the connected regions
            IntPtr     comp    = new IntPtr();
            MemStorage storage = new MemStorage();
            int        nc      = CvInvoke.cvFindContours(diff, storage, ref comp, StructSize.MCvContour,
                                                         Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_CCOMP, Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, new Point(0, 0));

            // a blink is detected only when exactly two connected regions are found
            if (nc != 2)
            {
                return(false);
            }

            Seq <Point> compSeq = new Seq <Point>(comp, null);
            Rectangle   r1      = CvInvoke.cvBoundingRect(compSeq, 1);
            Rectangle   r2      = CvInvoke.cvBoundingRect(compSeq.HNext, 1);

            // Eliminate interference
            if (Math.Abs(r1.Y - r2.Y) > 20 || Math.Abs(r1.Width - r2.Width) > 20 || Math.Abs(r1.Height - r2.Height) > 20)
            {
                return(false);
            }

            // Reject pairs that are too close together horizontally (e.g. a nostril pair rather than the eyes)
            if (Math.Abs(r1.X - r2.X) < 200)
            {
                return(false);
            }

            return(true);
        }
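
        // Hedged usage sketch (assumption): how detectOverall might be driven from a per-frame handler.
        // The field prevGray and the OnFrame method are illustrative only; assumed namespaces:
        // Emgu.CV, Emgu.CV.Structure.
        private Image<Gray, byte> prevGray;                      // previous grayscale frame

        private void OnFrame(Image<Bgr, byte> frame)             // hypothetical per-frame callback
        {
            Image<Gray, byte> currGray = frame.Convert<Gray, byte>();

            if (prevGray != null && detectOverall(currGray, prevGray))
            {
                // exactly two similar, well-separated difference regions -> treat as a blink
            }

            prevGray = currGray;
        }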
예제 #27
0
        public bool detectShook(Image <Gray, byte> img, Image <Gray, byte> prev)
        {
            // if a ROI is set on the current image, save it and reset it
            Rectangle imgROI = new Rectangle(0, 0, 0, 0);

            if (img.IsROISet)
            {
                imgROI = img.ROI;
                CvInvoke.cvResetImageROI(img);
            }

            // if a ROI is set on the previous image, save it and reset it
            Rectangle prevROI = new Rectangle(0, 0, 0, 0);

            if (prev.IsROISet)
            {
                prevROI = prev.ROI;
                CvInvoke.cvResetImageROI(prev);
            }


            Image <Gray, byte> diff = new Image <Gray, byte>(img.Width, img.Height);

            CvInvoke.cvSub(img, prev, diff, IntPtr.Zero);

            // restore the saved ROIs so the input images are left unchanged by this method
            // (IsROISet is false here because the ROIs were just reset, so the saved rectangles are checked instead)
            if (imgROI.Width > 0 && imgROI.Height > 0)
            {
                CvInvoke.cvSetImageROI(img, imgROI);
            }
            if (prevROI.Width > 0 && prevROI.Height > 0)
            {
                CvInvoke.cvSetImageROI(prev, prevROI);
            }

            CvInvoke.cvThreshold(diff, diff, 50, 255, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY);

            StructuringElementEx element = new StructuringElementEx(5, 5, 3, 3, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_ELLIPSE);

            //CvInvoke.cvMorphologyEx(diff, diff, IntPtr.Zero, element, Emgu.CV.CvEnum.CV_MORPH_OP.CV_MOP_CLOSE, 1);
            CvInvoke.cvMorphologyEx(diff, diff, IntPtr.Zero, element, Emgu.CV.CvEnum.CV_MORPH_OP.CV_MOP_OPEN, 7);


            IntPtr     comp    = new IntPtr();
            MemStorage storage = new MemStorage();
            int        nc      = CvInvoke.cvFindContours(diff, storage, ref comp, StructSize.MCvContour,
                                                         Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_CCOMP, Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, new Point(0, 0));

            // when the number of connected regions is too large, a head shake is detected
            if (nc > 8)
            {
                return(true);
            }

            // likewise, when any connected region is too wide or too tall, a shake is detected
            Seq <Point> compSeq = new Seq <Point>(comp, null);

            while (compSeq != null && comp != IntPtr.Zero)   // IntPtr comparison avoids an overflow on 64-bit
            {
                Rectangle r = CvInvoke.cvBoundingRect(compSeq, 1);

                if (r.Width >= 200 || r.Height >= 120 || Math.Abs(r.Width - r.Height) >= 100)
                {
                    return(true);
                }

                compSeq = compSeq.HNext;
            }

            return(false);
        }
예제 #28
0
File: Form1.cs Project: hoh5hc/DeltaRobot
        private void imagezone()
        {
            int  i = 0;
            byte j = 0;

            try
            {
                capture = new Capture();
            }
            catch (NullReferenceException excpt)
            {
                MessageBox.Show(excpt.Message);
            }
            Thread.Sleep(2000);


            while (true)
            {
                Image <Bgr, Byte> image  = capture.QueryFrame();  // grab the next frame from the webcam
                Image <Hsv, Byte> imgHsv = image.Convert <Hsv, Byte>();
                // ImageFrame = ImageFrame.Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);


                for (i = 0; i < 3; i++)
                {
                    Image <Gray, byte> imgthres = new Image <Gray, byte>(imgHsv.Size);
                    if (i == 0)
                    {
                        red(imgHsv, out imgthres);
                    }
                    else if (i == 1)
                    {
                        blue(imgHsv, out imgthres);
                    }
                    else if (i == 2)
                    {
                        green(imgHsv, out imgthres);
                    }

                    //erode and dilate with the same 5x5 cross kernel to clean up the threshold mask
                    StructuringElementEx element = new StructuringElementEx(5, 5, 1, 1, Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_CROSS);
                    CvInvoke.cvErode(imgthres, imgthres, element, 1);
                    CvInvoke.cvDilate(imgthres, imgthres, element, 1);
                    // find contours and highlight the ones large enough to be a real object


                    using (MemStorage storage = new MemStorage()) //allocate storage for contour approximation
                        for (Contour <Point> contours = imgthres.FindContours(); contours != null; contours = contours.HNext)
                        {
                            if (contours.Area > 400) //only consider contours with area greater than 400
                            {
                                j++;
                                image.Draw(contours, new Bgr(Color.Gold), 4);
                            }
                        }

                    img_color[i] = j;
                    j            = 0;
                }


                cam.Image = image;
            }
        }
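
        // Hedged sketch (assumption): the red()/blue()/green() helpers called from imagezone() are not
        // part of this snippet. One plausible shape for red() is shown below, using two Hsv InRange
        // bands because red wraps around the hue axis; the threshold values are illustrative only.
        private void red(Image<Hsv, Byte> hsv, out Image<Gray, byte> mask)
        {
            Image<Gray, byte> lowBand  = hsv.InRange(new Hsv(0, 100, 100), new Hsv(10, 255, 255));     // reds near hue 0
            Image<Gray, byte> highBand = hsv.InRange(new Hsv(170, 100, 100), new Hsv(180, 255, 255));  // reds near hue 180
            mask = lowBand.Or(highBand);                         // combine both hue bands into one binary mask
        }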