示例#1
0
        /// <summary>
        /// Capture-thread loop: grabs frames while the capture is open, thresholds them,
        /// finds square/triangle shapes by contour area, updates the UI previews/labels
        /// (via Invoke) and sends the last detected target to the Arduino as "&lt;X Y S&gt;".
        /// </summary>
        private void ProcessImage()
        {
            while (_capture.IsOpened)
            {
                // Grab the next frame; QueryFrame returns null once the stream ends.
                Mat sourceFrame = _capture.QueryFrame();
                if (sourceFrame == null)
                {
                    break;
                }

                // Resize to the PictureBox width, preserving the frame's aspect ratio.
                int  newHeight = sourceFrame.Size.Height * pictureBox1.Size.Width / sourceFrame.Size.Width;
                Size newSize   = new Size(pictureBox1.Size.Width, newHeight);
                CvInvoke.Resize(sourceFrame, sourceFrame, newSize);

                Point  targetPointForArduino = new Point();
                string targetShapeForArduino = "";

                // Show the raw (resized) frame; the clone keeps the preview independent
                // of the processing below. NOTE(review): the cloned Mat is never
                // disposed, so long runs still leak it - consider tracking it.
                Invoke(new Action(() =>
                {
                    pictureBox1.Image = sourceFrame.Clone().Bitmap;
                }));

                // Binarize: pixels brighter than 100 become foreground (255).
                var binaryImage = sourceFrame.ToImage <Gray, byte>().ThresholdBinary(new Gray(100), new Gray(255)).Mat;

                // Color copy of the binary image so contours/centers can be drawn in color.
                var decoratedImage = new Mat();
                CvInvoke.CvtColor(binaryImage, decoratedImage, typeof(Gray), typeof(Bgr));

                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(binaryImage, contours, null, RetrType.List,
                                          ChainApproxMethod.ChainApproxSimple);
                    int shapesFound    = 0;
                    int squaresFound   = 0;
                    int trianglesFound = 0;

                    for (int i = 0; i < contours.Size; i++)
                    {
                        VectorOfPoint contour = contours[i];
                        double        area    = CvInvoke.ContourArea(contour);

                        // Ignore noise (< 1500) and oversized blobs (> 6000).
                        if (area < 1500 || area > 6000)
                        {
                            continue;
                        }

                        shapesFound++;
                        Rectangle boundingBox = CvInvoke.BoundingRectangle(contour);
                        Point     point       = new Point(boundingBox.X + (boundingBox.Width / 2), boundingBox.Y + (boundingBox.Height / 2));
                        // Scale pixel coordinates down to the Arduino's coordinate space.
                        // NOTE(review): 22 and 17 look like empirically-tuned divisors - confirm.
                        Point  arduinoTarget = new Point(point.X / 22, point.Y / 17);
                        string shape;

                        // Area heuristic: large blobs are squares ("S"), smaller ones triangles ("T").
                        if (area > 3200)
                        {
                            CvInvoke.Polylines(decoratedImage, contour, true, new Bgr(Color.Red).MCvScalar);
                            CvInvoke.Circle(decoratedImage, point, 1, new Bgr(Color.Red).MCvScalar);
                            shape = "S";
                            squaresFound++;
                        }
                        else
                        {
                            CvInvoke.Polylines(decoratedImage, contour, true, new Bgr(Color.Blue).MCvScalar);
                            CvInvoke.Circle(decoratedImage, point, 1, new Bgr(Color.Blue).MCvScalar);
                            shape = "T";
                            trianglesFound++;
                        }

                        // Remember the last detected shape as the Arduino target and
                        // update the coordinate label on the UI thread (Invoke blocks,
                        // so the captured locals are safe to read here).
                        Invoke(new Action(() =>
                        {
                            coordLabel.Text       = $"coordinates {arduinoTarget.X},{arduinoTarget.Y} ";
                            targetPointForArduino = point;
                            targetShapeForArduino = shape;
                        }));
                    }

                    Invoke(new Action(() =>
                    {
                        contureLable.Text   = $"There are {shapesFound} contours detected";
                        squaresLable.Text   = $"There are {squaresFound} squares detected";
                        trianglesLabel.Text = $"There are {trianglesFound} triangles detected";
                    }));
                }

                // Show the annotated frame. BUG FIX: this assignment previously ran on
                // the capture thread; WinForms controls must only be touched from the
                // UI thread, as every other control update in this method already does.
                Invoke(new Action(() =>
                {
                    pictureBox2.Image = decoratedImage.Bitmap;
                }));

                // Send the target once per request (readyForNewShape re-armed elsewhere).
                // NOTE(review): this sends raw pixel coords, which can overflow a byte;
                // the label shows arduinoTarget - confirm which is intended.
                if (readyForNewShape && targetPointForArduino.X != 0 && targetPointForArduino.Y != 0)
                {
                    byte[] buffer = new byte[5] {
                        Encoding.ASCII.GetBytes("<")[0],
                        Convert.ToByte(targetPointForArduino.X),
                        Convert.ToByte(targetPointForArduino.Y),
                        Encoding.ASCII.GetBytes(targetShapeForArduino)[0],
                        Encoding.ASCII.GetBytes(">")[0]
                    };
                    arduinoSerial.Write(buffer, 0, 5);

                    readyForNewShape = false;
                }

                // BUG FIX: release per-frame native buffers - the loop previously leaked
                // every Mat it allocated. decoratedImage stays alive because pictureBox2's
                // Bitmap may share its pixel data.
                sourceFrame.Dispose();
                binaryImage.Dispose();
            }
        }
        /// <summary>
        /// Per-frame pipeline: mirror the frame, threshold/blur per the UI sliders,
        /// apply morphology, run Canny, find the largest contour and draw its
        /// convexity defects, then push both images to the WPF video boxes.
        /// </summary>
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = new Mat();

            capture.Retrieve(frame, 0);

            // Preprocessing: mirror the frame and build a grayscale working copy.
            Image <Bgr, byte>  finalImg      = frame.ToImage <Bgr, byte>().Flip(FlipType.Horizontal);
            Image <Gray, byte> processingImg = finalImg.Convert <Gray, byte>();

            // NOTE(review): these BeginInvoke calls run asynchronously on the dispatcher
            // thread, so the threshold/blur may land after the code below has already
            // consumed processingImg. Read the slider values synchronously if a
            // deterministic pipeline order is required.
            BiTonalLevel.Dispatcher.BeginInvoke(new Action(() =>
            {
                if (BiTonalLevel.Value > 0)
                {
                    processingImg = processingImg.ThresholdBinary(new Gray(BiTonalLevel.Value), new Gray(255));
                }
            }));
            BlurLevel.Dispatcher.BeginInvoke(new Action(() =>
            {
                if (BlurLevel.Value > 1)
                {
                    CvInvoke.Blur(processingImg, processingImg, new System.Drawing.Size((int)BlurLevel.Value, (int)BlurLevel.Value), new System.Drawing.Point(-1, -1));
                }
            }));

            // Morphological processing. BUG FIX: Image.MorphologyEx returns a NEW image;
            // the previous code discarded the return value, so neither operation had
            // any effect on the pipeline.
            processingImg = processingImg.MorphologyEx(firstMorphOp, kernel, new System.Drawing.Point(-1, -1), firstMorphSteps, BorderType.Default, new MCvScalar());
            if (doubleMorph)
            {
                processingImg = processingImg.MorphologyEx(secondMorphOp, kernel2, new System.Drawing.Point(-1, -1), secondMorphSteps, BorderType.Default, new MCvScalar());
            }
            ProcessingVideoBox.Dispatcher.BeginInvoke(new Action(() => ProcessingVideoBox.Source = ToBitmapGrey(processingImg)));

            // Edge detection.
            Mat edges = new Mat(frame.Size, frame.Depth, 1);
            CvInvoke.Canny(processingImg, edges, lowerTresholdLevel, upperTresholdLevel, cannyKernelSize);

            // Contour finding: remember the index of the largest-area contour.
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat    hierarchy             = new Mat();
            int    largest_contour_index = 0;
            double largest_area          = 0;

            CvInvoke.FindContours(edges, contours, hierarchy, contouringMode, contouringMethod);
            for (int i = 0; i < contours.Size; i++)
            {
                double a = CvInvoke.ContourArea(contours[i], false);
                if (a > largest_area)
                {
                    largest_area          = a;
                    largest_contour_index = i;
                }
            }

            // Convexity-defect drawing on the largest contour.
            VectorOfInt hull    = new VectorOfInt();
            Mat         defects = new Mat();

            if (contours.Size > 0)
            {
                // BUG FIX: DrawContours used to run unconditionally; with zero contours,
                // index 0 is out of range and throws.
                CvInvoke.DrawContours(finalImg, contours, largest_contour_index, redColor, 3, LineType.EightConnected, hierarchy);

                VectorOfPoint largestContour = new VectorOfPoint(contours[largest_contour_index].ToArray());
                CvInvoke.ConvexHull(largestContour, hull, false, true);
                CvInvoke.ConvexityDefects(largestContour, hull, defects);
                if (!defects.IsEmpty)
                {
                    // Defects arrive as a CV_32SC4 Mat; copy into a Matrix and split the
                    // channels: [0]=start index, [1]=end index into largestContour.
                    Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
                    defects.CopyTo(m);
                    Matrix <int>[] channels = m.Split();
                    // NOTE(review): row 0 is skipped - confirm this is intentional.
                    for (int i = 1; i < defects.Rows; ++i)
                    {
                        // Line between defect start/end points, plus a filled marker circle.
                        finalImg.Draw(new System.Drawing.Point[] { largestContour[channels[0][i, 0]], largestContour[channels[1][i, 0]] }, new Bgr(100, 255, 100), 2);
                        CvInvoke.Circle(finalImg, new System.Drawing.Point(largestContour[channels[0][i, 0]].X, largestContour[channels[0][i, 0]].Y), 7, new MCvScalar(255, 0, 0), -1);
                    }
                }
            }
            MainVideoBox.Dispatcher.BeginInvoke(new Action(() => MainVideoBox.Source = ToBitmapFinal(finalImg)));
        }
// Locates the 3x3 game board in the thresholded image, sizes/positions its squares
// and estimates the camera angle from Kinect depth data.
        /// <summary>
        /// Scans <paramref name="threshold2"/> for a contour that approximates to a
        /// quadrilateral (the game board). When found, sets the size and pixel position
        /// of every cell in the 3x3 <c>gboard</c> grid from the board's perimeter and
        /// centroid, then estimates the camera tilt angle from Kinect depth data.
        /// Sets <c>boardfound</c>/<c>boardfind</c> and notifies the user either way.
        /// </summary>
        /// <param name="threshold2">Binary image in which the board outline is searched.</param>
        /// <param name="HSV">HSV image (unused in the visible body).</param>
        /// <param name="Frame">Color frame (unused in the visible body).</param>
        private void findboard(Mat threshold2, Mat HSV, Image <Bgr, byte> Frame)
        {
            //create temporary image

            Mat temp = new Mat();

            // Work on a copy so FindContours does not modify the caller's image.
            threshold2.CopyTo(temp);

            //set gboard paramaters

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            //find contours of filtered image using openCV findContours function
            CvInvoke.FindContours(temp, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;

            for (int z = 0; z < count; z++)
            {
                using (VectorOfPoint contour = contours[z])
                    using (VectorOfPoint approxContour = new VectorOfPoint())
                    {
                        // Simplify the contour; 5% of the perimeter as epsilon.
                        CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                        if (CvInvoke.ContourArea(approxContour, false) > 800) //only consider contours with area greater than 800
                        {
                            //check to see if board is found
                            if (approxContour.Size == 4)
                            {
                                //find the length of each square and set their size ((board perimeter / 4) / 3)
                                // NOTE(review): the loop writes the same value 9 times; a
                                // single assignment would suffice.
                                for (int i = 0; i <= 2; i++)
                                {
                                    for (int j = 0; j <= 2; j++)
                                    {
                                        gboard[i, j].setsize(Convert.ToInt32((CvInvoke.ArcLength(contours[z], false)) / 12));
                                        BOARD_SIZE = (Convert.ToInt32((CvInvoke.ArcLength(contours[z], false)) / 12));
                                    }
                                }

                                // Centroid of the board from image moments (M10/M00, M01/M00).
                                MCvMoments moments = CvInvoke.Moments(contours[z]);
                                double     area    = moments.M00;
                                boardfind  = false;
                                boardfound = true;

                                //find centre of top left square
                                boardxorigin = (Convert.ToInt32(moments.M10 / area) - gboard[0, 0].getsize());
                                boardyorigin = (Convert.ToInt32(moments.M01 / area) - gboard[0, 0].getsize());

                                //set all other board square centre x and y pixel positions (assuming same place)
                                //for some reason y positions increase as camera pixels get lower!
                                for (int i = 0; i <= 2; i++)
                                {
                                    gboard[i, 0].setxpos(boardxorigin);
                                    gboard[0, i].setypos(boardyorigin);

                                    for (int j = 0; j <= 2; j++)
                                    {
                                        gboard[i, j].setxpos(gboard[0, 0].getxpos() + (j * BOARD_SIZE));
                                        gboard[i, j].setypos(gboard[0, 0].getypos() + (i * BOARD_SIZE));
                                    }
                                }

                                //real world board dimensions for a 300mmx300mm board(each pixel size or square size)
                                //gboard[0, 0].setpsize(100 / (gboard[0, 0].getsize()));
                            }
                            //set boardfind to be true
                        }
                    }
            }

            //Following code is to find camera angles
            #region
            //to find camera angles we will find the real world distances of 2 points on the gameboard and
            //perform some mathematic operations to find the angle of the camera (we can assume that the camera
            //will remain relatively horizontal so only Y values will be used
            //DepthImagePixel[] depthData = new DepthImagePixel[640 * 480]; // Data from the Depth Frame
            //DepthImagePoint[] result = new DepthImagePoint[640 * 480];

            // NOTE(review): this section runs even when no board was found, so the
            // gboard positions below may be stale/zero and the depth indices invalid.
            this.sensor.CoordinateMapper.MapDepthFrameToColorFrame(DepthImageFormat.Resolution640x480Fps30,
                                                                   this.depthPixels, ColorImageFormat.RgbResolution640x480Fps30, this.colorCoordinates);

            // Linear indices into the 640x480 depth buffer for two board cells
            // (top-middle and bottom-middle).
            int depthIndex1 = gboard[0, 1].getxpos() + (gboard[0, 1].getypos() * 640);
            int depthIndex2 = gboard[2, 1].getxpos() + (gboard[2, 1].getypos() * 640);

            //depth values for both points
            int depth1 = this.depthPixels[depthIndex1].Depth;
            int depth2 = this.depthPixels[depthIndex2].Depth;

            //adjusted Y pixels
            int colory1 = colorCoordinates[depthIndex1].Y;
            int colory2 = colorCoordinates[depthIndex2].Y;

            //depth of Y values of board
            //short depth1 = depthPixels[gboard[0, 1].getxpos()*gboard[0,1].getypos()].Depth;
            //short depth2 = depthPixels[gboard[2, 1].getxpos()*gboard[2,1].getypos()].Depth;

            //adjusted color pixels positions
            //int colory1 = colorCoordinates[gboard[0, 1].getxpos() * gboard[0, 1].getypos()].Y;
            //int colory2 = colorCoordinates[gboard[2, 1].getxpos() * gboard[2, 1].getypos()].Y;


            //real world y distances from the centre of the image (the constant is from established values for kinect) (mm)
            double realy1 = (colory1 - 240) * 0.001707 * depth1;
            double realy2 = (colory2 - 240) * 0.001707 * depth2;

            //calculating the size of a pixel based on the distance

            //now Performing basic triginometry (knowing that the true size between these two points
            //is 200mm)

            // NOTE(review): Math.Acos requires its argument in [-1, 1]; if
            // |realy2 - realy1| < 200 (or the points coincide) this yields NaN - confirm.
            angle = Math.Acos((200) / (realy2 - realy1));

            #endregion

            if (boardfound)
            {
                // Diagnostic dump of the computed board geometry.
                Console.WriteLine("0,1 x"); Console.WriteLine(gboard[0, 1].getxpos());
                Console.WriteLine("0,1 y"); Console.WriteLine(gboard[0, 1].getypos());

                Console.WriteLine("1,1 x"); Console.WriteLine(gboard[1, 1].getxpos());
                Console.WriteLine("1,1 y"); Console.WriteLine(gboard[1, 1].getypos());

                Console.WriteLine("2,1 x"); Console.WriteLine(gboard[2, 1].getxpos());
                Console.WriteLine("2,1 y"); Console.WriteLine(gboard[2, 1].getypos());


                Console.WriteLine(BOARD_SIZE);

                Console.WriteLine(angle);
                Console.WriteLine(depth1);
                Console.WriteLine(depth2);
                Console.WriteLine(colory1);
                Console.WriteLine(colory2);
                Console.WriteLine(realy1);
                Console.WriteLine(realy2);

                MessageBox.Show("gameboard found ready to play");

                //CvInvoke.DestroyWindow("BoardthresholdImage");
            }
            if (!boardfound)
            {
                boardfind = false;
                MessageBox.Show("No gameboard found, adjust board/ hsv settings");
            }
        }
示例#4
0
        /// <summary>
        /// 讀取圖片的顏色(依面積最大)
        /// </summary>
        /// <param name="src">Image</param>
        /// <returns>String color</returns>
        /// <summary>
        /// Determines the dominant color of an image: for each predefined HSV range,
        /// sums the areas of all external contours of the range mask and returns the
        /// name of the range with the largest total area ("none" if nothing matches).
        /// </summary>
        /// <param name="src">Input BGR image (not modified).</param>
        /// <returns>"white", "Orange", "green", "blue" or "none".</returns>
        public static string getcolor(Image <Bgr, byte> src)
        {
            // HSV lower/upper limits per color of interest.
            // (black and gray ranges were intentionally disabled in the original.)
            Dictionary <string, Hsv[]> colorrange = new Dictionary <string, Hsv[]>();

            //white
            Hsv whitelowerLimit = new Hsv(0, 0, 221);
            Hsv whiteupperLimit = new Hsv(180, 40, 255);
            colorrange.Add("white", new Hsv[] { whitelowerLimit, whiteupperLimit });

            //Orange
            Hsv OrangelowerLimit  = new Hsv(8, 40, 50);
            Hsv OrangewupperLimit = new Hsv(34, 255, 255);
            colorrange.Add("Orange", new Hsv[] { OrangelowerLimit, OrangewupperLimit });

            //green
            Hsv greenlowerLimit = new Hsv(33, 40, 50);
            Hsv greenupperLimit = new Hsv(87, 255, 255);
            colorrange.Add("green", new Hsv[] { greenlowerLimit, greenupperLimit });

            //blue
            Hsv bluelowerLimit = new Hsv(90, 40, 50);
            Hsv blueupperLimit = new Hsv(135, 255, 255);
            colorrange.Add("blue", new Hsv[] { bluelowerLimit, blueupperLimit });

            // Compute the total contour area per color and keep the largest.
            double maxsumArea = 0;
            String colorD     = "none";

            using (Image <Hsv, Byte> hsvsrc = src.Clone().Convert <Hsv, Byte>())
            {
                foreach (var item in colorrange)
                {
                    // BUG FIX: every per-color image is now disposed; the original
                    // leaked mask/threshold/dilate images on each iteration.
                    using (Image <Gray, Byte> mask_hsv = hsvsrc.InRange(item.Value[0], item.Value[1]))
                    using (Image <Gray, Byte> ThB = mask_hsv.ThresholdBinary(new Gray(127), new Gray(255)))
                    using (Image <Gray, Byte> dilate = ThB.Dilate(2))
                    using (VectorOfVectorOfPoint con = new VectorOfVectorOfPoint())
                    {
                        // BUG FIX: the hierarchy argument used to be `src`, which made
                        // FindContours write hierarchy data over the caller's input
                        // image. Pass null - the hierarchy is not needed here.
                        CvInvoke.FindContours(dilate, con, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                        // Sum the area of every connected contour of this color.
                        double sumarea = 0;
                        for (int i = 0; i < con.Size; i++)
                        {
                            sumarea = sumarea + CvInvoke.ContourArea(con[i]);
                        }
                        if (sumarea > maxsumArea)
                        {
                            colorD     = item.Key;
                            maxsumArea = sumarea;
                        }
                    }
                }
            }

            return(colorD);
        }
示例#5
0
        /// <summary>
        /// Computes the mean value of <paramref name="fMapMat"/> (a score/probability
        /// map) inside the polygon described by <paramref name="contours"/>.
        /// Returns 0 for an empty contour or on any failure.
        /// </summary>
        private static double GetScore(VectorOfPoint contours, Mat fMapMat)
        {
            try
            {
                Point[] points = contours.ToArray();
                // BUG FIX: an empty contour previously produced a negative ROI size
                // (xmin stayed at the 9999 sentinel) and relied on the catch block.
                if (points.Length == 0)
                {
                    return(0);
                }

                // Axis-aligned bounding box of the contour.
                int xmin = int.MaxValue;
                int xmax = int.MinValue;
                int ymin = int.MaxValue;
                int ymax = int.MinValue;
                foreach (Point point in points)
                {
                    if (point.X < xmin) { xmin = point.X; }
                    if (point.X > xmax) { xmax = point.X; }
                    if (point.Y < ymin) { ymin = point.Y; }
                    if (point.Y > ymax) { ymax = point.Y; }
                }

                int roiWidth  = xmax - xmin + 1;
                int roiHeight = ymax - ymin + 1;

                using (Image <Gray, float> bitmap = fMapMat.ToImage <Gray, float>())
                using (Image <Gray, float> roiBitmap = new Image <Gray, float>(roiWidth, roiHeight))
                {
                    float[,,] dataFloat = bitmap.Data;
                    float[,,] data      = roiBitmap.Data;

                    // Copy the ROI, clamping to the map's bounds. BUG FIX: the original
                    // used a per-pixel try/catch (with Console output) to absorb
                    // out-of-range indices, which is extremely slow; clamping the loop
                    // bounds achieves the same result.
                    int jStart = Math.Max(ymin, 0);
                    int iStart = Math.Max(xmin, 0);
                    int jEnd   = Math.Min(ymin + roiHeight, bitmap.Height);
                    int iEnd   = Math.Min(xmin + roiWidth, bitmap.Width);
                    for (int j = jStart; j < jEnd; j++)
                    {
                        for (int i = iStart; i < iEnd; i++)
                        {
                            data[j - ymin, i - xmin, 0] = dataFloat[j, i, 0];
                        }
                    }

                    // Build a mask of the contour's interior, shifted into ROI space,
                    // and return the mean score inside it.
                    using (Mat mask = Mat.Zeros(roiHeight, roiWidth, DepthType.Cv8U, 1))
                    {
                        List <Point> pts = new List <Point>();
                        foreach (Point point in points)
                        {
                            pts.Add(new Point(point.X - xmin, point.Y - ymin));
                        }

                        using (VectorOfPoint vp = new VectorOfPoint(pts.ToArray()))
                            using (VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint(vp))
                            {
                                CvInvoke.FillPoly(mask, vvp, new MCvScalar(1));
                            }

                        return(CvInvoke.Mean(roiBitmap, mask).V0);
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message + ex.StackTrace);
            }

            return(0);
        }
        /// <summary>
        /// Menu handler: finds external contours in imgInput, approximates each to a
        /// polygon, labels it by vertex count (Triangle / Square / Rectangle / Hexagon /
        /// Circle) at its centroid, and shows the annotated image in pictureBox2.
        /// </summary>
        private void detectShapeToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (imgInput == null)
            {
                return;
            }

            try
            {
                // Blur + inverse threshold: dark shapes on a light background become foreground.
                using (var temp = imgInput.SmoothGaussian(5).Convert <Gray, byte>().ThresholdBinaryInv(new Gray(230), new Gray(255)))
                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                using (Mat m = new Mat())
                {
                    CvInvoke.FindContours(temp, contours, m, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

                    for (int i = 0; i < contours.Size; i++)
                    {
                        double perimeter = CvInvoke.ArcLength(contours[i], true);
                        using (VectorOfPoint approx = new VectorOfPoint())
                        {
                            // 4% of the perimeter as the polygon-approximation tolerance.
                            CvInvoke.ApproxPolyDP(contours[i], approx, 0.04 * perimeter, true);

                            CvInvoke.DrawContours(imgInput, contours, i, new MCvScalar(0, 0, 255), 2);

                            // Shape centroid from image moments. BUG FIX: a degenerate
                            // (zero-area) contour previously divided by zero.
                            var moments = CvInvoke.Moments(contours[i]);
                            if (moments.M00 == 0)
                            {
                                continue;
                            }
                            int x = (int)(moments.M10 / moments.M00);
                            int y = (int)(moments.M01 / moments.M00);

                            // Classify by vertex count of the approximated polygon.
                            if (approx.Size == 3)
                            {
                                CvInvoke.PutText(imgInput, "Triangle", new Point(x, y),
                                                 Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 0, 255), 2);
                            }

                            if (approx.Size == 4)
                            {
                                // Near-unit aspect ratio distinguishes squares from rectangles.
                                Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);
                                double    ar   = (double)rect.Width / rect.Height;

                                if (ar >= 0.95 && ar <= 1.05)
                                {
                                    CvInvoke.PutText(imgInput, "Square", new Point(x, y),
                                                     Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 0, 255), 2);
                                }
                                else
                                {
                                    CvInvoke.PutText(imgInput, "Rectangle", new Point(x, y),
                                                     Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 0, 255), 2);
                                }
                            }

                            if (approx.Size == 6)
                            {
                                CvInvoke.PutText(imgInput, "Hexagon", new Point(x, y),
                                                 Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 0, 255), 2);
                            }

                            if (approx.Size > 6)
                            {
                                CvInvoke.PutText(imgInput, "Circle", new Point(x, y),
                                                 Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 0, 255), 2);
                            }
                        }
                    }

                    // Refresh the preview once, after all annotations are drawn
                    // (the original reassigned it on every loop iteration).
                    pictureBox2.Image = imgInput.Bitmap;
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
示例#7
0
        /// <summary>
        /// Walks the contour hierarchy starting at <paramref name="idx"/> (following
        /// next-sibling links), looking for license-plate candidates: contours with
        /// &gt;= 3 children, area &gt; 100 and width/height ratio in (3, 8). Each candidate
        /// is deskewed, resized, filtered and OCR'd; results are appended to the four
        /// output lists. Recurses into children when a contour fails a test.
        /// </summary>
        /// <param name="contours">All contours found in the image.</param>
        /// <param name="hierachy">Contour hierarchy: [idx,0]=next sibling, [idx,2]=first child.</param>
        /// <param name="idx">Index of the first contour to examine at this level.</param>
        /// <param name="gray">Grayscale source image the plate is extracted from.</param>
        /// <param name="canny">Edge image (passed through to recursive calls).</param>
        /// <param name="licensePlateImagesList">Output: deskewed plate images.</param>
        /// <param name="filteredLicensePlateImagesList">Output: filtered plate images.</param>
        /// <param name="detectedLicensePlateRegionList">Output: plate regions in the source.</param>
        /// <param name="licenses">Output: OCR'd plate strings.</param>
        private void FindLicensePlate(
            VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
            List <IInputOutputArray> licensePlateImagesList, List <IInputOutputArray> filteredLicensePlateImagesList, List <RotatedRect> detectedLicensePlateRegionList,
            List <String> licenses)
        {
            // Iterate over this contour and all its next siblings.
            for (; idx >= 0; idx = hierachy[idx, 0])
            {
                int numberOfChildren = GetNumberOfChildren(hierachy, idx);
                //if it does not contains any children (charactor), it is not a license plate region
                if (numberOfChildren == 0)
                {
                    continue;
                }

                using (VectorOfPoint contour = contours[idx])
                {
                    if (CvInvoke.ContourArea(contour) > 100)
                    {
                        if (numberOfChildren < 3)
                        {
                            //If the contour has less than 3 children, it is not a license plate (assuming license plate has at least 3 charactor)
                            //However we should search the children of this contour to see if any of them is a license plate
                            FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                                             filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                            continue;
                        }

                        // Normalize the rotated box so its angle lies in [-45, 45],
                        // swapping width/height accordingly.
                        RotatedRect box = CvInvoke.MinAreaRect(contour);
                        if (box.Angle < -45.0)
                        {
                            float tmp = box.Size.Width;
                            box.Size.Width  = box.Size.Height;
                            box.Size.Height = tmp;
                            box.Angle      += 90.0f;
                        }
                        else if (box.Angle > 45.0)
                        {
                            float tmp = box.Size.Width;
                            box.Size.Width  = box.Size.Height;
                            box.Size.Height = tmp;
                            box.Angle      -= 90.0f;
                        }

                        double whRatio = (double)box.Size.Width / box.Size.Height;
                        if (!(3.0 < whRatio && whRatio < 8.0))
                        //if (!(1.0 < whRatio && whRatio < 2.0))
                        {
                            //if the width height ratio is not in the specific range,it is not a license plate
                            //However we should search the children of this contour to see if any of them is a license plate
                            //Contour<Point> child = contours.VNext;
                            if (hierachy[idx, 2] > 0)
                            {
                                FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                                                 filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                            }
                            continue;
                        }

                        using (UMat tmp1 = new UMat())
                            using (UMat tmp2 = new UMat())
                            {
                                // Deskew: map the box's corners to an axis-aligned rectangle.
                                PointF[] srcCorners = box.GetVertices();

                                PointF[] destCorners = new PointF[] {
                                    new PointF(0, box.Size.Height - 1),
                                    new PointF(0, 0),
                                    new PointF(box.Size.Width - 1, 0),
                                    new PointF(box.Size.Width - 1, box.Size.Height - 1)
                                };

                                using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                                {
                                    CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                                }

                                //resize the license plate such that the front is ~ 10-12. This size of front results in better accuracy from tesseract
                                Size   approxSize = new Size(200, 190);
                                double scale      = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                                Size   newSize    = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                                CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);

                                //removes some pixels from the edge
                                int       edgePixelSize = 4;
                                Rectangle newRoi        = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                                                                        tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                                UMat plate = new UMat(tmp2, newRoi);

                                UMat filteredPlate = FilterPlate(plate);

                                Tesseract.Character[] words;
                                StringBuilder         strBuilder = new StringBuilder();
                                using (UMat tmp = filteredPlate.Clone())
                                {
                                    _ocr.Recognize(tmp);
                                    words = _ocr.GetCharacters();

                                    // NOTE(review): when OCR finds no characters, `plate`
                                    // and `filteredPlate` are never disposed (leak) - confirm.
                                    if (words.Length == 0)
                                    {
                                        continue;
                                    }

                                    for (int i = 0; i < words.Length; i++)
                                    {
                                        strBuilder.Append(words[i].Text);
                                    }
                                }

                                // Candidate accepted: record the OCR text, images and region.
                                licenses.Add(strBuilder.ToString());
                                licensePlateImagesList.Add(plate);
                                filteredLicensePlateImagesList.Add(filteredPlate);
                                detectedLicensePlateRegionList.Add(box);
                            }
                    }
                }
            }
        }
        /// <summary>
        /// Kinect AllFramesReady handler: copies the depth, color and skeleton frames,
        /// locates a colored object in the color frame via HSV range thresholding, maps
        /// its position into depth space to obtain a Z coordinate, then renders the color
        /// image, tracked skeletons and the detected object into the drawing group.
        /// Closes the window (with <c>resultado = true</c>) once the right hand reaches
        /// the detected object.
        /// </summary>
        /// <param name="sender">Event source (the Kinect sensor).</param>
        /// <param name="e">Container exposing the depth, color and skeleton frames.</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            Skeleton[] skeletons = new Skeleton[0];

            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame framesDistancia = e.OpenDepthImageFrame())
            {
                // No depth frame available this tick: skip the whole update.
                if (framesDistancia == null)
                {
                    return;
                }

                // NOTE(review): assumes this.depthPixels was already allocated elsewhere
                // (e.g. on sensor start) — confirm, since the arrays below are lazily sized.
                framesDistancia.CopyDepthImagePixelDataTo(this.depthPixels);

                depthReceived = true;


                // Lazily allocate the raw depth and depth-color buffers on first frame.
                if (datosDistancia == null)
                {
                    datosDistancia = new short[framesDistancia.PixelDataLength];
                }

                if (colorImagenDistancia == null)
                {
                    colorImagenDistancia = new byte[framesDistancia.PixelDataLength * 4];
                }

                framesDistancia.CopyPixelDataTo(datosDistancia);
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    // NOTE(review): colorReceived is set but never read in this method.
                    colorReceived = true;

                    System.Drawing.Bitmap bmp             = EmguCVHelper.ImageToBitmap(colorFrame);
                    Image <Hsv, Byte>     currentFrameHSV = new Image <Hsv, byte>(bmp);

                    // NOTE(review): grayFrame is computed but never used.
                    Image <Gray, Byte> grayFrame = currentFrameHSV.Convert <Gray, Byte>();

                    // Binary mask of pixels inside the configured HSV color range.
                    Image <Gray, Byte> imageHSVDest = currentFrameHSV.InRange(lowerLimit, upperLimit);
                    // NOTE(review): Image<>.Erode is non-destructive and returns a new
                    // image; this call discards its result, so it has no effect — confirm
                    // whether "imageHSVDest = imageHSVDest.Erode(...)" was intended.
                    imageHSVDest.Erode(100);
                    VectorOfVectorOfPoint vectorOfPoint = EmguCVHelper.FindContours(imageHSVDest);

                    // Take each sufficiently large contour as the tracked object and
                    // record the center of its bounding box (ObjetoX / ObjetoY).
                    for (int i = 0; i < vectorOfPoint.Size; i++)
                    {
                        var contour = vectorOfPoint[i];
                        var area    = CvInvoke.ContourArea(contour);
                        if (area > 100)
                        {
                            System.Drawing.Rectangle rec = CvInvoke.BoundingRectangle(contour);
                            Point p1 = new Point(rec.X, rec.Y);
                            Point p2 = new Point(rec.X + rec.Width, rec.Y + rec.Height);
                            ObjetoX = (p1.X + p2.X) / 2;
                            ObjetoY = (p1.Y + p2.Y) / 2;

                            if (true == depthReceived)
                            {
                                this.sensor.CoordinateMapper.MapDepthFrameToColorFrame(
                                    DepthFormat,
                                    this.depthPixels,
                                    ColorFormat,
                                    this.colorCoordinates);


                                // NOTE(review): ObjetoX/ObjetoY are color-space coordinates
                                // but are indexed against depthWidth here — verify the two
                                // streams share a resolution, otherwise this reads the
                                // wrong depth pixel.
                                int             depthIndex = (int)ObjetoX + ((int)ObjetoY * this.depthWidth);
                                DepthImagePixel depthPixel = this.depthPixels[depthIndex];


                                // Raw depth packs player index in the low 3 bits; shift
                                // them out to get distance in millimeters.
                                ObjetoZ = datosDistancia[depthIndex] >> 3;

                                // NOTE(review): X and Y are computed but never used.
                                int X = (int)ObjetoX / this.colorToDepthDivisor;
                                int Y = (int)ObjetoY / this.colorToDepthDivisor;
                            }


                            // With a valid depth, convert the pixel position to a skeleton-
                            // space point so it can be compared against joint positions.
                            if (ObjetoZ > 0)
                            {
                                skelObjeto = DistanceHelper.ObtenerSkelPoint((int)ObjetoX, (int)ObjetoY,
                                                                             ObjetoZ, this.sensor);

                                flagObjeto = true;
                            }
                        }
                    }

                    // NOTE(review): redundant — the same copy was already done at the top
                    // of this using block.
                    colorFrame.CopyPixelDataTo(this.colorPixels);
                    // Write the pixel data into our bitmap
                    this.colorBitmap.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                        this.colorPixels,
                        this.colorBitmap.PixelWidth * sizeof(int),
                        0);
                }
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(skeletons);
                }
            }

            using (DrawingContext dc = this.drawingGroup.Open())
            {
                // Draw a transparent background to set the render size
                //dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, RenderWidth, RenderHeight));
                dc.DrawImage(this.colorBitmap, new Rect(0.0, 0.0, RenderWidth, RenderHeight));
                if (skeletons.Length != 0)
                {
                    foreach (Skeleton skel in skeletons)
                    {
                        if (skel.TrackingState == SkeletonTrackingState.Tracked)
                        {
                            this.DrawBonesAndJoints(skel, dc);
                        }

                        // Read the right hand, elbow and shoulder joints:
                        ManoDerecha = skel.Joints[JointType.HandRight];
                        //Joint munecaDer = skel.Joints[JointType.WristRight];
                        CodoDerecho   = skel.Joints[JointType.ElbowRight];
                        HombroDerecho = skel.Joints[JointType.ShoulderRight];

                        // Draw a black dot over the detected object.
                        Point objeto = new Point(this.ObjetoX, this.ObjetoY);
                        dc.DrawEllipse(Brushes.Black, new Pen(Brushes.Black, 5), objeto, 5, 5);

                        if ((HombroDerecho.TrackingState == JointTrackingState.Tracked) &&
                            (ManoDerecha.TrackingState == JointTrackingState.Tracked) &&
                            (CodoDerecho.TrackingState == JointTrackingState.Tracked))
                        {
                            // Compute the target angles once: the object has been seen but
                            // the skeleton measurement has not been taken yet.
                            if (flagObjeto && !flagSkeleton)
                            {
                                CalcularAngulosFinales();
                            }

                            //Console.WriteLine($"Mano X Y Z {handRight.Position.X} {handRight.Position.Y} {handRight.Position.Z}");
                            //Console.WriteLine($"Objeto X Y Z {skelObjeto.X} {skelObjeto.Y} {skelObjeto.Z}");

                            if (DistanceHelper.ObtenerDistancia(ManoDerecha, skelObjeto) < 0.1)
                            {
                                // The hand reached the object: report success and close
                                // the window so the collected data is sent.
                                resultado = true;
                                this.Close();
                            }
                        }
                    }
                }

                // prevent drawing outside of our render area
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, RenderWidth, RenderHeight));
            }
        }
        /// <summary>
        /// Finds the largest contour in <paramref name="detectionFrame"/> (a binary image),
        /// analyses its convex hull and convexity defects to recognise a hand gesture, and
        /// annotates <paramref name="displayFrame"/> with the detected features.
        /// Side effects: updates detectGesture, gestualNum, gestualNumRepite, detectClick
        /// and centerSensor.
        /// </summary>
        /// <param name="detectionFrame">Binary image used for contour extraction.</param>
        /// <param name="displayFrame">Frame that receives the drawn annotations.</param>
        /// <returns>The annotated <paramref name="displayFrame"/>.</returns>
        private Mat DetectObject(Mat detectionFrame, Mat displayFrame)
        {
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                detectGesture = false;
                // Build the list of contours (no hierarchy needed).
                CvInvoke.FindContours(detectionFrame, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

                if (contours.Size > 0)
                {
                    // Select the contour with the largest area.
                    double maxArea = 0;
                    int    chosen  = 0;
                    for (int i = 0; i < contours.Size; i++)
                    {
                        double area = CvInvoke.ContourArea(contours[i]);
                        if (area > maxArea)
                        {
                            maxArea = area;
                            chosen  = i;
                        }
                    }

                    // Draw a frame around the detected object.
                    MarkDetectedObject(displayFrame, contours[chosen], maxArea);

                    using (VectorOfPoint hullPoints = new VectorOfPoint())
                    using (VectorOfInt hullInt = new VectorOfInt())
                    using (Mat defects = new Mat())
                    {
                        CvInvoke.ConvexHull(contours[chosen], hullPoints, true);
                        CvInvoke.ConvexHull(contours[chosen], hullInt, false);

                        // A hull of more than 3 points is treated as a gesture candidate.
                        if (hullInt.Size > 3)
                        {
                            detectGesture = true;
                        }
                        CvInvoke.ConvexityDefects(contours[chosen], hullInt, defects);

                        Rectangle box = CvInvoke.BoundingRectangle(hullPoints);
                        CvInvoke.Rectangle(displayFrame, box, drawingColor);

                        Point center = new Point(box.X + box.Width / 2, box.Y + box.Height / 2);

                        if (!defects.IsEmpty)
                        {
                            // The defects Mat cannot be read directly; copy it into a Matrix<>.
                            using (Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels))
                            {
                                defects.CopyTo(m);
                                gestualNum = 0;
                                int x = int.MaxValue, y = int.MaxValue;
                                for (int i = 0; i < m.Rows; i++)
                                {
                                    // Each defect row holds (startIdx, endIdx, farIdx, depth).
                                    int   startIdx   = m.Data[i, 0];
                                    int   endIdx     = m.Data[i, 1];
                                    int   farIdx     = m.Data[i, 2];
                                    Point startPoint = contours[chosen][startIdx];
                                    Point endPoint   = contours[chosen][endIdx];
                                    Point farPoint   = contours[chosen][farIdx];
                                    CvInvoke.Circle(displayFrame, endPoint, 3, new MCvScalar(0, 255, 255));
                                    CvInvoke.Circle(displayFrame, startPoint, 3, new MCvScalar(255, 255, 0));

                                    // Track the topmost hull end point as the pointer position.
                                    if (endPoint.Y < y)
                                    {
                                        x = endPoint.X;
                                        y = endPoint.Y;
                                    }

                                    // Far points close to the hull center count toward the
                                    // gesture "finger" total.
                                    double distance = Math.Round(Math.Sqrt(Math.Pow((center.X - farPoint.X), 2) + Math.Pow((center.Y - farPoint.Y), 2)), 1);
                                    if (distance < box.Height * 0.3)
                                    {
                                        CvInvoke.Circle(displayFrame, farPoint, 10, new MCvScalar(255, 0, 0), 4);
                                        gestualNum++;
                                    }
                                    // Connect the defect's start and end points with a line.
                                    CvInvoke.Line(displayFrame, startPoint, endPoint, new MCvScalar(0, 255, 255));
                                }

                                // Require the >=4-finger gesture on 3 consecutive frames
                                // before toggling the click state (debounce).
                                if (gestualNum >= 4)
                                {
                                    gestualNumRepite++;
                                }
                                else
                                {
                                    gestualNumRepite = 0;
                                }
                                if (gestualNumRepite == 3)
                                {
                                    Console.WriteLine("numero gestual 5 Click");
                                    gestualNumRepite = 0;

                                    detectClick = !detectClick;
                                }
                                Console.WriteLine("numero gestual " + gestualNum);

                                // Remember and mark the pointer position.
                                centerSensor.X = x;
                                centerSensor.Y = y;
                                CvInvoke.Circle(displayFrame, new Point(x, y), 20, new MCvScalar(255, 0, 255), 2);
                                return displayFrame;
                            }
                        }
                    }
                }

                return displayFrame;
            }
        }
示例#10
0
        /// <summary>
        /// Computes the structural similarity (SSIM, Wang et al.) between two equally
        /// sized BGR images using an 11x11 Gaussian window (sigma 1.5). Caches the
        /// per-channel and overall scores in <c>SSIM</c>, counts distinct difference
        /// regions into <c>NumDifferences</c>, and optionally saves a difference image
        /// to <c>ImageDifferent</c>.
        /// </summary>
        /// <param name="img1_temp">First image.</param>
        /// <param name="img2_temp">Second image; must match the first in size and channel count.</param>
        /// <returns>The overall SSIM score (mean of the blue, green and red channel scores).</returns>
        public double CalcSSIM(Image <Bgr, Byte> img1_temp, Image <Bgr, Byte> img2_temp)
        {
            // Return the cached score when this pair was already evaluated (-1 = unset).
            if (SSIM[(int)RGBIndex.All] != -1)
            {
                return SSIM[(int)RGBIndex.All];
            }

            if (img1_temp.Size != img2_temp.Size || img1_temp.NumberOfChannels != img2_temp.NumberOfChannels)
            {
                throw outOfSizeException;
            }

            Size imageSize = new Size(img1_temp.Width, img1_temp.Height);

            // Work in 32-bit float to avoid clipping in the intermediate products.
            Image <Bgr, Single> img1 = img1_temp.ConvertScale <Single>(1.0, 1);
            Image <Bgr, Single> img2 = img2_temp.ConvertScale <Single>(1.0, 1);
            // NOTE(review): "diff" is only a copy of img2 — nothing is ever drawn into it
            // before it is saved below; confirm this is the intended output.
            Image <Bgr, Byte> diff = img2_temp.Copy();

            Image <Bgr, Single> img1_sq   = img1.Pow(2);
            Image <Bgr, Single> img2_sq   = img2.Pow(2);
            Image <Bgr, Single> img1_img2 = img1.Mul(img2);

            // Local means.
            Image <Bgr, Single> mu1 = img1.SmoothGaussian(11, 11, 1.5, 0);
            Image <Bgr, Single> mu2 = img2.SmoothGaussian(11, 11, 1.5, 0);

            Image <Bgr, Single> mu1_sq  = mu1.Pow(2);
            Image <Bgr, Single> mu2_sq  = mu2.Pow(2);
            Image <Bgr, Single> mu1_mu2 = mu1.Mul(mu2);

            // Local variances/covariance: E[x^2] - E[x]^2 and E[xy] - E[x]E[y].
            Image <Bgr, Single> sigma1_sq = img1_sq.SmoothGaussian(11, 11, 1.5, 0);

            sigma1_sq = sigma1_sq.AddWeighted(mu1_sq, 1, -1, 0);

            Image <Bgr, Single> sigma2_sq = img2_sq.SmoothGaussian(11, 11, 1.5, 0);

            sigma2_sq = sigma2_sq.AddWeighted(mu2_sq, 1, -1, 0);

            Image <Bgr, Single> sigma12 = img1_img2.SmoothGaussian(11, 11, 1.5, 0);

            sigma12 = sigma12.AddWeighted(mu1_mu2, 1, -1, 0);

            // (2*mu1_mu2 + C1)
            Image <Bgr, Single> temp1 = mu1_mu2.ConvertScale <Single>(2, 0);

            temp1 = temp1.Add(new Bgr(C1, C1, C1));

            // (2*sigma12 + C2)
            Image <Bgr, Single> temp2 = sigma12.ConvertScale <Single>(2, 0);

            temp2 = temp2.Add(new Bgr(C2, C2, C2));

            // ((2*mu1_mu2 + C1).*(2*sigma12 + C2))
            Image <Bgr, Single> temp3 = temp1.Mul(temp2);

            // (mu1_sq + mu2_sq + C1)
            temp1 = mu1_sq.Add(mu2_sq);
            temp1 = temp1.Add(new Bgr(C1, C1, C1));

            // (sigma1_sq + sigma2_sq + C2)
            temp2 = sigma1_sq.Add(sigma2_sq);
            temp2 = temp2.Add(new Bgr(C2, C2, C2));

            // ((mu1_sq + mu2_sq + C1).*(sigma1_sq + sigma2_sq + C2))
            temp1 = temp1.Mul(temp2, 1);

            // ((2*mu1_mu2 + C1).*(2*sigma12 + C2))./((mu1_sq + mu2_sq + C1).*(sigma1_sq + sigma2_sq + C2))
            Image <Bgr, Single> ssim_map = new Image <Bgr, float>(imageSize);

            CvInvoke.Divide(temp3, temp1, ssim_map);

            // Per-channel SSIM is the mean of the SSIM map (sdv is required by the API
            // but unused here).
            Bgr       avg;
            MCvScalar sdv;

            ssim_map.AvgSdv(out avg, out sdv);

            SSIM[(int)RGBIndex.Red]   = avg.Red;
            SSIM[(int)RGBIndex.Green] = avg.Green;
            SSIM[(int)RGBIndex.Blue]  = avg.Blue;
            SSIM[(int)RGBIndex.All]   = (avg.Red + avg.Green + avg.Blue) / 3.0;

            // NOTE(review): exact float equality — only hit for truly identical images.
            if (SSIM[(int)RGBIndex.All] == 1) // Same image
            {
                NumDifferences = 0;
                return SSIM[(int)RGBIndex.All];
            }

            // Turn low-similarity regions into blobs and count them as differences.
            Image <Gray, Single> gray32 = new Image <Gray, float>(imageSize);

            CvInvoke.CvtColor(ssim_map, gray32, ColorConversion.Bgr2Gray);

            Image <Gray, Byte> gray8 = gray32.ConvertScale <Byte>(255, 0);
            Image <Gray, Byte> gray1 = gray8.ThresholdBinaryInv(new Gray(254), new Gray(255));

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(gray1, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                NumDifferences = contours.Size;
            }

            if (ImageDifferent == null)
            {
                return SSIM[(int)RGBIndex.All];
            }

            try
            {
                diff.Save(ImageDifferent);
            }
            catch (Exception ex)
            {
                throw new ImageDiffException(ex.Message);
            }
            return SSIM[(int)RGBIndex.All];
        }
示例#11
0
        /// <summary>
        /// First ball-handling rule: decides whether the ball is currently held in the
        /// detected hand by rasterising both shapes into masks and testing for overlap.
        /// Side effects: updates lastFrameBallInHand and ballIsInHand, and advances
        /// <c>rule</c> to <c>InitiateRule.SecondRule</c> when the ball leaves the hand.
        /// </summary>
        /// <param name="ball">Detected ball circle (radius 0 when no ball was found).</param>
        /// <param name="detectedHand">Contour of the detected hand (empty when none).</param>
        /// <param name="image">Reference frame, used only for size/depth/channel layout.</param>
        /// <returns>True when the ball mask and the hand mask overlap.</returns>
        private bool DetectFirstRule(CircleF ball, VectorOfPoint detectedHand, Mat image)
        {
            bool ballInHand;

            using (Mat circleMask = new Mat(image.Rows, image.Cols, image.Depth, image.NumberOfChannels))
            using (Mat handMask = new Mat(image.Rows, image.Cols, image.Depth, image.NumberOfChannels))
            using (Mat overlap = new Mat(image.Rows, image.Cols, image.Depth, image.NumberOfChannels))
            {
                circleMask.SetTo(new MCvScalar(0));
                handMask.SetTo(new MCvScalar(0));

                // NOTE(review): despite the name, this is the *maximum* radius observed
                // over the last half second, not an average — confirm the intent.
                int averageBallRadius = lastHalfSecondRadiuses.Count > 0 ? lastHalfSecondRadiuses.Max() : (int)ball.Radius;

                if (ball.Radius > 0)
                {
                    CvInvoke.Circle(circleMask, System.Drawing.Point.Round(ball.Center), averageBallRadius, new Bgr(System.Drawing.Color.White).MCvScalar, -1);
                }

                if (detectedHand.Size != 0)
                {
                    // Fill the convex hull of the hand contour as a solid mask.
                    using (VectorOfPoint hull = new VectorOfPoint())
                    {
                        CvInvoke.ConvexHull(detectedHand, hull, false, true);
                        using (var cont = new VectorOfVectorOfPoint(hull))
                        {
                            CvInvoke.DrawContours(handMask, cont, 0, new Bgr(System.Drawing.Color.White).MCvScalar, -1);
                        }
                    }
                }

                // Any nonzero pixel in the intersection means ball and hand overlap.
                CvInvoke.BitwiseAnd(circleMask, handMask, overlap);
                CvInvoke.CvtColor(overlap, overlap, ColorConversion.Hsv2Bgr);
                CvInvoke.CvtColor(overlap, overlap, ColorConversion.Bgr2Gray);
                ballInHand = CvInvoke.CountNonZero(overlap) > 0;
            }

            if (ballInHand)
            {
                lastFrameBallInHand = ball;
            }
            // Transition: the ball was in the hand last frame and just left it.
            if (!ballInHand && ballIsInHand)
            {
                rule = InitiateRule.SecondRule;
            }
            ballIsInHand = ballInHand;
            return ballInHand;
        }
示例#12
0
      /// <summary>
      /// Recursively walks the contour hierarchy looking for octagonal stop-sign
      /// candidates, verifies each candidate via keypoint matching against the model,
      /// and collects the accepted regions.
      /// </summary>
      /// <param name="img">Source BGR image the contours were extracted from.</param>
      /// <param name="stopSignList">Receives the masked grayscale patch of each accepted sign.</param>
      /// <param name="boxList">Receives the bounding box of each accepted sign.</param>
      /// <param name="contours">All contours of the image.</param>
      /// <param name="hierachy">Contour hierarchy ([idx,0] = next sibling, [idx,2] = first child).</param>
      /// <param name="idx">Index of the first contour to examine.</param>
      private void FindStopSign(Mat img, List<Mat> stopSignList, List<Rectangle> boxList, VectorOfVectorOfPoint contours, int[,] hierachy, int idx)
      {
         // Iterate over this contour and all of its siblings.
         for (; idx >= 0; idx = hierachy[idx, 0])
         {
            using (VectorOfPoint c = contours[idx])
            using (VectorOfPoint approx = new VectorOfPoint())
            {
               CvInvoke.ApproxPolyDP(c, approx, CvInvoke.ArcLength(c, true) * 0.02, true);
               double area = CvInvoke.ContourArea(approx);
               if (area > 200)
               {
                  double ratio = CvInvoke.MatchShapes(_octagon, approx, Emgu.CV.CvEnum.ContoursMatchType.I3);

                  if (ratio > 0.1) // not a good match of contour shape
                  {
                     // Not octagon-like itself; check its children instead.
                     if (hierachy[idx, 2] >= 0)
                        FindStopSign(img, stopSignList, boxList, contours, hierachy, hierachy[idx, 2]);
                     continue;
                  }

                  Rectangle box = CvInvoke.BoundingRectangle(c);

                  // Grayscale patch of the candidate region.
                  Mat candidate = new Mat();
                  using (Mat tmp = new Mat(img, box))
                     CvInvoke.CvtColor(tmp, candidate, ColorConversion.Bgr2Gray);

                  // Set the value of pixels not in the contour region to zero.
                  using (Mat mask = new Mat(candidate.Size.Height, candidate.Width, DepthType.Cv8U, 1))
                  {
                     mask.SetTo(new MCvScalar(0));
                     CvInvoke.DrawContours(mask, contours, idx, new MCvScalar(255), -1, LineType.EightConnected, null, int.MaxValue, new Point(-box.X, -box.Y));

                     // Binarise around the in-contour mean, then black out the outside.
                     double mean = CvInvoke.Mean(candidate, mask).V0;
                     CvInvoke.Threshold(candidate, candidate, mean, 255, ThresholdType.Binary);
                     CvInvoke.BitwiseNot(candidate, candidate);
                     CvInvoke.BitwiseNot(mask, mask);

                     candidate.SetTo(new MCvScalar(0), mask);
                  }

                  int minMatchCount = 8;
                  double uniquenessThreshold = 0.8;
                  bool accepted = false;

                  using (VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint())
                  using (Mat observedDescriptor = new Mat())
                  {
                     _detector.DetectAndCompute(candidate, null, observedKeypoints, observedDescriptor, false);

                     if (observedKeypoints.Size >= minMatchCount)
                     {
                        int k = 2;

                        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        {
                           _modelDescriptorMatcher.KnnMatch(observedDescriptor, matches, k, null);
                           using (Mat uniquenessMask = new Mat(matches.Size, 1, DepthType.Cv8U, 1))
                           {
                              uniquenessMask.SetTo(new MCvScalar(255));
                              Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, uniquenessMask);

                              // Accept only when enough unique matches survive.
                              if (CvInvoke.CountNonZero(uniquenessMask) >= minMatchCount)
                              {
                                 boxList.Add(box);
                                 stopSignList.Add(candidate);
                                 accepted = true;
                              }
                           }
                        }
                     }
                  }

                  // The original code leaked rejected candidate Mats; release them.
                  if (!accepted)
                     candidate.Dispose();
               }
            }
         }
      }
示例#13
0
        /// <summary>
        /// Locates the fish contour in a raw camera frame: subtracts the background,
        /// thresholds the difference, and picks the first contour whose size and
        /// position pass the filters below.
        /// </summary>
        /// <param name="image_raw">Current camera frame.</param>
        /// <param name="background">Background model subtracted from the frame.</param>
        /// <param name="tc">Tank centre; contours further than 460 px from it are ignored (large frames only).</param>
        /// <param name="blist">Known blob list; contours sitting too close to one are rejected.</param>
        /// <param name="control">When true, the proximity check against <paramref name="blist"/> is skipped.</param>
        /// <returns>
        /// Contour properties of the detected fish. When no contour qualifies, the
        /// returned object keeps its default (zeroed) fields — callers must treat a
        /// (0,0) centre as "not found".
        /// </returns>
        static ContourProperties FishContour(Mat image_raw, Mat background, Point tc, List <ContourProperties> blist, bool control)
        {
            ContourProperties contprops      = new ContourProperties();
            bool              fishcont_found = false;
            int               height         = 0;
            Point             contourCOM     = new Point();
            Point             contour_center = new Point();
            Size              frsize         = new Size(image_raw.Width, image_raw.Height);
            ThresholdType     ttype          = 0; // presumably ThresholdType.Binary — confirm

            using (Mat image = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1))
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            using (Mat hierarchy = new Mat())
            {
                CvInvoke.AbsDiff(image_raw, background, image);
                // This should be 30 as the LB. Switched to 25 to see if paramecia can be
                // picked up. (To debug, show "image" in a named window here.)
                CvInvoke.Threshold(image, image, 25, 255, ttype);
                CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);

                for (int ind = 0; ind < contours.Size; ind++)
                {
                    // Centre of mass from image moments.
                    MCvMoments com = CvInvoke.Moments(contours[ind]);
                    contourCOM.X = (int)(com.M10 / com.M00);
                    contourCOM.Y = (int)(com.M01 / com.M00);

                    // Geometric centre and "height" = the larger bounding-box dimension.
                    Rectangle bounding_rect = CvInvoke.BoundingRectangle(contours[ind]);
                    contour_center.X = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
                    contour_center.Y = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
                    if (bounding_rect.Width > bounding_rect.Height)
                    {
                        height = bounding_rect.Width;
                    }
                    else
                    {
                        height = bounding_rect.Height;
                    }

                    // Fish-sized contours only.
                    if (height < 60 && height > 8)
                    {
                        // Extra positional filters for full-resolution frames.
                        if (image_raw.Width > 1000)
                        {
                            if (!control)
                            {
                                // Reject contours sitting on top of a known blob.
                                bool tooclose = false;
                                for (int i = 0; i < blist.Count; i++)
                                {
                                    if (VectorMag(blist[i].center, contourCOM) - (blist[i].height / 2) < 3)
                                    {
                                        tooclose = true;
                                        break;
                                    }
                                }
                                if (tooclose)
                                {
                                    continue;
                                }
                            }
                            // Do not look for fish outside the tank.
                            if (VectorMag(contourCOM, tc) > 460)
                            {
                                continue;
                            }
                            if (contourCOM.X < 0 || contourCOM.Y < 0)
                            {
                                continue;
                            }
                        }
                        fishcont_found = true;
                        break;
                    }
                }
            }

            // Only populate the result when a qualifying contour was actually found,
            // so callers never see a height without a valid coordinate.
            if (fishcont_found)
            {
                contprops.com    = contourCOM;
                contprops.height = height;
                contprops.center = contour_center;
            }
            return contprops;
        }
示例#14
0
        /// <summary>
        /// Finds convexity-defect feature points on the first contour of the
        /// binarized <c>InputImage</c>, filters them geometrically against either the
        /// min-area rectangle (PropertyBox checked) or the min-enclosing circle, draws
        /// the accepted points, and shows the annotated image in <c>OutputImageBox</c>.
        /// </summary>
        private void ImageRecognition_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                Image <Gray, byte> OutputImage = InputImage.Convert <Gray, byte>().ThresholdBinary(new Gray(100), new Gray(255));
                int i;

                // Optional noise filtering (erosion then dilation = morphological opening).
                if (FilterBox.IsChecked == true)
                {
                    CvInvoke.Erode(OutputImage, OutputImage, new Mat(), new System.Drawing.Point(-1, -1), 5, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
                    CvInvoke.Dilate(OutputImage, OutputImage, new Mat(), new System.Drawing.Point(-1, -1), 5, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
                }

                // Contour search.
                VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
                Mat hierarchy = new Mat();
                CvInvoke.FindContours(OutputImage,
                                      contours,
                                      hierarchy,
                                      Emgu.CV.CvEnum.RetrType.Tree,
                                      Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

                // Color canvas all overlays are drawn onto.
                Image <Bgr, byte> ResultImage = OutputImage.Convert <Bgr, byte>();

                // Draw every found contour.
                CvInvoke.DrawContours(ResultImage, contours, -1, new MCvScalar(255, 10, 10), 3);

                // Convex hull (as indices) of the first contour.
                VectorOfPoint contour = new VectorOfPoint();
                VectorOfInt   hull    = new VectorOfInt();
                contour = contours[0];
                CvInvoke.ConvexHull(contour, hull, false, false);

                // Convexity defects.
                Mat convexityDefects = new Mat();
                CvInvoke.ConvexityDefects(contour, hull, convexityDefects);

                // Copy to a matrix for convenient per-channel access.
                Matrix <int> matrixOfDefects = new Matrix <int>(convexityDefects.Rows, convexityDefects.Cols, convexityDefects.NumberOfChannels);
                convexityDefects.CopyTo(matrixOfDefects);
                Matrix <int>[] channels = matrixOfDefects.Split();
                CircleF        circle   = CvInvoke.MinEnclosingCircle(contour);

                List <System.Drawing.PointF> PointsMemory = new List <System.Drawing.PointF>();

                // Used to locate the contour center.
                RotatedRect minAreaRect = CvInvoke.MinAreaRect(contour);

                // Point selection.
                for (i = 0; i < matrixOfDefects.Rows; ++i)
                {
                    if (PropertyBox.IsChecked == true)
                    {
                        #region Via the min-area bounding rectangle
                        //C - Center  S - Start[0]  E - End[1]  D - Depth[2]
                        //Start - Center
                        float LengthXSC = contour[channels[0][i, 0]].X - minAreaRect.Center.X;
                        float LengthYSC = contour[channels[0][i, 0]].Y - minAreaRect.Center.Y;
                        float LengthSC  = (float)Math.Sqrt(Math.Pow(LengthXSC, 2) + Math.Pow(LengthYSC, 2));

                        // Distance from the start point to the defect point.
                        // BUG FIX: the Y component previously read .X of the defect point.
                        float LengthXSD = contour[channels[0][i, 0]].X - contour[channels[2][i, 0]].X;
                        float LengthYSD = contour[channels[0][i, 0]].Y - contour[channels[2][i, 0]].Y;
                        float LengthSD  = (float)Math.Sqrt(Math.Pow(LengthXSD, 2) + Math.Pow(LengthYSD, 2));

                        // Distance from the start to the end of the defect vector.
                        // BUG FIX: the Y component previously read .X of the end point.
                        float LengthXSE = contour[channels[0][i, 0]].X - contour[channels[1][i, 0]].X;
                        float LengthYSE = contour[channels[0][i, 0]].Y - contour[channels[1][i, 0]].Y;
                        float LengthSE  = (float)Math.Sqrt(Math.Pow(LengthXSE, 2) + Math.Pow(LengthYSE, 2));

                        // Distance from the defect point to the center.
                        float LengthXDC = contour[channels[2][i, 0]].X - minAreaRect.Center.X;
                        float LengthYDC = contour[channels[2][i, 0]].Y - minAreaRect.Center.Y;
                        float LengthDC  = (float)Math.Sqrt(Math.Pow(LengthXDC, 2) + Math.Pow(LengthYDC, 2));

                        // Distance from the center to an edge at 90 degrees.
                        float MinRadius = minAreaRect.Size.Width / 2;

                        // Accept points that satisfy all geometric conditions.
                        if (LengthSC >= MinRadius * 0.85 &&
                            (LengthSE <= MinRadius || LengthSE >= MinRadius * 0.95) &&
                            (LengthSD > MinRadius * 0.3 && LengthDC <= MinRadius * 0.9))
                        {
                            PointsMemory.Add(new System.Drawing.PointF(contour[channels[0][i, 0]].X, contour[channels[0][i, 0]].Y));
                            ResultImage.Draw(new System.Drawing.Point[] { contour[channels[0][i, 0]], contour[channels[2][i, 0]] }, new Bgr(0, 255, 0), 2);
                        }
                        ResultImage.Draw(new RotatedRect(minAreaRect.Center, minAreaRect.Size, minAreaRect.Angle), new Bgr(125, 125, 125), 2);
                        #endregion
                    }
                    else
                    {
                        #region Via the min-enclosing circle
                        //C - Center  S - Start[0]  E - End[1]  D - Depth[2]
                        //Start - Center
                        float LengthXSC = contour[channels[0][i, 0]].X - circle.Center.X;
                        float LengthYSC = contour[channels[0][i, 0]].Y - circle.Center.Y;
                        float LengthSC  = (float)Math.Sqrt(Math.Pow(LengthXSC, 2) + Math.Pow(LengthYSC, 2));

                        // Distance from the start point to the defect point.
                        // BUG FIX: the Y component previously read .X of the defect point.
                        float LengthXSD = contour[channels[0][i, 0]].X - contour[channels[2][i, 0]].X;
                        float LengthYSD = contour[channels[0][i, 0]].Y - contour[channels[2][i, 0]].Y;
                        float LengthSD  = (float)Math.Sqrt(Math.Pow(LengthXSD, 2) + Math.Pow(LengthYSD, 2));

                        // Distance from the start to the end of the defect vector.
                        // BUG FIX: the Y component previously read .X of the end point.
                        float LengthXSE = contour[channels[0][i, 0]].X - contour[channels[1][i, 0]].X;
                        float LengthYSE = contour[channels[0][i, 0]].Y - contour[channels[1][i, 0]].Y;
                        float LengthSE  = (float)Math.Sqrt(Math.Pow(LengthXSE, 2) + Math.Pow(LengthYSE, 2));

                        // Distance from the defect point to the center.
                        float LengthXDC = contour[channels[2][i, 0]].X - circle.Center.X;
                        float LengthYDC = contour[channels[2][i, 0]].Y - circle.Center.Y;
                        float LengthDC  = (float)Math.Sqrt(Math.Pow(LengthXDC, 2) + Math.Pow(LengthYDC, 2));

                        // Accept points that satisfy all geometric conditions.
                        if (LengthSC >= circle.Radius * 0.5 &&
                            (LengthSE <= circle.Radius * 0.8 || LengthSE >= circle.Radius) &&
                            (LengthDC <= LengthSD * 0.9 || LengthSD > circle.Radius * 0.4))
                        {
                            ResultImage.Draw(new System.Drawing.Point[] { contour[channels[0][i, 0]], contour[channels[2][i, 0]] }, new Bgr(0, 255, 0), 2);
                            PointsMemory.Add(new System.Drawing.PointF(contour[channels[0][i, 0]].X, contour[channels[0][i, 0]].Y));
                        }
                        ResultImage.Draw(new CircleF(circle.Center, circle.Radius), new Bgr(0, 255, 255), 2);
                        #endregion
                    }
                }
                // Mark every accepted point with a red dot.
                foreach (System.Drawing.PointF pt in PointsMemory)
                {
                    ResultImage.Draw(new CircleF(pt, 3), new Bgr(0, 0, 255), 3);
                }

                OutputImageBox.Source = BitmapSourceConvert.ToBitmapSource(ResultImage);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "Ошибка", MessageBoxButton.OK, MessageBoxImage.Error);
            }
        }
示例#15
0
        /// <summary>
        /// Classifies each external contour of the loaded image as a basic shape
        /// (triangle, square/rectangle, pentagon, hexagon or circle) based on the
        /// vertex count of its polygonal approximation, labels it at its centroid,
        /// and shows the annotated image in pictureBox2.
        /// </summary>
        private void shapeDetectionToolStripMenuItem_Click(object sender, EventArgs e)
        {
            try
            {
                if (imgInput == null)
                {
                    throw new Exception("Please select an image");
                }

                // Inverted threshold because the background of the image is white.
                var tmp = imgInput.SmoothGaussian(5).Convert <Gray, byte>()
                          .ThresholdBinaryInv(new Gray(230), new Gray(255));

                VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
                Mat hier = new Mat();

                CvInvoke.FindContours(tmp, contours,
                                      hier,
                                      Emgu.CV.CvEnum.RetrType.External,
                                      Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

                for (int i = 0; i < contours.Size; i++)
                {
                    double        parameter = CvInvoke.ArcLength(contours[i], true);
                    VectorOfPoint approx    = new VectorOfPoint();

                    CvInvoke.ApproxPolyDP(contours[i], approx, .04 * parameter, true);

                    CvInvoke.DrawContours(imgInput, contours, i, new MCvScalar(255, 0, 255), 2);

                    // Image moments give the contour centroid (label anchor point).
                    var moments = CvInvoke.Moments(contours[i]);
                    if (moments.M00 == 0)
                    {
                        // FIX: degenerate (zero-area) contour — centroid is undefined;
                        // previously this divided by zero.
                        continue;
                    }
                    int x = (int)(moments.M10 / moments.M00);
                    int y = (int)(moments.M01 / moments.M00);

                    // Map the vertex count of the approximated polygon to a shape name.
                    string label = null;
                    if (approx.Size == 3)
                    {
                        label = "Triangle";
                    }
                    else if (approx.Size == 4)
                    {
                        // Near-unit aspect ratio distinguishes a square from a rectangle.
                        Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);
                        double    ar   = (double)rect.Width / rect.Height;
                        label = (ar >= .95 && ar <= 1.05) ? "Square" : "Rectangle";
                    }
                    else if (approx.Size == 5)
                    {
                        label = "Pentagon";
                    }
                    else if (approx.Size == 6)
                    {
                        label = "Hexagon";
                    }
                    else if (approx.Size > 6)
                    {
                        label = "Circle";
                    }

                    if (label != null)
                    {
                        CvInvoke.PutText(imgInput, label,
                                         new Point(x, y),
                                         Emgu.CV.CvEnum.FontFace.HersheySimplex,
                                         .5,
                                         new MCvScalar(255, 0, 0),
                                         2);
                    }
                }

                // FIX: assign once after all contours are drawn (was re-assigned
                // on every loop iteration).
                pictureBox2.Image = imgInput.Bitmap;
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
示例#16
0
        /// <summary>
        /// Detects large rectangle-like regions in the image shown in pictureBox1 and
        /// displays the intermediate stages: grayscale (pictureBox15), truncated
        /// threshold (pictureBox16), morphological close (pictureBox17), and the
        /// Otsu-binarized result with detected rectangles outlined (pictureBox18).
        /// </summary>
        private void ProcessingImg2()
        {
            Bitmap            bitmap      = (Bitmap)this.pictureBox1.Image;
            Image <Bgr, Byte> imageSource = new Image <Bgr, byte>(bitmap);

            // Grayscale conversion.
            Image <Gray, byte> imgGray = new Image <Gray, byte>(bitmap.Size);

            CvInvoke.CvtColor(imageSource, imgGray, ColorConversion.Bgr2Gray);
            this.pictureBox15.Image = imgGray.ToBitmap();

            // Truncated thresholding: pixel values above 60 are clamped to 60.
            Image <Gray, byte> imgThresholdTrunc = new Image <Gray, byte>(imgGray.Size);

            CvInvoke.Threshold(imgGray, imgThresholdTrunc, 60, 255, ThresholdType.Trunc);
            this.pictureBox16.Image = imgThresholdTrunc.ToBitmap();

            // Morphological close with a 6x6 rectangle to seal small cracks.
            Mat oMat1 = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle,
                                                       new System.Drawing.Size(6, 6), new System.Drawing.Point(0, 0));
            Image <Gray, byte> imgMorphologyEx = new Image <Gray, byte>(imgGray.Size);

            CvInvoke.MorphologyEx(imgThresholdTrunc, imgMorphologyEx, Emgu.CV.CvEnum.MorphOp.Close, oMat1,
                                  new System.Drawing.Point(0, 0), 1, BorderType.Default,
                                  new MCvScalar(255, 0, 0, 255));
            this.pictureBox17.Image = imgMorphologyEx.ToBitmap();

            // Otsu binarization.
            Image <Gray, byte> imgThresholdOtsu = new Image <Gray, byte>(imgGray.Size);

            CvInvoke.Threshold(imgMorphologyEx, imgThresholdOtsu, 0, 255, ThresholdType.Otsu);

            Image <Bgr, byte> imgResult = new Image <Bgr, byte>(imgGray.Size);

            CvInvoke.CvtColor(imgThresholdOtsu, imgResult, ColorConversion.Gray2Bgr);
            MCvScalar oScaler = new MCvScalar(40, 255, 255, 255);

            // FIX: dispose the native contour container when done (was leaked).
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(imgThresholdOtsu, contours, null, RetrType.List,
                                      ChainApproxMethod.ChainApproxSimple);
                int count = contours.Size;

                for (int i = 0; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                            double dArea = CvInvoke.ContourArea(approxContour, false);
                            // Only quadrilaterals covering at least a third of the image.
                            if (dArea > imgThresholdOtsu.Rows * imgThresholdOtsu.Cols / 3d &&
                                approxContour.Size == 4)
                            {
                                #region determine if all the angles in the contour are within [80, 100] degree
                                bool isRectangle             = true;
                                System.Drawing.Point[] pts   = approxContour.ToArray();
                                LineSegment2D[]        edges = Emgu.CV.PointCollection.PolyLine(pts, true);

                                for (int j = 0; j < edges.Length; j++)
                                {
                                    double angle = Math.Abs(
                                        edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                    if (angle < 80 || angle > 100)
                                    {
                                        isRectangle = false;
                                        break;
                                    }
                                }
                                #endregion

                                if (isRectangle)
                                {
                                    CvInvoke.DrawContours(imgResult, contours, i, oScaler, 3);
                                }
                            }
                        }
                }
            }
            this.pictureBox18.Image = imgResult.ToBitmap();
        }
示例#17
0
        // 5x5 rectangular structuring element used by the morphology step in GetLinesByHough.
        readonly Mat kernelClosing = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1));
        #endregion

        /// <summary>
        ///  Detects line segments in a camera frame: grayscale, undistortion,
        ///  thresholding, morphology, small-blob removal, Canny edge detection,
        ///  then a probabilistic Hough transform.
        /// </summary>
        /// <param name="img">source frame</param>
        /// <param name="cameraID">0 = right camera, 1 = front camera</param>
        /// <returns>detected Hough line segments</returns>
        public LineSegment2D[] GetLinesByHough(Bitmap img, int cameraID)
        {
            #region Grayscale processing
            // Grayscale with a pyramid down/up pass to suppress noise.
            grayImg  = new Image <Gray, byte>(img).PyrDown().PyrUp();
            remapImg = grayImg.CopyBlank(); // image after undistortion remap
            // Load the distortion parameters for the requested camera.
            if (cameraID == 0)
            {
                GetRightCamParams(); // right-side camera parameters
            }
            else
            {
                GetFrontCamParams(); // front camera parameters
            }

            // Undistort the frame.
            // FIX: the previous try/catch rethrew with "throw (ex)", which destroys
            // the original stack trace; letting the exception propagate is equivalent
            // and preserves it.
            CvInvoke.InitUndistortRectifyMap(cameraMatrix, distCoeffs, null, cameraMatrix, imageSize, DepthType.Cv32F, mapx, mapy);
            CvInvoke.Remap(grayImg, remapImg, mapx, mapy, Inter.Linear, BorderType.Reflect101, new MCvScalar(0));

            // Binarization on the undistorted image.
            binaryImg = grayImg.CopyBlank();                                         // blank canvas the size of the gray image
            CvInvoke.Threshold(remapImg, binaryImg, 200, 255, ThresholdType.Binary);
            // Morphology. NOTE(review): names say "closing" but the operation is
            // MorphOp.Open — confirm which was intended; behavior kept as-is.
            Image <Gray, byte> closingImg = binaryImg.CopyBlank();
            CvInvoke.MorphologyEx(binaryImg, closingImg, MorphOp.Open, kernelClosing, new Point(-1, -1), 5, BorderType.Default, new MCvScalar(255, 0, 0, 255));
            #endregion

            #region Remove irrelevant white blobs
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();                                          // blob collection
            Image <Gray, byte>    dnc      = new Image <Gray, byte>(closingImg.Width, closingImg.Height);
            CvInvoke.FindContours(closingImg, contours, dnc, RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple); // contour collection
            for (int k = 0; k < contours.Size; k++)
            {
                double area = CvInvoke.ContourArea(contours[k]); // area of each connected component
                if (area < 250000)                               // blank out components below the minimum area
                {
                    CvInvoke.FillConvexPoly(closingImg, contours[k], new MCvScalar(0));
                }
            }
            #endregion

            #region Edge detection
            edageImg = closingImg.CopyBlank();
            CvInvoke.Canny(closingImg, edageImg, cannyThreshold, cannyThresholdLinking);
            // FIX: SmoothMedian returns a new image; the previous code discarded the
            // result, so the median filter had no effect on the Hough input.
            edageImg = edageImg.SmoothMedian(5);
            #endregion
            #region HoughLinesP
            lines = CvInvoke.HoughLinesP(
                edageImg,
                1,               //Distance resolution in pixel-related units
                Math.PI / 180.0, //Angle resolution measured in radians.
                100,             //threshold
                100,             //min Line width
                10);             //gap between lines);
            #endregion
            return(lines);
        }
示例#18
0
        /// <summary>
        /// Continuously grabs frames from the capture device, finds contours in a
        /// blurred binary version of each frame, converts the chosen contour's center
        /// to polar coordinates (distance/angle) for the Arduino, and updates the UI.
        /// Runs until the capture closes; intended to execute on a worker thread
        /// (UI updates are marshalled via Invoke).
        /// </summary>
        private void DisplayWebcam()
        {
            while (_capture.IsOpened)
            {
                Mat  sourceFrame  = _capture.QueryFrame();
                var  blurredImage = new Mat();
                // Resize the frame to match the PictureBox width, keeping aspect ratio.
                int  newHeight    = (sourceFrame.Size.Height * emguPictureBox.Size.Width) / sourceFrame.Size.Width;
                Size newSize      = new Size(emguPictureBox.Size.Width, newHeight);
                CvInvoke.Resize(sourceFrame, sourceFrame, newSize);
                emguPictureBox.Image = sourceFrame.Bitmap;
                Mat sourceFrameWithArt = sourceFrame.Clone();
                // Image version of the source frame, used when warping the image.
                Image <Bgr, byte> sourceFrameWarped = sourceFrame.ToImage <Bgr, byte>();
                // Isolate the ROI: convert to gray, apply a binary threshold, then blur.
                Image <Gray, byte> grayImg = sourceFrame.ToImage <Gray, byte>().ThresholdBinary(new Gray(125), new
                                                                                                Gray(255));
                CvInvoke.GaussianBlur(grayImg.Mat, blurredImage, new Size(9, 9), 0);
                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    // Build list of contours
                    CvInvoke.FindContours(blurredImage, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                    // Selecting largest contour
                    if (contours.Size > 0)
                    {
                        double maxArea   = 0;
                        int    chosen    = 0;
                        int    numberc   = 0;
                        int    numberm   = 0;
                        int    framwith  = 0;
                        int    framhight = 0;
                        bool   iSq       = false;
                        for (int i = 0; i < contours.Size; i++)
                        {
                            VectorOfPoint contour = contours[i];
                            double        area    = CvInvoke.ContourArea(contour);
                            if (area > maxArea)
                            {
                                maxArea = area;
                                chosen  = i;
                            }
                            numberc++;
                            if (area >= 1000 && area < 10000)
                            {
                                // Minimal rectangle containing the chosen contour.
                                Rectangle boundingBox = CvInvoke.BoundingRectangle(contours[chosen]);
                                // area > 2000 is treated as a "square" (metal) object.
                                if (area > 2000)
                                {
                                    iSq = true;
                                }
                                if (area > 2000 && st == false)
                                {
                                    ISsqu = true;
                                }
                                else if (st == false)
                                {
                                    ISsqu = false;
                                }
                                Point center = MarkDetectedObject(sourceFrameWithArt, contours[chosen], boundingBox, area, iSq);
                                // Expand the bounding box by 30 px on each side and clamp
                                // it to the frame; used as the ROI for later warping.
                                // FIX: Min/Max were swapped (Math.Min(0, x) is never a
                                // lower clamp) and the 3rd/4th Rectangle arguments were
                                // absolute right/bottom coordinates instead of width/height.
                                int roiLeft   = Math.Max(0, boundingBox.X - 30);
                                int roiTop    = Math.Max(0, boundingBox.Y - 30);
                                int roiRight  = Math.Min(sourceFrameWarped.Width - 1, boundingBox.X +
                                                         boundingBox.Width + 30);
                                int roiBottom = Math.Min(sourceFrameWarped.Height - 1, boundingBox.Y +
                                                         boundingBox.Height + 30);
                                sourceFrameWarped.ROI = new Rectangle(roiLeft, roiTop,
                                                                      roiRight - roiLeft,
                                                                      roiBottom - roiTop);
                                // Display the version of the source image with the added artwork, simulating ROI focus:
                                //roiPictureBox.Image = sourceFrameWithArt.Bitmap;
                                // Warp the image, output it
                                //warpedPictureBox.Image = WarpImage(sourceFrameWarped, contours[chosen]).Bitmap;
                                numberm++;
                                if (st == false)
                                {
                                    // Latch the first detected center and frame dimensions.
                                    xpix      = center.X;
                                    ypix      = center.Y;
                                    framwith  = 389;
                                    framhight = 328;
                                    st        = true;
                                }
                            }
                        }

                        if (st == true)
                        {
                            // Convert the pixel position into real-world polar
                            // coordinates (distance in mm, angle in degrees) for the arm.
                            x           = xpix * 11.5;
                            y           = ypix * 6.5;
                            x           = (x / 389) - 5.5;
                            y           = (y / 328) + 3.5 + 6.5;
                            angle       = Math.Atan(x / y) * 57.296; // radians -> degrees
                            angle       = (angle + 90);
                            dis         = (Math.Sqrt((x * x) + (y * y)) - 7) * 10.0;
                            disconver   = Convert.ToInt32(dis);
                            angleconver = Convert.ToInt32(angle);
                            if (ISsqu == true)
                            {
                                ISsquint = 1;
                            }
                            else
                            {
                                ISsquint = 0;
                            }
                            Invoke(new Action(() =>
                            {
                                // Fill the text boxes whose values are sent to the Arduino.
                                X1.Text = "" + disconver.ToString();
                                Y1.Text = "" + angleconver.ToString();
                                SQ.Text = "" + ISsquint.ToString();
                            }));
                        }

                        if (enableCoordinateSending == true)
                        {
                            Send();
                        }

                        // Report the detection results to the user.
                        Invoke(new Action(() =>
                        {
                            countc.Text = $"Number of contours " + numberc;
                        }));
                        Invoke(new Action(() =>
                        {
                            countm.Text = $"Number of metal shape contours " + numberm;
                        }));
                        Invoke(new Action(() =>
                        {
                            out_put.Text = $"the angle in degrees " + angle + " the distance from point of rotation " + dis;
                        }));
                    }
                }
            }
        }
示例#19
0
        public bool FindMark(string imgFile)
        {
            try
            {
                bool result      = false;
                Mat  originalImg = CvInvoke.Imread(imgFile, ImreadModes.AnyColor);

                //获取原始图像的宽和高
                this._imgWidth  = originalImg.Width;
                this._imgHeight = originalImg.Height;
                //截取对象区域
                Mat cutImg = new Mat(originalImg, new Range(InputManager.AreaStartY, InputManager.AreaEndY), new Range(InputManager.AreaStartX, InputManager.AreaEndX));
                cutImg.Save(InputManager.LogFolder + "cutImg.png");
                //Convert the image to grayscale and filter out the noise
                Mat binaryImg = new Mat();

                //use image pyr to remove noise
                UMat pyrDown = new UMat();
                CvInvoke.PyrDown(cutImg, pyrDown);
                CvInvoke.PyrUp(pyrDown, cutImg);

                //convert to binary image
                CvInvoke.Threshold(cutImg, binaryImg, 100, 255, ThresholdType.BinaryInv);
                //save binary image
                binaryImg.Save(InputManager.LogFolder + "BinaryImg.png");

                #region Canny and edge detection

                Stopwatch watch          = Stopwatch.StartNew();
                double    cannyThreshold = 180.0;

                watch.Reset();
                watch.Start();
                double cannyThresholdLinking = 120.0;
                UMat   cannyEdges            = new UMat();
                CvInvoke.Canny(cutImg, cannyEdges, cannyThreshold, cannyThresholdLinking);
                cannyEdges.Save(InputManager.LogFolder + "cannyEdges.png");

                LineSegment2D[] lines = CvInvoke.HoughLinesP(
                    cannyEdges,
                    1,              //Distance resolution in pixel-related units
                    Math.PI / 45.0, //Angle resolution measured in radians.
                    20,             //threshold
                    30,             //min Line width
                    10);            //gap between lines

                watch.Stop();
                #endregion

                #region Find rectangles
                watch.Reset();
                watch.Start();
                List <RotatedRect>   boxList     = new List <RotatedRect>(); //a box is a rotated rectangle
                List <VectorOfPoint> contourList = new List <VectorOfPoint>();
                VectorOfPoint        markContour = new VectorOfPoint();

                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                    int count = contours.Size;
                    for (int i = 0; i < count; i++)
                    {
                        using (VectorOfPoint contour = contours[i])
                            using (VectorOfPoint approxContour = new VectorOfPoint())
                            {
                                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                                if (CvInvoke.ContourArea(approxContour, false) > _minContourArea) //only consider contours with area greater than 250
                                {
                                    if (approxContour.Size == 6)                                  //The contour has 6 vertices.
                                    {
                                        #region determine if all the angles in the contour are within [80, 100] degree
                                        bool            isRectangle = true;
                                        Point[]         pts         = approxContour.ToArray();
                                        LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                        for (int j = 0; j < edges.Length; j++)
                                        {
                                            double angle = Math.Abs(
                                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                            if (angle < 80 || angle > 100)
                                            {
                                                isRectangle = false;
                                                break;
                                            }
                                        }
                                        #endregion

                                        if (isRectangle)
                                        {
                                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                            contourList.Add(approxContour);
                                            break;
                                        }
                                    }
                                }
                            }
                    }
                }

                watch.Stop();
                #endregion

                #region draw rectangles

                Mat markImage = cutImg;

                PointF markCenter = new PointF();
                double markAngle  = 0;

                if (boxList.Count > 0)
                {
                    result     = true;
                    markCenter = boxList[0].Center;
                    markAngle  = Math.Round(boxList[0].Angle, 3);

                    //重新映射至原始图像并计算相对测头的位移
                    this.OutputManager.AlignmentX    = markCenter.X + this.InputManager.AreaStartX + this.InputManager.BondpadCenterX - this._imgWidth / 2.0 - this.InputManager.Probe2CCDX;
                    this.OutputManager.AlignmentY    = markCenter.Y + this.InputManager.AreaStartY + this.InputManager.BondpadCenterY - this._imgHeight / 2.0 - this.InputManager.Probe2CCDY;
                    this.OutputManager.AlignmentSita = markAngle - this.InputManager.Probe2CCDSita;
                    this.OutputManager.MarkImgFile   = InputManager.LogFolder + "outputMarkImage.png";

                    Point[] markContours = RemapMarkContours(boxList);
                    Mat     outputImg    = new Mat();
                    CvInvoke.CvtColor(originalImg, outputImg, ColorConversion.Gray2Bgr);
                    CvInvoke.Polylines(outputImg, markContours, true, new Bgr(Color.Red).MCvScalar, 2);
                    outputImg.Save(this.OutputManager.MarkImgFile);
                }
                #endregion
                return(result);
            }
            catch (Exception e)
            {
                throw e;
            }
        }
示例#20
0
        /// <summary>
        /// Converts a text-detection probability map (outputTensor[0], DBNet-style) into a
        /// list of text boxes: binarize, dilate, find contours, score each contour against
        /// the raw probabilities, un-clip the minimal box, and map the corners back to the
        /// source image via the scale parameters.
        /// </summary>
        /// <param name="outputTensor">Model outputs; element 0 holds the rows*cols probability map.</param>
        /// <param name="rows">Probability-map height.</param>
        /// <param name="cols">Probability-map width.</param>
        /// <param name="s">Scale/size info used to map points back to the source image.</param>
        /// <param name="boxScoreThresh">Minimum contour score required to keep a box.</param>
        /// <param name="boxThresh">Binarization threshold in [0,1] applied to the map.</param>
        /// <param name="unClipRatio">Expansion ratio passed to Unclip.</param>
        /// <returns>Detected boxes, in reverse discovery order.</returns>
        private static List <TextBox> GetTextBoxes(DisposableNamedOnnxValue[] outputTensor, int rows, int cols, ScaleParam s, float boxScoreThresh, float boxThresh, float unClipRatio)
        {
            float          maxSideThresh = 3.0f; // minimum long-side length for a candidate box
            List <TextBox> rsBoxes       = new List <TextBox>();

            //-----Data preparation-----
            float[] predData = outputTensor[0].AsEnumerable <float>().ToArray();

            // 8-bit view of the probability map (prob * 255) used for thresholding.
            byte[] cbufData = new byte[predData.Length];
            for (int i = 0; i < predData.Length; i++)
            {
                cbufData[i] = Convert.ToByte(predData[i] * 255);
            }

            // using declarations: the native Mats/vectors below are all released when the
            // method exits (the original never disposed any of them).
            using Mat predMat = new Mat(rows, cols, DepthType.Cv32F, 1);
            predMat.SetTo(predData);

            using Mat cbufMat = new Mat(rows, cols, DepthType.Cv8U, 1);
            cbufMat.SetTo(cbufData);

            //-----boxThresh-----
            using Mat thresholdMat = new Mat();
            CvInvoke.Threshold(cbufMat, thresholdMat, boxThresh * 255.0, 255.0, ThresholdType.Binary);

            //-----dilate-----
            using Mat dilateMat     = new Mat();
            using Mat dilateElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(2, 2), new Point(-1, -1));
            CvInvoke.Dilate(thresholdMat, dilateMat, dilateElement, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(128, 128, 128));

            using VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            CvInvoke.FindContours(dilateMat, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

            for (int i = 0; i < contours.Size; i++)
            {
                // Hoist the indexer: each contours[i] call allocates a new native wrapper.
                using VectorOfPoint contour = contours[i];
                if (contour.Size <= 2)
                {
                    continue; // need at least 3 points to form a box
                }

                float maxSide;
                List <PointF> minBox = GetMiniBox(contour, out maxSide);
                if (maxSide < maxSideThresh)
                {
                    continue; // too small
                }

                double score = GetScore(contour, predMat);
                if (score < boxScoreThresh)
                {
                    continue; // low confidence
                }

                List <Point> clipBox = Unclip(minBox, unClipRatio);
                if (clipBox == null)
                {
                    continue; // un-clip produced no polygon
                }

                List <PointF> clipMinBox = GetMiniBox(clipBox, out maxSide);
                if (maxSide < maxSideThresh + 2)
                {
                    continue;
                }

                // Map the box corners back to source-image coordinates, clamped to the image.
                List <Point> finalPoints = new List <Point>();
                foreach (var item in clipMinBox)
                {
                    int ptx = Math.Min(Math.Max((int)(item.X / s.ScaleWidth), 0), s.SrcWidth);
                    int pty = Math.Min(Math.Max((int)(item.Y / s.ScaleHeight), 0), s.SrcHeight);
                    finalPoints.Add(new Point(ptx, pty));
                }

                rsBoxes.Add(new TextBox { Score = (float)score, Points = finalPoints });
            }

            rsBoxes.Reverse();
            return rsBoxes;
        }
示例#21
0
        /// <summary>
        /// Reads a COVID-19 rapid-test strip from the loaded input image. Edges are closed
        /// into solid contours, innermost regions with aspect ratio &lt;= 2 are kept as
        /// candidate bands, the bands are joined vertically, and each tall region is
        /// classified by its band count: exactly two bands =&gt; "Negative", otherwise
        /// "Positive". The verdict is drawn onto the image shown in pictureBox1.
        /// </summary>
        private void COVID19Test_Click(object sender, EventArgs e)
        {
            try
            {
                if (!IMGDict.ContainsKey("input"))
                {
                    // Specific exception type; also fixes the typo ("Selct") in the message.
                    throw new InvalidOperationException("Select an image first.");
                }

                // Smooth, edge-detect, then morphologically close so the strip's bands
                // become solid contours. All native resources are released via using
                // declarations when this block exits (the original leaked them).
                using var img         = IMGDict["input"].SmoothGaussian(3);
                using var gray        = img.Convert <Gray, byte>();
                using var edges       = gray.Canny(150, 50);
                using var morphology  = new Mat();
                using var closeKernel = Mat.Ones(5, 5, DepthType.Cv8U, 1);
                CvInvoke.MorphologyEx(edges, morphology, MorphOp.Close, closeKernel,
                                      new Point(-1, -1), 3, BorderType.Default, new MCvScalar(0));

                using var contours = new VectorOfVectorOfPoint();
                using var h        = new Mat();
                CvInvoke.FindContours(morphology, contours, h, RetrType.Tree, ChainApproxMethod.ChainApproxSimple);

                // OpenCV hierarchy entries are [next, previous, first-child, parent]:
                // keep innermost contours (no child, but a parent) that are not too wide.
                using var preprocessed = edges.CopyBlank();
                var data = h.GetData();
                for (int r = 0; r < data.GetLength(0); r++)
                {
                    for (int c = 0; c < data.GetLength(1); c++)
                    {
                        if ((((int)data.GetValue(r, c, 2))) == -1 &&
                            (((int)data.GetValue(r, c, 3)) > -1))
                        {
                            var bbox = CvInvoke.BoundingRectangle(contours[c]);
                            var AR   = bbox.Width / (float)bbox.Height;
                            if (AR <= 2.0)
                            {
                                CvInvoke.DrawContours(preprocessed, contours, c, new MCvScalar(255), -1);
                            }
                        }
                    }
                }

                // Stretch the kept bands vertically so broken segments merge.
                using var output1   = edges.CopyBlank();
                using var dilKernel = Mat.Ones(10, 1, DepthType.Cv8U, 1);
                CvInvoke.Dilate(preprocessed, output1, dilKernel, new Point(-1, -1),
                                1, BorderType.Default, new MCvScalar(0));

                contours.Clear();
                CvInvoke.FindContours(output1, contours, h, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                using var finaloutput = edges.CopyBlank();
                for (int i = 0; i < contours.Size; i++)
                {
                    var bbox = CvInvoke.BoundingRectangle(contours[i]);
                    // Only tall, narrow regions look like a test window.
                    if (bbox.Height > (bbox.Width * 3))
                    {
                        CvInvoke.DrawContours(finaloutput, contours, i, new MCvScalar(255), -1);

                        // Count the bands inside this region (ROI is reset afterwards).
                        preprocessed.ROI = bbox;
                        int count = CountContours(preprocessed);
                        preprocessed.ROI = Rectangle.Empty;

                        string    msg;
                        MCvScalar color;
                        if (count == 2)
                        {
                            msg   = "Negative";
                            color = new MCvScalar(0, 255, 0); // green
                        }
                        else
                        {
                            msg   = "Positive";
                            color = new MCvScalar(0, 0, 255); // red
                        }
                        int margin = 50;
                        CvInvoke.PutText(img, msg, new(bbox.X - margin, bbox.Y - margin), FontFace.HersheyPlain, 2.5, color, 3);
                    }
                }

                pictureBox1.Image = img.ToBitmap(); // ToBitmap copies, so img may be disposed
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
示例#22
0
        /// <summary>
        /// Finds candidate target silhouettes in a binary input image. Each contour whose
        /// area passes minPixels is rendered (filled) into a mask, wrapped in a Silhouette,
        /// and kept only if its count / linearness / gappiness metrics pass the configured
        /// minimums. Side effects: resets and increments numTargets, and replaces
        /// this.silhouettes with the new list.
        /// </summary>
        /// <param name="input">Binary image whose foreground pixels are the targets.</param>
        public void findSilhouettes(Image <Gray, Byte> input)
        {
            // Thicken/clean the input first so contours are contiguous.
            Image <Gray, Byte> fatInput   = morphologicalOperations(input);
            List <Silhouette>  candidates = new List <Silhouette>();

            numTargets = 0;

            var inputSize = input.Size;

            MCvScalar white = new MCvScalar(255, 255, 255);

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(fatInput, contours, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
                for (int i = 0; i < contours.Size; i++)
                {
                    // Cheap area gate before doing any per-contour rendering.
                    if (CvInvoke.ContourArea(contours[i]) < minPixels)
                    {
                        continue;
                    }

                    // Fresh mask per contour, disposed each iteration (the original
                    // reassigned one variable and leaked the previous image).
                    using (Image <Gray, Byte> mask = new Image <Gray, Byte>(inputSize))
                    {
                        CvInvoke.DrawContours(mask, contours, i, white, -1);
                        Silhouette temp = new Silhouette(input.Copy(mask), 0);
                        temp.compute();

                        // Reject by pixel count, linearity and gap metrics (same order
                        // as the original three early-continue checks).
                        if (temp.count < minPixels || temp.linearness < minLinearness || temp.gappiness < minGappiness)
                        {
                            continue;
                        }

                        // Centroid from image moments, then snapped to the silhouette's top.
                        var moment = CvInvoke.Moments(contours[i], true);
                        temp.centroid.X = (int)(moment.M10 / moment.M00);
                        temp.centroid.Y = (int)(moment.M01 / moment.M00);
                        temp.centroid   = temp.findTop();
                        candidates.Add(temp);
                        numTargets++;
                    }
                }
            }

            // Publish the results (move local variable to field).
            this.silhouettes = candidates;
        }
示例#23
0
        /// <summary>
        /// Reads a pregnancy-test strip from the loaded input image: binarizes (inverted),
        /// keeps external contours larger than a fixed area, counts the band rectangles
        /// inside each candidate region via ProcessParts, and writes a verdict
        /// ("Pregnant" / "Not Pregnant" / "Invalid") next to the region on the displayed image.
        /// </summary>
        private void toolStripMenuItemPregnancyTest_Click(object sender, EventArgs e)
        {
            try
            {
                if (!IMGDict.ContainsKey("input"))
                {
                    throw new InvalidOperationException("Read an image");
                }

                double threshold = 300; // minimum contour area for a candidate region

                // Work on a smoothed copy of the input; all native resources are released
                // via using declarations when this block exits (the original leaked them).
                using var src    = IMGDict["input"].Clone();
                using var img    = src.SmoothGaussian(3);
                using var gray   = img.Convert <Gray, byte>();
                using var binary = gray.ThresholdBinaryInv(new Gray(240), new Gray(255));

                using var contours         = new VectorOfVectorOfPoint();
                using var filteredContours = new VectorOfVectorOfPoint();
                using var hierarchy        = new Mat();

                CvInvoke.FindContours(binary, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                // Keep only contours large enough to be a test region.
                for (int i = 0; i < contours.Size; i++)
                {
                    if (CvInvoke.ContourArea(contours[i]) > threshold)
                    {
                        filteredContours.Push(contours[i]);
                    }
                }

                for (int i = 0; i < filteredContours.Size; i++)
                {
                    var bbox = CvInvoke.BoundingRectangle(filteredContours[i]);

                    // Count the band rectangles inside this region (ROI is reset afterwards).
                    binary.ROI = bbox;
                    var rects = ProcessParts(binary);
                    binary.ROI = Rectangle.Empty;

                    int count = rects.Count;

                    string    msg    = "";
                    int       margin = 25; // fixed local typo: was "marin"
                    MCvScalar color  = new MCvScalar(0, 255, 0);

                    switch (count)
                    {
                    case 1:
                        msg   = "Invalid";
                        color = new MCvScalar(0, 0, 255);
                        break;

                    case 2:
                        // Two rects: the relative sizes decide which windows produced them.
                        if (rects[0].Width * rects[0].Height < rects[1].Width * rects[1].Height)
                        {
                            msg = "Not Pregnant";
                        }
                        else
                        {
                            msg = "Invalid";
                        }
                        color = new MCvScalar(0, 0, 255);
                        break;

                    case 3:
                        msg   = "Pregnant";
                        color = new MCvScalar(0, 255, 0);
                        break;

                    default:
                        msg   = "Invalid/Not pregnant";
                        color = new MCvScalar(0, 0, 255);
                        break;
                    }
                    CvInvoke.PutText(img, msg, new Point(bbox.X + bbox.Width + margin, bbox.Y + margin),
                                     FontFace.HersheyPlain, 1.5, color, 2);
                }

                pictureBox1.Image = img.ToBitmap(); // ToBitmap copies, so img may be disposed
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
        /// <summary>
        /// Object-tracking pass for a single frame: finds contours in the thresholded image,
        /// classifies each sufficiently large contour as a triangle piece (type 1) or square
        /// piece (type 2), maps each piece's centroid onto the 3x3 board grid (gboard),
        /// then draws the pieces, checks for a board change when the player has locked in,
        /// computes depth values, and runs the arm's strategy when it is the arm's turn.
        /// </summary>
        /// <param name="themarks">Not referenced in this method body.</param>
        /// <param name="threshold">Binary image whose white blobs are candidate pieces.</param>
        /// <param name="HsvImage">Not referenced in this method body.</param>
        /// <param name="Frame">Color frame that status text and piece overlays are drawn onto.</param>
        /// <param name="depthPixels">Kinect depth data forwarded to depthfunction.</param>
        private void track(mark themarks, Mat threshold, Mat HsvImage, Image <Bgr, byte> Frame, DepthImagePixel[] depthPixels)
        {
            CvInvoke.PutText(Frame, "Tracking Started", new System.Drawing.Point(0, 50), FontFace.HersheyComplex, 1, new Bgr(0, 255, 0).MCvScalar, 2);
            //create a temporary copy of the threshold image to run contour detection on
            Mat temp = new Mat();

            threshold.CopyTo(temp);
            List <mark> peices      = new List <mark>();
            bool        objectfound = false;

            //vectors for Findcontours

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            //find contours of filtered image using openCV findContours function
            CvInvoke.FindContours(temp, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;

            for (int i = 0; i < count; i++)
            {
                //candidate piece for this contour
                mark peice = new mark();
                using (VectorOfPoint contour = contours[i])
                    using (VectorOfPoint approxContour = new VectorOfPoint())
                    {
                        //approximate the contour with a 5% arc-length tolerance
                        CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);

                        if (CvInvoke.ContourArea(approxContour, false) > 400) //only consider contours with area greater than 400
                        {
                            if (approxContour.Size == 3)                      //The contour has 3 vertices, it is a triangle
                            {
                                System.Drawing.Point[] pts     = approxContour.ToArray();
                                MCvMoments             moments = CvInvoke.Moments(contours[i]);
                                double area = moments.M00;

                                //objectFound = true;

                                //centroid from image moments: (M10/M00, M01/M00)
                                peice.setxpos(Convert.ToInt32(moments.M10 / area));
                                peice.setypos(Convert.ToInt32(moments.M01 / area));

                                //check to see what position the peice is in
                                for (int j = 0; j <= 2; j++)
                                {
                                    for (int k = 0; k <= 2; k++)
                                    {
                                        //a piece belongs to cell (j, k) when its centroid is within
                                        //half a square (BOARD_SIZE / 2) of the cell center in x and y
                                        if (Math.Abs(gboard[j, k].getxpos() - peice.getxpos()) <= BOARD_SIZE / 2)
                                        {
                                            if (Math.Abs(gboard[j, k].getypos() - peice.getypos()) <= BOARD_SIZE / 2)
                                            {
                                                //note: k is stored as boardx, j as boardy
                                                peice.setboardx(k);
                                                peice.setboardy(j);
                                                //Console.WriteLine(peice.getboardx());
                                                //Console.WriteLine(peice.getboardx());
                                            }
                                        }
                                    }
                                }
                                //set peice type to triangle (1)
                                peice.settype(1);

                                peices.Add(peice);
                                objectfound = true;
                            }

                            else if (approxContour.Size == 4) //The contour has 4 vertices.
                            {
                                #region determine if all the angles in the contour are within [80, 100] degree
                                bool isRectangle             = true;
                                System.Drawing.Point[] pts   = approxContour.ToArray();
                                LineSegment2D[]        edges = Emgu.CV.PointCollection.PolyLine(pts, true);

                                for (int j = 0; j < edges.Length; j++)
                                {
                                    double angle = Math.Abs(
                                        edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                    if (angle < 80 || angle > 100)
                                    {
                                        isRectangle = false;
                                        break;
                                    }
                                }
                                #endregion

                                if (isRectangle)
                                {
                                    //square piece: same centroid and board mapping as the triangle branch
                                    MCvMoments moments = CvInvoke.Moments(contours[i]);
                                    double     area    = moments.M00;
                                    peice.setxpos(Convert.ToInt32(moments.M10 / area));
                                    peice.setypos(Convert.ToInt32(moments.M01 / area));
                                    //Console.WriteLine(peice.getxpos());

                                    //Console.WriteLine(peice.getxpos());
                                    //Console.WriteLine(peice.getypos());
                                    //set peice board positions
                                    for (int j = 0; j <= 2; j++)
                                    {
                                        for (int k = 0; k <= 2; k++)
                                        {
                                            //if its x position is within a half of square size pixels then boardx and y are
                                            if (Math.Abs(gboard[j, k].getxpos() - peice.getxpos()) <= BOARD_SIZE / 2)
                                            {
                                                if (Math.Abs(gboard[j, k].getypos() - peice.getypos()) <= BOARD_SIZE / 2)
                                                {
                                                    peice.setboardx(k);
                                                    peice.setboardy(j);
                                                    //Console.WriteLine(peice.getboardx());
                                                }
                                            }
                                        }
                                    }
                                    //set peice type to square
                                    peice.settype(2);
                                    peices.Add(peice);
                                    objectfound = true;
                                    //remember the square's pixel position as the arm's target
                                    armtargetx  = peice.getxpos();
                                    armtargety  = peice.getypos();
                                }
                            }
                        }
                    }
            }



            //Console.WriteLine(peices.ElementAt(1).getboardx());
            if (objectfound)
            {
                //Drawingfunction to show peices
                draw(peices, Frame);
                //check to see if board state has changed after player locks in move
                if (Lockin)
                {
                    boardchange(peices);
                }

                depthfunction(peices, depthPixels);
            }

            if (turn)
            {
                //if its arms turn then figure out where to move
                strategy();
            }



            //find depth values for returned targets
        }
示例#25
0
        /// <summary>
        /// Loads the image named in fileNameTextBox (resized to 400x400 for display) and runs
        /// three detectors over it: Hough circles, probabilistic Hough line segments on Canny
        /// edges, and triangle/rectangle detection via contour polygon approximation.
        /// Results are drawn into the four image boxes and timings go into the window title.
        /// </summary>
        public void PerformShapeDetection()
        {
            if (fileNameTextBox.Text != String.Empty)
            {
                StringBuilder msgBuilder = new StringBuilder("Performance: ");

                // Load the image from file and resize it for display; the full-size
                // original is disposed immediately (the original code leaked it).
                Image <Bgr, Byte> img;
                using (Image <Bgr, Byte> loaded = new Image <Bgr, byte>(fileNameTextBox.Text))
                {
                    img = loaded.Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);
                }

                // Convert the image to grayscale; uimage is released when this block exits
                // (using declaration — the original never disposed its UMats).
                using UMat uimage = new UMat();
                CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

                // Use an image pyramid (down-sample then up-sample) to remove noise.
                using (UMat pyrDown = new UMat())
                {
                    CvInvoke.PyrDown(uimage, pyrDown);
                    CvInvoke.PyrUp(pyrDown, uimage);
                }

                #region circle detection
                Stopwatch watch                      = Stopwatch.StartNew();
                double    cannyThreshold             = 180.0;
                double    circleAccumulatorThreshold = 120;
                CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);

                watch.Stop();
                msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                #region Canny and edge detection
                watch.Reset(); watch.Start();
                double cannyThresholdLinking = 120.0;
                using UMat cannyEdges = new UMat();
                CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

                LineSegment2D[] lines = CvInvoke.HoughLinesP(
                    cannyEdges,
                    1,              //Distance resolution in pixel-related units
                    Math.PI / 45.0, //Angle resolution measured in radians.
                    20,             //threshold
                    30,             //min Line width
                    10);            //gap between lines

                watch.Stop();
                msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                #region Find triangles and rectangles
                watch.Reset(); watch.Start();
                List <Triangle2DF> triangleList = new List <Triangle2DF>();
                List <RotatedRect> boxList      = new List <RotatedRect>(); //a box is a rotated rectangle

                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                    int count = contours.Size;
                    for (int i = 0; i < count; i++)
                    {
                        using (VectorOfPoint contour = contours[i])
                            using (VectorOfPoint approxContour = new VectorOfPoint())
                            {
                                // Approximate the contour with a 5% arc-length tolerance.
                                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                                if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                                {
                                    if (approxContour.Size == 3)                      //The contour has 3 vertices, it is a triangle
                                    {
                                        Point[] pts = approxContour.ToArray();
                                        triangleList.Add(new Triangle2DF(
                                                             pts[0],
                                                             pts[1],
                                                             pts[2]
                                                             ));
                                    }
                                    else if (approxContour.Size == 4) //The contour has 4 vertices.
                                    {
                                        #region determine if all the angles in the contour are within [80, 100] degree
                                        bool            isRectangle = true;
                                        Point[]         pts         = approxContour.ToArray();
                                        LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                        for (int j = 0; j < edges.Length; j++)
                                        {
                                            double angle = Math.Abs(
                                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                            if (angle < 80 || angle > 100)
                                            {
                                                isRectangle = false;
                                                break;
                                            }
                                        }
                                        #endregion

                                        if (isRectangle)
                                        {
                                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                        }
                                    }
                                }
                            }
                    }
                }

                watch.Stop();
                msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                originalImageBox.Image = img; // img is now owned by the image box; do not dispose
                this.Text = msgBuilder.ToString();

                #region draw triangles and rectangles
                Image <Bgr, Byte> triangleRectangleImage = img.CopyBlank();
                foreach (Triangle2DF triangle in triangleList)
                {
                    triangleRectangleImage.Draw(triangle, new Bgr(Color.DarkBlue), 2);
                }
                foreach (RotatedRect box in boxList)
                {
                    triangleRectangleImage.Draw(box, new Bgr(Color.DarkOrange), 2);
                }
                triangleRectangleImageBox.Image = triangleRectangleImage;
                #endregion

                #region draw circles
                Image <Bgr, Byte> circleImage = img.CopyBlank();
                foreach (CircleF circle in circles)
                {
                    circleImage.Draw(circle, new Bgr(Color.Brown), 2);
                }
                circleImageBox.Image = circleImage;
                #endregion

                #region draw lines
                Image <Bgr, Byte> lineImage = img.CopyBlank();
                foreach (LineSegment2D line in lines)
                {
                    lineImage.Draw(line, new Bgr(Color.Green), 2);
                }
                lineImageBox.Image = lineImage;
                #endregion
            }
        }
示例#26
0
        /// <summary>
        /// Full number-detection pipeline for a body image: normalizes the image,
        /// finds edge contours, merges their bounding boxes into candidate regions,
        /// filters the candidates by size/aspect ratio, then crops the digit areas
        /// and runs OCR (Tesseract and/or a neural recognizer, depending on
        /// <c>digitsRecognitionMethod</c>).
        /// </summary>
        /// <param name="normAny">When true, applies the fallback gray normalization
        /// (<c>NormalizeAny</c>). If the first pass finds no candidates, the method
        /// retries itself once with <c>normAny = true</c>.</param>
        /// <returns>List of recognized number strings; empty when nothing is found
        /// even after the retry.</returns>
        public List <string> ProcessBodyImage(bool normAny = false)
        {
            // --- Stage 1: normalization -------------------------------------
            // 'image' is an instance field (source bitmap) — set elsewhere.
            Image <Bgr, byte> baseImg = new Image <Bgr, byte>(image);

            Normalization <Bgr> rgb_norm = new Normalization <Bgr>(baseImg);
            var normalized = rgb_norm.Normalize();

            baseImg = rgb_norm.Result;
            //saveImage(baseImg, Path.GetFileName(imagePath)+"base");

            Image <Gray, byte> grayImg = baseImg.Convert <Gray, byte>();

            //saveImage(grayImg, Path.GetFileName(imagePath)+"gray");
            // Only re-normalize the gray image if the RGB pass reported a change.
            if (normalized)
            {
                Normalization <Gray> gray_norm = new Normalization <Gray>(grayImg);
                if (normAny)
                {
                    gray_norm.NormalizeAny();
                }
                grayImg = gray_norm.Result;
                //saveImage(grayImg, Path.GetFileName(imagePath) + "gray_norm");
            }

            // --- Stage 2: edge detection + raw bounding boxes ----------------
            Image <Gray, byte> canny = grayImg.Canny(175, 320);
            //saveImage(canny, "canny");

            //detecting bounding boxes
            var aContours  = new VectorOfVectorOfPoint();
            var aHierarchy = new Mat();

            CvInvoke.FindContours(canny, aContours, aHierarchy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.LinkRuns, new Point(0, 0));

            List <Rectangle> boxes = new List <Rectangle>();

            // Axis-aligned bounding box for every contour (min/max of its points).
            for (int i = 0; i < aContours.Size; i++)
            {
                var          item   = aContours[i];
                List <Point> points = new List <Point>();
                for (int j = 0; j < item.Size; j++)
                {
                    var item2 = item[j];
                    points.Add(new Point(item2.X, item2.Y));
                }
                var x_query = from Point p in points select p.X;
                int xmin    = x_query.Min();
                int xmax    = x_query.Max();

                var y_query = from Point p in points select p.Y;
                int ymin    = y_query.Min();
                int ymax    = y_query.Max();

                Rectangle r = new Rectangle(xmin, ymin, xmax - xmin, ymax - ymin);
                boxes.Add(r);
            }
            //saveImage(drawBoxesOnImage(canny.Bitmap, boxes), Path.GetFileName(imagePath)+"test");

            // --- Stage 3: unite boxes that touch with >= 70% overlap ---------
            // Tuple = (base box, list of boxes that should be united with it).
            List <Tuple <Rectangle, List <Rectangle> > > itemsToUnite = new List <Tuple <Rectangle, List <Rectangle> > >();

            //check if boxes contact more than 70%, if yes - unite them
            for (int i = 0; i < boxes.Count; i++)
            {
                //contacts = new List<Rectangle>();
                List <Rectangle> unions = new List <Rectangle>();
                for (int j = i + 1; j < boxes.Count; j++)
                {
                    //if (i == j)
                    //    continue;

                    var b1 = boxes[i];
                    var b2 = boxes[j];

                    int dif = 1; //contact differenct

                    //check up/down & left/right contact
                    bool hasContact = false;

                    // Vertical adjacency: bottom edge of one touches top of the other
                    // (exactly 1px apart) and their horizontal spans overlap.
                    if (Math.Abs(b1.Bottom - b2.Top) == dif)
                    {
                        Rectangle left  = b1.Left < b2.Left ? b1 : b2;
                        Rectangle right = b1.Right > b2.Right ? b1 : b2;

                        if (left.Right < right.Left)
                        {
                            continue;
                        }

                        hasContact = true;
                    }
                    // Horizontal adjacency on matching right edges with vertical overlap.
                    else if (Math.Abs(b1.Right - b2.Right) == dif)
                    {
                        Rectangle top    = b1.Top < b2.Top ? b1 : b2;
                        Rectangle bottom = b1.Bottom > b2.Bottom ? b1 : b2;

                        if (top.Bottom < bottom.Top)
                        {
                            continue;
                        }

                        hasContact = true;
                    }

                    if (hasContact)
                    {
                        //contacts.Add(b1);
                        //contacts.Add(b2);

                        //check if contact area if more than 70%
                        // NOTE(review): length1/length2 are computed but never used,
                        // and length2 mixes b2.Right with b1.Left (likely meant
                        // b2.Left) — harmless as-is, but confirm intent before reuse.
                        var length1 = b1.Right - b1.Left;
                        var length2 = b2.Right - b1.Left;
                        var length  = Math.Max(b1.Right, b2.Right) - Math.Min(b1.Left, b2.Left);
                        if (length > 0)
                        {
                            var left_offset  = Math.Max(b1.Left, b2.Left) - Math.Min(b1.Left, b2.Left);
                            var right_offset = Math.Max(b1.Right, b2.Right) - Math.Min(b1.Right, b2.Right);
                            var intersection = length - left_offset - right_offset;

                            var perc = 100 * intersection / (float)length;

                            if (perc >= 70)
                            {
                                unions.Add(b2);
                            }
                        }
                    }
                }
                //if (contacts.Any())
                //    saveImage(drawBoxesOnImage(canny.Bitmap, contacts), "contact_" + i);

                //if (unions.Any())
                itemsToUnite.Add(new Tuple <Rectangle, List <Rectangle> >(boxes[i], unions));

                //if (contacts.Any())
                //    break;
            }
            //saveImage(drawBoxesOnImage(canny.Bitmap, contacts), "contact")

            List <Rectangle> newBoxes = new List <Rectangle>();

            // Materialize the unions: a box with partners becomes one bounding box;
            // a box without partners is kept only if no other union absorbed it.
            foreach (var item in itemsToUnite)
            {
                if (item.Item2.Any())
                {
                    var lst = item.Item2;
                    lst.Add(item.Item1);
                    Rectangle r = getBoundingBox(lst);
                    newBoxes.Add(r);
                }
                else
                {
                    bool canAdd = true;
                    foreach (var i in itemsToUnite)
                    {
                        if (i.Item2.Contains(item.Item1))
                        {
                            canAdd = false;
                            break;
                        }
                    }
                    if (canAdd)
                    {
                        newBoxes.Add(item.Item1);
                    }
                }
            }
            boxes = newBoxes;
            //saveImage(drawBoxesOnImage(canny.Bitmap, boxes), Path.GetFileName(imagePath) + "unions");

            // --- Stage 4: filter boxes to plausible glyph shapes --------------
            //filter bounding boxes
            float minHeight = 5;

            boxes.RemoveAll(x => x.Height < minHeight);          // too short
            boxes.RemoveAll(x => x.Height < x.Width);            // wider than tall (digits are tall)
            boxes.RemoveAll(x => x.Height > canny.Height / 2);   // unreasonably tall
            boxes.RemoveAll(x => x.Width < 2);                   // too narrow

            //saveImage(drawBoxesOnImage(canny.Bitmap, boxes), Path.GetFileName(imagePath) + "filtered");

            // --- Stage 5: grow boxes sideways and find overlapping clusters ---
            //detecting numbers bounding boxes
            List <Rectangle> sums     = new List <Rectangle>();
            List <Rectangle> lefts    = new List <Rectangle>();
            List <Rectangle> rights   = new List <Rectangle>();
            List <Rectangle> extended = new List <Rectangle>();

            boxes = boxes.OrderBy(x => x.X).ToList();
            for (int i = 0; i < boxes.Count; i++)
            {
                // Extend each box left and right by a third of its width so that
                // neighboring digits of the same number start to overlap.
                var       box         = boxes[i];
                int       offsetWidth = (int)(box.Width / 3);
                Rectangle offset1     = new Rectangle(box.X - offsetWidth, box.Y, offsetWidth, box.Height),
                          offset2     = new Rectangle(box.X + box.Width, box.Y, offsetWidth, box.Height);

                Rectangle uni = Rectangle.Union(box, offset1);
                uni = Rectangle.Union(uni, offset2);
                extended.Add(uni);

                lefts.Add(offset1);
                rights.Add(offset2);
            }
            //saveImage(drawBoxesOnImage(canny.Bitmap, new Color[] { Color.Red, Color.Green, Color.Blue }, boxes, lefts, rights), "offsets");
            //saveImage(drawBoxesOnImage(canny.Bitmap, extended), Path.GetFileName(imagePath) + "extended");

            List <IntersectionHierarchyItem> intersections = new List <IntersectionHierarchyItem>();

            foreach (var box in extended)
            {
                intersections.Add(findIntersectingHierarchy(extended, box));
            }

            List <Rectangle> result = new List <Rectangle>();

            // Keep only unions of boxes that actually intersected something.
            foreach (var box in intersections)
            {
                if (box.HasIntersection)
                {
                    result.Add(box.Union);
                }
            }

            result = result.Distinct().ToList();
            //filtering horizontal rectangles
            result.RemoveAll(x => x.Width <= x.Height);
            //filtering rectangles by aspect ratio
            // Near-square regions are unlikely to be multi-digit numbers.
            result.RemoveAll(x =>
            {
                float aspectRatio = (float)x.Width / (float)x.Height;
                return(aspectRatio > 0.75 && aspectRatio < 1.3);
            });

            //saveImage(drawBoxesOnImage(canny.Bitmap, result), Path.GetFileName(imagePath) + "filtered");

            // Nothing found: retry once with the fallback normalization, then give up.
            if (!result.Any())
            {
                if (!normAny)
                {
                    return(ProcessBodyImage(true));
                }

                return(new List <string>());
            }

            // --- Stage 6: crop digit rectangles and prepare OCR input ---------
            List <Rectangle>   bounding = new List <Rectangle>();
            List <Rectangle[]> sRects   = new List <Rectangle[]>();

            List <List <string> > digitVariants = new List <List <string> >();

            //cutting numbers from images
            for (int j = 0; j < result.Count; j++)
            {
                var area = result[j];
                //find source bounding boxes that are inside intersecting area
                List <Rectangle> rects = findInnerRectangles(boxes, area);

                //save(drawBoxesOnImage(canny, rects), imgNumber, "inner1_"+j);

                //remove rectangles that are inside another rect
                rects = removeInnerRectangles(rects);

                //save(drawBoxesOnImage(canny, rects), imgNumber, "inner2_" + j);
                //saveCoords(rects, imgNumber, "inner2_" + j);
                //TODO: do intersection
                rects = merge(rects);
                sRects.Add(rects.ToArray());
                bounding.Add(getBoundingBox(rects));

                //saveImage(drawBoxesOnImage(canny.Bitmap, rects), "inner_" + j);
                //saveCoords(rects, imgNumber, "inner_" + j);

                //distinct list to prevent adding duplicating rectangles after merging
                rects = rects.Distinct().ToList();

                List <string> tesseractParts = new List <string>();
                //cropping each rectangle and saving as image
                if (digitsRecognitionMethod == DigitsRecognitionMethod.Tesseract || digitsRecognitionMethod == DigitsRecognitionMethod.Both)
                {
                    List <string> digitVariant = new List <string>();
                    for (int i = 0; i < rects.Count; i++)
                    {
                        // ROI restricts all Mat operations to the digit rectangle.
                        // componentRoi and thresholdedMat alias the SAME Mat, so the
                        // Otsu threshold below runs in place on the cropped region.
                        var gray = grayImg.Clone();
                        gray.ROI = rects[i];
                        Mat componentRoi   = gray.Mat;
                        Mat thresholdedMat = gray.Mat;
                        CvInvoke.Threshold(componentRoi, thresholdedMat, 0, 255, Emgu.CV.CvEnum.ThresholdType.Otsu | Emgu.CV.CvEnum.ThresholdType.BinaryInv);

                        string digitLocation = FileManager.TempPng;
                        thresholdedMat.Save(digitLocation);
                        digitVariant.Add(digitLocation);

                        //save(thresholdedMat, imgNumber, "digit_" + j + "_" + i);
                        //save(crop(canny, rects[i]), imgNumber, "digit_" + j + "_" + i);
                    }
                    digitVariants.Add(digitVariant);
                }
            }
            //saveImage(drawBoxesOnImage(canny.Bitmap, bounding), "bb");

            // --- Stage 7: recognition ----------------------------------------
            List <string> numbersFinals = new List <string>();

            if (digitsRecognitionMethod == DigitsRecognitionMethod.Tesseract || digitsRecognitionMethod == DigitsRecognitionMethod.Both)
            {
                foreach (var dvar in digitVariants)
                {
                    string file = saveTesseract(dvar);
                    numbersFinals.Add(OCRParser.ParseTesseract(file));
                }
            }
            if (digitsRecognitionMethod == DigitsRecognitionMethod.Neural || digitsRecognitionMethod == DigitsRecognitionMethod.Both)
            {
                //get max campatible bounding box
                //var largestRect = bounding.Aggregate((r1, r2) => (((r1.Height * r1.Width) > (r2.Height * r2.Width)) || ()) ? r1 : r2);
                int           index      = 0;
                List <string> digitPaths = new List <string>();
                if (bounding.Count > 0)
                {
                    // Pick the candidate with the largest area that also improves the
                    // "good letters" score, skipping ones with more than 5 sub-rects.
                    int maxArea     = bounding[index].Height * bounding[index].Width;
                    int lastSubs    = sRects[index].Length;
                    int goodAspects = checkGoodLetters(sRects[index]);
                    for (int i = 1; i < bounding.Count; i++)
                    {
                        //exclude elements that contain much more than 5 rectangles inside (this means that rectagles don't represent letters and numbers but other shapes)
                        int subs = sRects[i].Length;
                        if (subs > 5)
                        {
                            continue;
                        }

                        //exclude elements by aspect ratio
                        // NOTE(review): MAX_ASPECT/MIN_ASPECT are declared but the
                        // aspect-ratio exclusion below is commented out.
                        float       aspectRatio = (float)bounding[i].Width / (float)bounding[i].Height;
                        const float MAX_ASPECT  = 2.4f; //12 / 5
                        const float MIN_ASPECT  = 1.7f;

                        //if (aspectRatio > MAX_ASPECT || aspectRatio < MIN_ASPECT)
                        //    continue;

                        //if (lastSubs > subs)
                        //    continue;

                        int area = bounding[i].Height * bounding[i].Width;
                        if (area > maxArea)
                        {
                            //check letters aspect ratio
                            int lets = checkGoodLetters(sRects[i]);
                            if (lets > goodAspects)
                            {
                                index       = i;
                                maxArea     = area;
                                lastSubs    = subs;
                                goodAspects = lets;
                            }
                        }
                    }

                    //int index = bounding.IndexOf(largestRect);
                    var elems = sRects[index];
                    for (int i = 0; i < elems.Length; i++)
                    {
                        // Same in-place Otsu threshold on the ROI as in Stage 6.
                        var gray = grayImg.Clone();
                        gray.ROI = elems[i];
                        Mat componentRoi   = gray.Mat;
                        Mat thresholdedMat = gray.Mat;
                        CvInvoke.Threshold(componentRoi, thresholdedMat, 0, 255, Emgu.CV.CvEnum.ThresholdType.Otsu | Emgu.CV.CvEnum.ThresholdType.BinaryInv);

                        /*
                         * int s = (int)(0.05 * mat.Rows); // 5% of up-scaled size
                         *  Mat elem = Cv2.GetStructuringElement(StructuringElementShape.Ellipse, new Size(2 * s + 1, 2 * s + 1), new Point(s, s));
                         *  //Cv2.Erode(mat, mat, elem);
                         */

                        // Light erosion (elliptical kernel, 5% of height) to thin strokes.
                        int s    = (int)(0.05 * thresholdedMat.Rows);
                        Mat elem = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Ellipse, new Size(2 * s + 1, 2 * s + 1), new Point(s, s));
                        CvInvoke.Erode(thresholdedMat, thresholdedMat, elem, new Point(s, s), 1, Emgu.CV.CvEnum.BorderType.Reflect, default(MCvScalar));

                        string digitPath = FileManager.TempPng;
                        digitPaths.Add(digitPath);
                        thresholdedMat.Save(digitPath);
                        //save(thresholdedMat, imgNumber, "digit_" + "_" + i);
                    }
                }
                numbersFinals.Add(OCRParser.ParseNeural(digitPaths.ToArray()).Value);
            }

            return(numbersFinals);
        }
示例#27
0
        /// <summary>
        /// Runs Canny edge detection on <paramref name="image"/> and scans the
        /// contours for the first large quadrilateral (approximated area &gt; 100000
        /// with at least 4 vertices). When one is found, its top-left and top-right
        /// vertices (relative to the centroid of its first four points) are written
        /// to <paramref name="left"/> and <paramref name="right"/>.
        /// </summary>
        /// <param name="image">Source image; returned unchanged (no drawing is done).</param>
        /// <param name="left">Receives the vertex with x &lt; center.X and y &lt; center.Y (top-left corner).</param>
        /// <param name="right">Receives the vertex with x &gt; center.X and y &lt; center.Y (top-right corner).</param>
        /// <returns>The input image.</returns>
        public static Image <Bgr, byte> FindVertices(Image <Bgr, byte> image, ref Point left, ref Point right)
        {
            // Fixes vs. original: stray double semicolon removed, unused boxList
            // removed, and cannyEdges is now disposed via 'using' (the manual
            // Dispose call leaked on any exception path).
            using (UMat cannyEdges = new UMat())
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.Canny(image, cannyEdges, 60, 180);
                CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                //Draw all contours to see what was detected (debug):
                //CvInvoke.DrawContours(image, contours, -1, new MCvScalar(200, 0, 0), 1);

                int count = contours.Size;
                for (int i = 0; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                    using (VectorOfPoint approxContour = new VectorOfPoint())
                    {
                        // Coarse polygon simplification: epsilon = 8% of the perimeter.
                        CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.08, true);
                        double area = CvInvoke.ContourArea(approxContour, false);

                        // Only consider contours with area above 100000 that still
                        // have at least 4 vertices after simplification.
                        if (area > 100000 && approxContour.Size >= 4)
                        {
                            Point[] pts = approxContour.ToArray();

                            // Centroid of the quadrilateral's first four vertices.
                            int x_sum = 0;
                            int y_sum = 0;
                            for (int j = 0; j < 4; j++)
                            {
                                x_sum += pts[j].X;
                                y_sum += pts[j].Y;
                            }
                            Point center = new Point(x_sum / 4, y_sum / 4);

                            // Classify each vertex relative to the centroid.
                            for (int j = 0; j < 4; j++)
                            {
                                if (pts[j].X < center.X && pts[j].Y < center.Y)
                                {
                                    left = pts[j];  // top-left corner
                                }
                                if (pts[j].X > center.X && pts[j].Y < center.Y)
                                {
                                    right = pts[j]; // top-right corner
                                }
                            }
                            // Stop after the first matching quadrilateral, as before.
                            return(image);
                        }
                    }
                }
            }
            return(image);
        }
示例#28
0
        /// <summary>
        /// Detects rectangle-like polygons in the (horizontally mirrored) webcam
        /// frame, draws their centres, coordinates and outlines onto an overlay,
        /// and publishes that overlay through the <c>webcamVid</c> field.
        /// </summary>
        /// <param name="webcamVidForOverlay">Current webcam frame; not modified.</param>
        /// <param name="currentCamNum">Camera index, forwarded to <c>drawMarker</c>.</param>
        /// <param name="onscreenPolyhasChanged">Currently unused; kept for caller compatibility.</param>
        /// <returns>Always true.</returns>
        public bool drawPolygons(Mat webcamVidForOverlay, int currentCamNum, bool onscreenPolyhasChanged = false)
        {
            // BUG FIX: the null test must come before dereferencing IsEmpty — the
            // original order (IsEmpty || == null) threw NullReferenceException on
            // a null frame. Also removed the unused 'downSampled' Mat.
            if (webcamVidForOverlay == null || webcamVidForOverlay.IsEmpty)
            {
                return(true);
            }

            //1. Pre-processing: mirror the frame and convert it to grayscale.
            Mat processedFrame = webcamVidForOverlay.Clone();
            CvInvoke.Flip(webcamVidForOverlay, processedFrame, Emgu.CV.CvEnum.FlipType.Horizontal);
            CvInvoke.CvtColor(processedFrame, processedFrame, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray); //convert to grayscale

            // Mirrored colour copy that all overlay drawing goes onto.
            Mat overlayGrid = new Mat(webcamVidForOverlay.Rows, webcamVidForOverlay.Cols, Emgu.CV.CvEnum.DepthType.Default, 3);
            CvInvoke.Flip(webcamVidForOverlay, overlayGrid, Emgu.CV.CvEnum.FlipType.Horizontal);

            //2. Canny Edge detection (3:1 upper:lower threshold ratio per Canny's paper)
            double thresholdLow  = 50.0;            //lower brightness threshold 0 -> 255 where 255 = white
            double thresholdHigh = 150;
            Mat    cannyResult   = new Mat();

            CvInvoke.Canny(processedFrame, cannyResult, thresholdLow, thresholdHigh);

            cannyResult_out = cannyResult;          //expose the edge map for display/debugging

            //3. Find the contours in the edge map.
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();

            CvInvoke.FindContours(
                cannyResult,                                       //the output of the Canny detector
                contours,                                          //the 2D vector of line-points
                hierarchy,                                         //hierarchy output (not used afterwards)
                Emgu.CV.CvEnum.RetrType.Tree,
                Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple //segments are compressed so only endpoints stored
                );

            VectorOfVectorOfPoint polyContours = new VectorOfVectorOfPoint(contours.Size);

            int numContours = contours.Size;

            for (int i = 0; i < numContours; i++)
            {
                // Simplify each contour with Ramer-Douglas-Peucker;
                // epsilon = 4% of the contour's arc length.
                double arcLengthPoly = CvInvoke.ArcLength(contours[i], true);
                CvInvoke.ApproxPolyDP(
                    contours[i],
                    polyContours[i],
                    arcLengthPoly * 0.04,
                    true);                          //treat the contour as closed

                // NOTE(review): the original comment claimed "4 contours -> rectangle"
                // but the code tests Size == 10; kept as-is to preserve behaviour —
                // confirm the intended vertex count.
                if (polyContours[i].Size == 10)
                {
                    bool validRect = true;          //no internal-angle check is actually performed

                    //find the area of the polygon to avoid false positives with very small areas
                    double polygonArea = CvInvoke.ContourArea(contours[i]);
                    double divMaxSize = 0.185, divMinSize = 0.135;

                    if (validRect && polygonArea > 200.0)
                    {
                        // sqrt(area)/perimeter is a scale-free compactness measure.
                        double sqrt_area = Math.Sqrt(polygonArea) / arcLengthPoly;

                        if (sqrt_area < divMaxSize && sqrt_area > divMinSize)
                        {
                            //find the centre of this rectangle & draw it onscreen
                            Point rectCentre = getCentre(polyContours[i]);
                            CvInvoke.Circle(overlayGrid, rectCentre, 5, new MCvScalar(0, 150, 105), -1, Emgu.CV.CvEnum.LineType.AntiAlias);
                            string centreString = "(" + rectCentre.X + "," + rectCentre.Y + ")";
                            CvInvoke.PutText(overlayGrid, centreString, new Point(rectCentre.X + 2, rectCentre.Y + 2), Emgu.CV.CvEnum.FontFace.HersheyComplexSmall, 0.5, new MCvScalar(0, 150, 105), 1, Emgu.CV.CvEnum.LineType.EightConnected, false);

                            //pass the centre of the current rectangle's co-ords to the display & scaling methods;
                            //this sets fields p and q as (lat, long)
                            scaleDisplayCoordsToGpsBounds(rectCentre.X, rectCentre.Y);

                            //draw the polygon outline onto the overlay
                            CvInvoke.Polylines(overlayGrid, polyContours, true, new MCvScalar(0, 255, 0));

                            usingCoords           = true;   //set this to prevent race conditions
                            current_pointGPS_lat  = p;      //UI display vars for lat/long
                            current_pointGPS_long = q;

                            x = rectCentre.X;               //drawMarker uses these to show the point trail
                            y = rectCentre.Y;

                            // Motion prediction once two distinct GPS points exist.
                            if (current_gps_point.X != prev_gps_point.X)
                            {
                                current_gps_point = predictPointMotion(x, y, current_gps_point, prev_gps_point);
                                //acccount for the first point marker
                                if (!(current_gps_point.X == 0 || prev_gps_point.X == 0))
                                {
                                    x = current_gps_point.X;       //these are the new predicted points for the shape
                                    y = current_gps_point.Y;
                                }
                            }
                            drawMarker(x, y, overlayGrid, true, currentCamNum);       //draw this point update it

                            usingCoords = false;
                        }
                    }
                }
            }                                                    //end numContours

            drawMarker(x, y, overlayGrid, false, currentCamNum); //keep drawing the overlay regardless
            webcamVid = overlayGrid;
            return(true);
        }
示例#29
0
    private void Webcam_ImageGrabbed(object sender, EventArgs e)
    {
        imgBGR = new Mat();

        if (webcam.IsOpened)
        {
            /*** Capture webcam stream ***/
            webcam.Retrieve(imgBGR);

            if (imgBGR.IsEmpty)
            {
                return;
            }

            imgIN = imgBGR.Clone();

            // Isolate unity defined Color range
            CvInvoke.CvtColor(imgIN, imgIN, ColorConversion.Bgr2Hsv); // Convert input to hsv
            //CvInvoke.GaussianBlur(imgIN, imgIN, new Size(25, 25), 0);

            // Applying thresold => getting binary filter => multiply it by input to get back color values
            imgBIN = imgIN.ToImage <Hsv, byte>(); // Binary output
            player1ThresholdOUT = new Mat();      // Player1 Binary Filter
            player2ThresholdOUT = new Mat();      // Player2 Binary Filter

            player1ThresholdOUT = imgBIN.InRange(
                new Hsv(player1Config.minValueH, player1Config.minValueS, player1Config.minValueV),
                new Hsv(player1Config.maxValueH, player1Config.maxValueS, player1Config.maxValueV)).Mat;
            player2ThresholdOUT = imgBIN.InRange(
                new Hsv(player2Config.minValueH, player2Config.minValueS, player2Config.minValueV),
                new Hsv(player2Config.maxValueH, player2Config.maxValueS, player2Config.maxValueV)).Mat;

            // Clearing Filter and Applying opening
            int operationSize = 1;
            structuringElement = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(2 * operationSize + 1, 2 * operationSize + 1), new Point(operationSize, operationSize));

            CvInvoke.Erode(player1ThresholdOUT, player1ThresholdOUT, structuringElement, new Point(-1, -1), 5, BorderType.Constant, new MCvScalar(0));
            CvInvoke.Dilate(player1ThresholdOUT, player1ThresholdOUT, structuringElement, new Point(-1, -1), 5, BorderType.Constant, new MCvScalar(0));

            CvInvoke.Erode(player2ThresholdOUT, player2ThresholdOUT, structuringElement, new Point(-1, -1), 5, BorderType.Constant, new MCvScalar(0));
            CvInvoke.Dilate(player2ThresholdOUT, player2ThresholdOUT, structuringElement, new Point(-1, -1), 5, BorderType.Constant, new MCvScalar(0));

            // Detecting Edges
            player1Contours = new VectorOfVectorOfPoint();
            player1Contour  = new VectorOfPoint();
            double biggestContourArea = 0;
            hierarchy = new Mat();
            CvInvoke.FindContours(player1ThresholdOUT, player1Contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone); // Find player1 Contour using Binary filter
            for (int i = 0; i < player1Contours.Size; i++)
            {
                double a = CvInvoke.ContourArea(player1Contours[i], false);
                if (a > biggestContourArea)
                {
                    biggestContourArea = a;
                    player1Contour     = player1Contours[i];
                }
            }

            player2Contours    = new VectorOfVectorOfPoint();
            player2Contour     = new VectorOfPoint();
            biggestContourArea = 0;
            hierarchy          = new Mat();
            CvInvoke.FindContours(player2ThresholdOUT, player2Contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone); // Find player2 Contour using Binary filter
            for (int i = 0; i < player2Contours.Size; i++)
            {
                double a = CvInvoke.ContourArea(player2Contours[i], false);
                if (a > biggestContourArea)
                {
                    biggestContourArea = a;
                    player2Contour     = player2Contours[i];
                }
            }

            // extract player1 rect pos and rotation
            if (player1Contour.Size > 0)
            {
                // Determine Bounding Rectangle and setting its related values
                boundRec = CvInvoke.MinAreaRect(player1Contour);

                if (boundRec.Size.IsEmpty)
                {
                    playersState = false;
                    return;
                }

                Vector3 currentCenter = new Vector2(boundRec.Center.X, boundRec.Center.Y);

                player1Centers.PushBack(currentCenter);

                // Draw Bounding Rectangle
                if (debugFlag)
                {
                    DrawPointsFRectangle(boundRec.GetVertices(), imgBGR);
                }

                float currentScreenSpacePorcentage = (boundRec.Size.Height / windowHeight) * (boundRec.Size.Width / windowWidth);

                if (currentScreenSpacePorcentage > playersMinimalArea)
                {
                    float currentAngle = boundRec.Angle;

                    if (boundRec.Size.Height < boundRec.Size.Width)
                    {
                        currentAngle = 90 + currentAngle;
                    }

                    // Insert angle value
                    player1Angles.PushBack(currentAngle);

                    // Get averages
                    Vector3 centerDiffAverage = Vector3.zero;
                    for (int i = 0; i < player1Centers.curLength - 1; i++)
                    {
                        centerDiffAverage += player1Centers.data[i + 1] - player1Centers.data[i];
                    }
                    centerDiffAverage /= (player1Centers.curLength - 1);

                    // average angle
                    float angleAverage = 0.0f;
                    foreach (float f in player1Angles.data)
                    {
                        angleAverage += f;
                    }
                    angleAverage /= player1Angles.curLength;

                    // !!! Setting bridge values !!!
                    playersState       = true;
                    player1Velocity    = centerDiffAverage * 1f;
                    player1Velocity.y  = -player1Velocity.y;
                    player1EulerAngles = new Vector3(0, 0, -angleAverage);
                }
            }

            // extract player2 rect pos and rotation
            if (player2Contour.Size > 0)
            {
                // Determine Bounding Rectangle and setting its related values
                boundRec = CvInvoke.MinAreaRect(player2Contour);

                if (boundRec.Size.IsEmpty)
                {
                    playersState = false;
                    return;
                }

                Vector3 currentCenter = new Vector2(boundRec.Center.X, boundRec.Center.Y);

                player2Centers.PushBack(currentCenter);

                // Draw Bounding Rectangle
                if (debugFlag)
                {
                    DrawPointsFRectangle(boundRec.GetVertices(), imgBGR);
                }

                float currentScreenSpacePorcentage = (boundRec.Size.Height / windowHeight) * (boundRec.Size.Width / windowWidth);

                if (currentScreenSpacePorcentage > playersMinimalArea)
                {
                    float currentAngle = boundRec.Angle;

                    if (boundRec.Size.Height < boundRec.Size.Width)
                    {
                        currentAngle = 90 + currentAngle;
                    }

                    // Insert angle value
                    player2Angles.PushBack(currentAngle);

                    // Get averages
                    Vector3 centerDiffAverage = Vector3.zero;
                    for (int i = 0; i < player2Centers.curLength - 1; i++)
                    {
                        centerDiffAverage += player2Centers.data[i + 1] - player2Centers.data[i];
                    }
                    centerDiffAverage /= (player2Centers.curLength - 1);

                    // average angle
                    float angleAverage = 0.0f;
                    foreach (float f in player2Angles.data)
                    {
                        angleAverage += f;
                    }
                    angleAverage /= player2Angles.curLength;

                    // !!! Setting bridge values !!!
                    playersState       = true;
                    player2Velocity    = centerDiffAverage * 1f;
                    player2Velocity.y  = -player2Velocity.y;
                    player2EulerAngles = new Vector3(0, 0, -angleAverage);
                }
            }

            //Debug Display
            if (debugFlag)
            {
                CvInvoke.Imshow("cam", imgBGR);
                CvInvoke.Imshow("p1", player1ThresholdOUT);
                CvInvoke.Imshow("p2", player2ThresholdOUT);
            }
        }
    }
示例#30
0
        /// <summary>
        /// Worker thread for image processing.
        /// Grabs frames from the video source forever, applies the transform
        /// selected by <c>_processingMethodIndex</c> (passthrough, grayscale,
        /// Canny edges, contour drawing, or face/eye detection), and shows the
        /// result. Never returns.
        /// </summary>
        public void CvMainThread()
        {
            var faceCascade = new CascadeClassifier();
            var eyesCascade = new CascadeClassifier();

            // NOTE(review): load() results are not checked here; if a cascade
            // file is missing, detectMultiScale will fail later instead.
            faceCascade.load("haarcascade_frontalface_alt.xml");
            eyesCascade.load("haarcascade_eye_tree_eyeglasses.xml");

            var srcFrame = new Mat();
            var dstFrame = new Mat();

            var imgProc = new ImgProc();

            _videoIo.StartCapture();

            while (true)
            {
                _videoIo.GetFrame(srcFrame);

                switch (_processingMethodIndex)
                {
                    // 0: passthrough — show the raw frame.
                    case 0:
                        break;

                    // 1: grayscale (converted back to RGB for display).
                    case 1:
                        imgProc.cvtColor(srcFrame, dstFrame, ColorConversionCodes.COLOR_RGBA2GRAY);
                        imgProc.cvtColor(dstFrame, srcFrame, ColorConversionCodes.COLOR_GRAY2RGB);
                        break;

                    // 3: Canny edge detection. (Index 2 is intentionally
                    // unhandled and falls through to default/passthrough.)
                    case 3:
                        imgProc.cvtColor(srcFrame, dstFrame, cvRT.ColorConversionCodes.COLOR_RGBA2GRAY);
                        imgProc.GaussianBlur(dstFrame, dstFrame, new cvRT.Size(7, 7), 1.5, 1.5);
                        imgProc.Canny(dstFrame, dstFrame, 0, 30, 3);
                        imgProc.cvtColor(dstFrame, srcFrame, ColorConversionCodes.COLOR_GRAY2RGB);
                        break;

                    // 4: find contours on the edge image and draw them on a
                    // blanked frame.
                    case 4:
                    {
                        var contours = new VectorOfVectorOfPoint();
                        var hierarchy = new VectorOfVec4i();
                        var color = new Scalar(255, 255, 255, 255);

                        imgProc.Canny(srcFrame, dstFrame, 100, 100 * 2, 3);
                        imgProc.FindContours(dstFrame, contours, hierarchy, ContourRetrievalAlgorithm.RETR_TREE, ContourApproximationModes.CHAIN_APPROX_SIMPLE, new Point(0, 0));

                        // Clear the source frame to black before drawing.
                        srcFrame.Set(new Scalar(0, 0, 0, 0));

                        for (var i = 0; i < contours.Count(); i++)
                        {
                            imgProc.DrawContours(srcFrame, contours, i, color, 2, 8, hierarchy, 0, new Point(0, 0));
                        }

                        break;
                    }

                    // 5: face detection with nested eye detection.
                    case 5:
                    {
                        imgProc.cvtColor(srcFrame, dstFrame, ColorConversionCodes.COLOR_RGBA2GRAY);
                        imgProc.EqualizeHist(dstFrame, dstFrame);

                        // Faces found in the current frame. (FIX: the original
                        // re-allocated this list a second time inside the try
                        // block — a redundant dead store.)
                        var faces = new List<Rect>();

                        try
                        {
                            faceCascade.detectMultiScale(dstFrame, faces, 1.1, 2, (int)(0 | CV_HAAR.SCALE_IMAGE), new cvRT.Size(30, 30));
                        }
                        catch (Exception ex)
                        {
                            // Best-effort: a failed detection pass leaves
                            // `faces` empty and the frame is shown unannotated.
                            Debug.WriteLine("Exception {0}", ex.Message);
                        }

                        // For each face, draw an ellipse and detect the eyes.
                        foreach (var face in faces)
                        {
                            // Draw ellipse for the face.
                            var faceCenter = new Point(face.X + face.Width / 2, face.Y + face.Height / 2);
                            imgProc.Ellipse(srcFrame, faceCenter, new cvRT.Size(face.Width / 2, face.Height / 2), 0, 0, 360, new Scalar(255, 0, 255, 0), 4, 8, 0);

                            // Detect the eyes within the face region.
                            var faceRoi = dstFrame.RectOfInterest(face);
                            var eyes = new List<Rect>();
                            eyesCascade.detectMultiScale(faceRoi, eyes, 1.1, 2, (int)(0 | CASCADE_FLAG.CASCADE_SCALE_IMAGE), new cvRT.Size(30, 30));

                            // Draw a circle over each eye (coordinates are
                            // relative to the face ROI, hence the face offset).
                            foreach (var eye in eyes)
                            {
                                var eyeCenter = new Point(face.X + eye.X + eye.Width / 2, face.Y + eye.Y + eye.Height / 2);
                                var radius = (int)Math.Round((eye.Width + eye.Height) * 0.25);
                                imgProc.Circle(srcFrame, eyeCenter, radius, new Scalar(255, 0, 0, 0), 4, 8, 0);
                            }
                        }

                        break;
                    }

                    default:
                        break;
                }

                _videoIo.ShowFrame(srcFrame);
            }
        }
示例#31
0
        /// <summary>
        /// Frame-processing loop, started once the camera is opened.
        /// Grabs frames, detects triangle/square contours via a blur → gray →
        /// Canny pipeline, annotates the display frame, updates the UI labels,
        /// and forwards sufficiently large detections to the Arduino via
        /// <c>SendValues</c>. Runs until the capture closes.
        /// </summary>
        private void ProcessImage()
        {
            while (_capture.IsOpened)
            {
                // Capture a camera frame as a Mat.
                Mat sourceFrame = _capture.QueryFrame();

                // Resize to the PictureBox width, preserving aspect ratio.
                int  newHeight = (sourceFrame.Size.Height * pictureBox1.Size.Width) / sourceFrame.Size.Width;
                Size newSize   = new Size(pictureBox1.Size.Width, newHeight);
                CvInvoke.Resize(sourceFrame, sourceFrame, newSize);

                // Flip frame so pixel locations match the physical setup.
                CvInvoke.Flip(sourceFrame, sourceFrame, FlipType.Vertical);

                // Edge-detection pipeline: Gaussian blur → grayscale → Canny.
                // (FIX: removed an unused binary/gray thresholded copy that was
                // computed every frame but never read.)
                var blurredImage   = new Mat();
                var cannyImage     = new Mat();
                var decoratedImage = new Mat();
                CvInvoke.GaussianBlur(sourceFrame, blurredImage, new Size(9, 9), 0);
                CvInvoke.CvtColor(blurredImage, blurredImage, typeof(Bgr), typeof(Gray));
                CvInvoke.Canny(blurredImage, cannyImage, 150, 255);
                CvInvoke.CvtColor(cannyImage, decoratedImage, typeof(Gray), typeof(Bgr));

                // Find contours on the edge image.
                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(cannyImage, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

                    string shape = "";

                    // Classify each contour as a triangle or square.
                    for (int i = 0; i < contours.Size; i++)
                    {
                        using (VectorOfPoint contour = contours[i])
                            using (VectorOfPoint approxContour = new VectorOfPoint())
                            {
                                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);

                                double approxArea = CvInvoke.ContourArea(approxContour, false);

                                // BUG FIX: contours with area <= 250 previously fell
                                // through unclassified and were marked/labeled with the
                                // shape string left over from an earlier iteration.
                                // Skip them instead.
                                if (approxArea <= 250)
                                {
                                    continue;
                                }

                                if (approxContour.Size == 3)      // 3 vertices => triangle
                                {
                                    shape = "Triangle";
                                }
                                else if (approxContour.Size == 4) // 4 vertices => square
                                {
                                    shape = "Square";
                                }
                                else
                                {
                                    continue;
                                }

                                int       frameArea   = decoratedImage.Width * decoratedImage.Height;
                                Rectangle boundingBox = CvInvoke.BoundingRectangle(contours[i]);
                                int       boxArea     = boundingBox.Width * boundingBox.Height;

                                // Discard the sheet of paper itself (a rectangle
                                // covering more than half the frame).
                                if (boxArea > frameArea / 2)
                                {
                                    continue;
                                }

                                // Draw on the display frame.
                                MarkDetectedObject(sourceFrame, contours[i], boundingBox, CvInvoke.ContourArea(contour), shape);
                                Point center = new Point(boundingBox.X + boundingBox.Width / 2, boundingBox.Y + boundingBox.Height / 2);
                                Invoke(new Action(() =>
                                {
                                    // Print the detected coordinates to screen.
                                    label2.Text = $"Coordinates: (X){center.X}, (Y){center.Y}";
                                }));

                                // Send values to the Arduino only for large shapes.
                                if (approxArea > 1200)
                                {
                                    SendValues(center.X, center.Y, shape);
                                }
                            }
                    }

                    // Print frame/contour statistics.
                    Invoke(new Action(() =>
                    {
                        label1.Text = $" There are {contours.Size} contours detected";
                        label6.Text = $" Frame Width {sourceFrame.Width}";
                        label7.Text = $" Frame Height {sourceFrame.Height}";
                    }));

                    // BUG FIX: marshal the PictureBox update to the UI thread —
                    // this runs on the capture worker, and every other UI write
                    // in this method already goes through Invoke.
                    Invoke(new Action(() =>
                    {
                        pictureBox1.Image = sourceFrame.Bitmap;
                    }));
                }
            }
        }