Example #1
        //connect to visionary
        private void button_Connect_Click(object sender, EventArgs e)
        {
            String host = textBox_IP.Text;
            int    port = 2114;
            VisionaryDataStream dataStream = new VisionaryDataStream(host, port);
            VisionaryControl    control    = new VisionaryControl(host);

            textBox_Index.Text = "0";
            Task.Run(async() =>
            {
                // Buffer for reading data
                Byte[] bytes = new Byte[1024];

                try
                {
                    IPAddress ipAddress = Dns.GetHostEntry("localhost").AddressList[0];
                    TcpListener server  = new TcpListener(ipAddress, Convert.ToInt32(textBox_BackendPort.Text));
                    System.Threading.Thread.Sleep(1000);
                    server.Start();
                    //MessageBox.Show("Waiting for client to connect...");
                    TcpClient client      = server.AcceptTcpClient();
                    NetworkStream nStream = client.GetStream();
                    //network loop: read a request, then send a reply
                    int i = -1;
                    while (true)
                    {
                        Thread.Sleep(2);
                        i = nStream.Read(bytes, 0, bytes.Length);

                        if (i > 0)
                        {
                            String data = null;
                            // Translate data bytes to an ASCII string.
                            data = System.Text.Encoding.ASCII.GetString(bytes, 0, i);
                            //MessageBox.Show("Received:" + data);
                            // Process the data sent by the client.
                            data = data.ToUpper();
                            //nStream.Write(bytes, 0, bytes.Length);

                            if (data == "REQUEST\n")
                            {
                                //transmit 1 frame
                                //compile image from 640x512x3 -> 983040x1
                                try
                                {
                                    //byte[] bStream = ImageToByte(bitmap_RGB);
                                    nStream.Write(bitmap_arry, 0, bitmap_arry.Length);
                                }
                                catch
                                {
                                }
                            }
                            else if (data != "EMPTY\n" && checkBox_UseBackend.Checked == true)
                            {
                                try
                                {
                                    //try parsing x/y/w/h if it's not empty
                                    parsedata(data);
                                }
                                catch
                                {
                                }
                            }
                        }
                    }
                }
                catch (Exception e1)
                {
                    MessageBox.Show("Backend socket error: " + e1.Message);
                }
            });

            try
            {
                Task.Run(async() =>
                {
                    if (!await dataStream.ConnectAsync())
                    {
                        // Data stream connection failed
                        MessageBox.Show("Data stream connection failed");
                    }
                    if (!await control.ConnectAsync())
                    {
                        // Data control (CoLaB) connection failed
                    }
                    await control.StartAcquisitionAsync();
                });
            }
            catch (Exception ex)
            {
                MessageBox.Show("ERROR:" + ex.ToString());
                throw;
            }
            System.Threading.Thread.Sleep(1500);

            //-------------------------------------------------------------------------------------------------------
            // Receiving & Image Processing thread
            //-------------------------------------------------------------------------------------------------------
            Task.Run(async() =>
            {
                while (true)
                {
                    //-------------------------------------------------------------------------------------------------------
                    // Read Data from Visionary Datastream
                    //-------------------------------------------------------------------------------------------------------
                    VisionaryFrame frame = await dataStream.GetNextFrameAsync();

                    //System.Threading.Thread.Sleep(1000);
                    VisionarySDepthMapData depthMap = frame.GetData <VisionarySDepthMapData>();

                    // Important: When converting multiple frames, make sure to re-use the same converter as it will result in much better performance.
                    PointCloudConverter converter = new PointCloudConverter();
                    Vector3[] pointCloud          = converter.Convert(depthMap);
                    CenterH = pointCloud[250 * 640 + 320].Z;
                    //read and set range of textboxes
                    setTextboxRange();

                    //Assign converted image
                    bitmap      = depthMap.ZMap.ToBitmap(Zmap_DR, Zmap_Offset);
                    bitmap_RGB  = depthMap.RgbaMap.ToBitmap();
                    bitmap_arry = depthMap.RgbaMap.Data.ToArray();
                    ZMap_arry   = depthMap.ZMap.Data.ToArray();

                    this.label1.Text = bitmap.GetPixel(320, 250).R.ToString();
                    //-------------------------------------------------------------------------------------------------------
                    // Optional default image processing method (locate box)
                    //-------------------------------------------------------------------------------------------------------
                    if (checkBox_MinAreaRect.Checked == true)
                    {
                        Bitmap TempMap = bitmap;
                        if (RGBAsZmap == true)
                        {
                            TempMap = bitmap_RGB;
                        }

                        //init different images for different detection stages
                        Image <Bgr, byte> a  = new Image <Bgr, byte>(TempMap);
                        Image <Gray, byte> b = new Image <Gray, byte>(a.Width, a.Height);         //edge detection
                        Image <Gray, byte> c = new Image <Gray, byte>(a.Width, a.Height);         //find contour

                        //set threshold
                        int Blue_threshold  = 50; //0-255
                        int Green_threshold = 50; //0-255
                        int Red_threshold   = 50; //0-255
                        if (RGBAsZmap == false)
                        {
                            a = ~a;
                            a = a.ThresholdBinary(new Bgr(Blue_threshold, Green_threshold, Red_threshold), new Bgr(255, 255, 255));
                        }

                        //Set ROI
                        a.ROI = new Rectangle(ROIx, ROIy, (int)(640 * ROIScale), (int)(512 * ROIScale));

                        //Find edges
                        int cannyThreshold = 100;
                        CvInvoke.Canny(a, b, cannyThreshold / 2, cannyThreshold, 3, false);

                        //Enhance canny edges
                        Mat struct_element = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
                        CvInvoke.Dilate(b, b, struct_element, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(255, 255, 255));

                        //Find contours
                        VectorOfVectorOfPoint con = new VectorOfVectorOfPoint();
                        CvInvoke.FindContours(b, con, c, RetrType.List, ChainApproxMethod.ChainApproxNone);

                        Point[][] con1  = con.ToArrayOfArray();
                        PointF[][] con2 = Array.ConvertAll <Point[], PointF[]>(con1, new Converter <Point[], PointF[]>(PointToPointF));

                        listBox_BoxList.Items.Clear();
                        for (int i = 0; i < con.Size; i++)
                        {
                            //Filter params
                            double tempArea  = CvInvoke.ContourArea(con[i], true);
                            double tempArc   = CvInvoke.ArcLength(con[i], true);
                            double tempScale = tempArea / Math.Pow(tempArc / 4, 2);

                            if (tempArea >= MinPixelArea && tempScale > RatioFilter)
                            {
                                RotatedRect rrec = CvInvoke.MinAreaRect(con2[i]);       //g

                                //-------------------------------------------------------------------------------------------------------
                                // find box dimensions
                                //-------------------------------------------------------------------------------------------------------

                                //find box height
                                int boxHeight = -10000;
                                int tempX     = 0; //X axis offset if point is NULL
                                int tempY     = 0; //Y axis offset if point is NULL
                                while (boxHeight < 0 && tempX <= 50)
                                {
                                    boxHeight = (int)Math.Round(((double)BackgroundH - (double)pointCloud[(ROIy + tempY + (int)rrec.Center.Y) * 640 + (ROIx + tempX + (int)rrec.Center.X)].Z) * (double)1000);
                                    tempX++;
                                }
                                tempX--;//for angle correction

                                //apply sensor perspective angle correction
                                if (AngleCorr == true)
                                {
                                    double boxCenterOffset = Math.Sqrt(Math.Pow(pointCloud[(ROIy + (int)rrec.Center.Y) * 640 + (ROIx + tempX + (int)rrec.Center.X)].X, 2) + Math.Pow(pointCloud[(ROIy + (int)rrec.Center.Y) * 640 + (ROIx + tempX + (int)rrec.Center.X)].Y, 2));
                                    double boxCenterAngle  = Math.Atan(boxCenterOffset * 1.1 / BackgroundH);
                                    double heightMulti     = 1 / Math.Cos(boxCenterAngle);
                                    boxHeight = (int)((double)boxHeight * heightMulti);
                                }

                                //find the real-world size of one pixel from the average of the X/Y axis scales on the box plane
                                tempX = 0; //unused here
                                tempY = 0; //unused here
                                double PixelScaleX = -10000;
                                double PixelScaleY = -10000;
                                double PixelScale  = -10000;

                                PixelScaleX = (double)(pointCloud[(int)(ROIy + rrec.Center.Y) * 640 + (int)(ROIx + rrec.Center.X) - (15 + tempX)].X - (double)pointCloud[(int)(ROIy + rrec.Center.Y) * 640 + (int)(ROIx + rrec.Center.X) + (15 + tempX)].X) * 1000.0 / (30.0 + (tempX + tempX));

                                PixelScaleY = (double)(pointCloud[(int)(ROIy + rrec.Center.Y - (15 + tempY)) * 640 + (int)(ROIx + rrec.Center.X)].Y - (double)pointCloud[(int)(ROIy + rrec.Center.Y + (15 + tempY)) * 640 + (int)(ROIx + rrec.Center.X)].Y) * 1000.0 / (30.0 + (tempY + tempY));

                                PixelScale = (PixelScaleX + PixelScaleY) / 2;

                                int boxWidth  = (int)(rrec.Size.Width * PixelScale);
                                int boxLength = (int)(rrec.Size.Height * PixelScale);

                                //Rounding result
                                boxLength = (int)(Math.Round((double)boxLength / Rounding, MidpointRounding.AwayFromZero) * Rounding);
                                boxWidth  = (int)(Math.Round((double)boxWidth / Rounding, MidpointRounding.AwayFromZero) * Rounding);
                                boxHeight = (int)(Math.Round((double)boxHeight / Rounding, MidpointRounding.AwayFromZero) * Rounding);

                                double boxVolume = ((double)boxLength / 10) * ((double)boxWidth / 10) * ((double)boxHeight / 10); //cm^3

                                //add box to listbox
                                listBox_BoxList.Items.Add("Box (Length: " + boxLength + "mm, Width: " + boxWidth + "mm, Height: " + boxHeight + "mm, Vol:" + boxVolume + "cm^3)");

                                PointF[] pointfs = rrec.GetVertices();
                                for (int j = 0; j < pointfs.Length; j++)
                                {
                                    CvInvoke.Line(a, new Point((int)pointfs[j].X, (int)pointfs[j].Y), new Point((int)pointfs[(j + 1) % 4].X, (int)pointfs[(j + 1) % 4].Y), new MCvScalar(0, 255, 0, 255), 4);
                                }
                            }
                        }

                        //save box list
                        if (SaveQueueList == true)
                        {
                            System.IO.StreamWriter SaveFile = new System.IO.StreamWriter(textBox_Savepath.Text + "/Output/Detection Result/" + DateTime.Now.ToString("yyyyMMdd_hhmmss") + ".txt");

                            foreach (var item in listBox_BoxList.Items)
                            {
                                SaveFile.WriteLine(item);
                            }

                            SaveFile.Close();
                            SaveQueueList = false;
                        }

                        //save pointcloud
                        if (SaveQueuePly == true)
                        {
                            await PointCloudPlyWriter.WriteFormatPLYAsync(textBox_Savepath.Text + "/Output/PointCloud/" + index.ToString() + ".ply", pointCloud, depthMap.RgbaMap, true);
                        }

                        /*
                         * //for displaying contours
                         * for (int i = 0; i < con.Size; i++)
                         * {
                         *  CvInvoke.DrawContours(d, con, i, new MCvScalar(255, 255, 0, 255), 2);
                         * }
                         */

                        this.pictureBox2.Image = a.ToBitmap();
                        this.pictureBox1.Image = bitmap_RGB;
                    }
                    else
                    {
                        this.pictureBox2.Image = bitmap_RGB;
                        this.pictureBox1.Image = bitmap;
                    }
                    try
                    {
                        bitmap_Mixed = mixedMap(bitmap_arry, ZMap_arry, Convert.ToUInt16(textBox_DynamicRange.Text));
                        this.pictureBox_Mixed.Image = bitmap_Mixed;
                    }
                    catch { }
                }
            });
        }
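Example #1 converts each contour with a PointToPointF helper that is not included in the snippet; a minimal sketch matching the Converter<Point[], PointF[]> delegate it is passed to Array.ConvertAll as:

        // Minimal sketch of the PointToPointF converter used by Array.ConvertAll above.
        private static PointF[] PointToPointF(Point[] points)
        {
            PointF[] result = new PointF[points.Length];
            for (int i = 0; i < points.Length; i++)
            {
                result[i] = new PointF(points[i].X, points[i].Y);
            }
            return result;
        }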
Example #2
        public void DisplayWebcam(popup popup)
        {
            while (true)
            {
                Mat          frame         = ReadCamera1();
                int          largestCircle = 0;
                List <Shape> foundShape    = new List <Shape>();
                List <char>  shapeType     = new List <char>();


                // resize to PictureBox aspect ratio
                int  newHeight = (frame.Size.Height * pictureBox0.Size.Width) / frame.Size.Width;
                Size newSize   = new Size(pictureBox0.Size.Width, newHeight);
                CvInvoke.Resize(frame, frame, newSize);

                Image <Hsv, Byte>  img               = frame.ToImage <Hsv, Byte>();
                Image <Gray, byte> coralNestImg      = frame.ToImage <Gray, Byte>();
                Image <Gray, byte> coralOutcropImg   = frame.ToImage <Gray, Byte>();
                Image <Gray, byte> starfishImg       = frame.ToImage <Gray, Byte>();
                Image <Bgr, byte>  drawnImage        = frame.ToImage <Bgr, byte>();
                Image <Bgr, byte>  coralNestDrawn    = frame.ToImage <Bgr, byte>();
                Image <Bgr, byte>  coralOutcropDrawn = frame.ToImage <Bgr, byte>();
                Image <Bgr, byte>  starfishDrawn     = frame.ToImage <Bgr, byte>();
                Image <Gray, byte> gridImg           = frame.ToImage <Gray, Byte>();
                Image <Bgr, byte>  gridDrawn         = frame.ToImage <Bgr, byte>();

                //Line Follow
                Hsv T_Lower = new Hsv(colorThresholds[4].h_Lower, colorThresholds[4].s_Lower, colorThresholds[4].v_Lower);
                Hsv T_Upper = new Hsv(colorThresholds[4].h_Upper, colorThresholds[4].s_Upper, colorThresholds[4].v_Upper);
                Image <Gray, byte> laneImg = frame.ToImage <Gray, Byte>();
                laneImg = img.InRange(T_Lower, T_Upper).Erode(2).Dilate(2);

                //counts for the left/center/right columns
                int leftC   = 0;
                int centerC = 0;
                int rightC  = 0;


                for (int i = (laneImg.Height / 20) * 15; i < laneImg.Height; i++)
                {
                    for (int j = (laneImg.Width / 20) * 14; j < (laneImg.Width / 20) * 16; j++)
                    {
                        //left
                        if (laneImg.Data[i, j, 0] == 255)
                        {
                            leftC++;
                        }
                    }
                    for (int j = (laneImg.Width / 20) * 16; j < (laneImg.Width / 20) * 18; j++)
                    {
                        //center
                        if (laneImg.Data[i, j, 0] == 255)
                        {
                            centerC++;
                        }
                    }
                    for (int j = (laneImg.Width / 20) * 18; j < (laneImg.Width / 20) * 20; j++)
                    {
                        //right
                        if (laneImg.Data[i, j, 0] == 255)
                        {
                            rightC++;
                        }
                    }
                }

                if (leftC > centerC && leftC > rightC)
                {
                    //twist left
                    Twist twist = new Twist();
                    twist.Linear.Z = 1f;
                    SetInput(twist);
                    label7.Invoke(new Action(() => {
                        label7.Text = "left";
                    }));
                }
                else
                {
                    if (centerC > rightC && centerC > leftC)
                    {
                        // twist center
                        Twist twist = new Twist();
                        twist.Linear.X = 1f;
                        SetInput(twist);
                        label7.Invoke(new Action(() => {
                            label7.Text = "center";
                        }));
                    }
                    else
                    {
                        //twist right
                        Twist twist = new Twist();
                        twist.Linear.Z = -1f;
                        SetInput(twist);

                        label7.Invoke(new Action(() => {
                            label7.Text = "right";
                        }));
                    }
                }

                if (leftC + rightC + centerC <= 100)
                {
                    Twist twist = new Twist();
                    twist.Linear.X = 0;
                    SetInput(twist);
                    //Update keys
                    popup.UpdateKeys(key);
                }

                // Find Centers and Draw Bounding Rectangle
                for (int i = 0; i < 3; i++)
                {
                    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
                    List <Shape>          shapes   = new List <Shape>();
                    Mat    draw        = new Mat();
                    Hsv    t_Lower     = new Hsv(colorThresholds[i].h_Lower, colorThresholds[i].s_Lower, colorThresholds[i].v_Lower);
                    Hsv    t_Upper     = new Hsv(colorThresholds[i].h_Upper, colorThresholds[i].s_Upper, colorThresholds[i].v_Upper);
                    int    shapeIndex  = 0;
                    double largestArea = 0;
                    switch (i)
                    {
                    case 0:
                        coralNestImg = img.InRange(t_Lower, t_Upper).Erode(2).Dilate(2);
                        CvInvoke.FindContours(coralNestImg, contours, draw, Emgu.CV.CvEnum.RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
                        break;

                    case 1:
                        coralOutcropImg = img.InRange(t_Lower, t_Upper).Erode(2).Dilate(2);
                        CvInvoke.FindContours(coralOutcropImg, contours, draw, Emgu.CV.CvEnum.RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
                        break;

                    case 2:
                        starfishImg = img.InRange(t_Lower, t_Upper).Erode(2).Dilate(2);
                        CvInvoke.FindContours(starfishImg, contours, draw, Emgu.CV.CvEnum.RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
                        break;

                    default:
                        break;
                    }


                    for (int j = 0; j < contours.Size; j++)
                    {
                        double        perimeter = CvInvoke.ArcLength(contours[j], true);
                        VectorOfPoint positions = new VectorOfPoint();
                        CvInvoke.ApproxPolyDP(contours[j], positions, 0.03 * perimeter, true);
                        var moments = CvInvoke.Moments((contours[j]));
                        shapes.Add(new Shape());
                        //Find Centroid of shape and area and store in shape class
                        shapes[j].x_Center = (moments.M10 / moments.M00);
                        shapes[j].y_Center = (moments.M01 / moments.M00);
                        shapes[j].area     = CvInvoke.ContourArea(contours[j]);
                    }
                    //Only compute if there are shapes on screen
                    if (shapes.Count > 0)
                    {
                        // find the largest shape and store its index
                        for (int j = 0; j < shapes.Count; j++)
                        {
                            if (shapes[j].area > largestArea)
                            {
                                largestArea = shapes[j].area;
                                shapeIndex  = j;
                            }
                        }


                        Rectangle boundingRect = new Rectangle((int)(shapes[shapeIndex].x_Center - 50), (int)(shapes[shapeIndex].y_Center - 50), 100, 100);
                        switch (i)
                        {
                        case 0:
                            coralNestDrawn = coralNestImg.Convert <Bgr, byte>();
                            coralNestDrawn.Draw(boundingRect, new Bgr(0, 0, 255), 3, LineType.EightConnected, 0);
                            //Add to list of shapes found
                            foundShape.Add(shapes[shapeIndex]);
                            shapeType.Add('n');
                            break;

                        case 1:
                            coralOutcropDrawn = coralOutcropImg.Convert <Bgr, byte>();
                            coralOutcropDrawn.Draw(boundingRect, new Bgr(0, 0, 255), 3, LineType.EightConnected, 0);
                            //Add to list of shapes found
                            foundShape.Add(shapes[shapeIndex]);
                            shapeType.Add('o');
                            break;

                        case 2:
                            starfishDrawn = starfishImg.Convert <Bgr, byte>();
                            starfishDrawn.Draw(boundingRect, new Bgr(0, 0, 255), 3, LineType.EightConnected, 0);
                            //Add to list of shapes found
                            foundShape.Add(shapes[shapeIndex]);
                            shapeType.Add('s');
                            break;
                        }
                    }
                }
                //Circle Detection

                /*     //    var findEdges = img.SmoothGaussian((int)(colorThresholds[3].h_Upper)).Convert<Gray, byte>().ThresholdBinaryInv(new Gray(colorThresholds[3].s_Lower), new Gray(colorThresholds[3].s_Upper)).Erode((int)(colorThresholds[3].h_Lower)).Dilate((int)(colorThresholds[3].h_Lower));
                 *
                 *       Hsv c_Lower = new Hsv(colorThresholds[3].h_Lower, colorThresholds[3].s_Lower, colorThresholds[3].v_Lower);
                 *       Hsv c_Upper = new Hsv(colorThresholds[3].h_Upper, colorThresholds[3].s_Upper, colorThresholds[3].v_Upper);
                 *       Image<Gray, byte> findEdges = frame.ToImage<Gray, Byte>();
                 *       findEdges = img.InRange(c_Lower, c_Upper).Erode(2);
                 *
                 *       Gray cannyThreshold = new Gray(180);
                 *               Gray cannyThresholdLinking = new Gray(120);
                 *               Gray circleAccumulatorThreshold = new Gray(60);
                 *
                 *               CircleF[] foundCircles = findEdges.HoughCircles(
                 *                       cannyThreshold,
                 *                       circleAccumulatorThreshold,
                 *                       4.0, //Resolution of the accumulator used to detect centers of the circles
                 *                       4.0, //Resolution of the accumulator used to detect centers of the circles
                 *                       10.0, //min distance
                 *                       10, //min radius
                 *                       30 //max radius
                 *                       )[0]; //Get the circles from the first channel
                 *
                 *               drawnImage = findEdges.Convert<Bgr, Byte>();
                 *
                 *
                 *               // Find Largest Circle and Draw it on Frame
                 *               for (int j = 0; j < foundCircles.Length; j++)
                 *               {
                 *                       if (j == 0)
                 *                       {
                 *                               // Skip 0 because its the largest circle at this time
                 *                       }
                 *                       else
                 *                       {
                 *                               if (foundCircles[j].Area > foundCircles[largestCircle].Area) // Check to see if this is the Largest Circle
                 *                               {
                 *                                       largestCircle = j;
                 *                               }
                 *                       }
                 *               }
                 *               if (foundCircles.Length != 0)
                 *                       drawnImage.Draw(foundCircles[largestCircle], new Bgr(0, 255, 0), 3, LineType.EightConnected, 0); // Circle Draw
                 *
                 */


                // Grid Detection
                VectorOfVectorOfPoint Contours = new VectorOfVectorOfPoint();
                Mat Draw    = new Mat();
                Hsv g_Lower = new Hsv(colorThresholds[5].h_Lower, colorThresholds[5].s_Lower, colorThresholds[5].v_Lower);
                Hsv g_Upper = new Hsv(colorThresholds[5].h_Upper, colorThresholds[5].s_Upper, colorThresholds[5].v_Upper);
                gridImg = img.InRange(g_Lower, g_Upper).Erode(2).Dilate(2);
                CvInvoke.FindContours(gridImg, Contours, Draw, Emgu.CV.CvEnum.RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
                List <Shape> gShapes = new List <Shape>();
                for (int j = 0; j < Contours.Size; j++)
                {
                    double        perimeter = CvInvoke.ArcLength(Contours[j], true);
                    VectorOfPoint positions = new VectorOfPoint();
                    CvInvoke.ApproxPolyDP(Contours[j], positions, 0.03 * perimeter, true);
                    var moments = CvInvoke.Moments((Contours[j]));
                    gShapes.Add(new Shape());
                    //Find Centroid of shape and area and store in shape class
                    gShapes[j].x_Center = (moments.M10 / moments.M00);
                    gShapes[j].y_Center = (moments.M01 / moments.M00);
                    gShapes[j].area     = CvInvoke.ContourArea(Contours[j]);
                }

                if (gShapes.Count > 0)
                {
                    for (int i = gShapes.Count - 1; i >= 0; i--)
                    {
                        if (gShapes[i].area < 400 || gShapes[i].area > 20000)
                        {
                            gShapes.RemoveAt(i);                             // remove shapes outside the expected grid-square size
                        }
                    }

                    /*
                     *                                      // outline grid squares found
                     *                                      gridDrawn = gridImg.Convert<Bgr, byte>();
                     *                                      for(int i =0;i<gShapes.Count; i++)
                     *                                      {
                     *                                              Rectangle boundingRect = new Rectangle((int)(gShapes[i].x_Center - 50), (int)(gShapes[i].y_Center - 50), 100, 100);
                     *                                              gridDrawn.Draw(boundingRect, new Bgr(0, 0, 255), 3, LineType.EightConnected, 0);
                     *                                      }
                     *
                     *                                      for(int i = 0; i < gShapes.Count; i++)
                     *                                      {
                     *                                              if (LastPosition.Count == 0)
                     *                                              {
                     *                                                      LastPosition.Add(gShapes[i]); // store shapes positions for next rotation
                     *                                                      start = 0;
                     *                                                      end = foundShape.Count;
                     *                                                      //check if centroids are close to those of found shapes
                     *                                                      for (int j = 0; j < foundShape.Count; j++)
                     *                                                      {
                     *                                                              // Check centerpoints with tolerance of +- 20
                     *                                                              if(((foundShape[j].x_Center < gShapes[i].x_Center + 20) && (foundShape[j].x_Center > gShapes[i].x_Center - 20)) && ((foundShape[j].y_Center < gShapes[i].y_Center + 20) && (foundShape[j].y_Center > gShapes[i].y_Center - 20)))
                     *                                                              {
                     *                                                                      key[i] = shapeType[j];
                     *                                                              }
                     *                                                      }
                     *
                     *                                              }
                     *                                              else
                     *                                              {
                     *                                                      int tEnd = 0;
                     *                                                      for(int j = 0; j < gShapes.Count; j++)
                     *                                                      {
                     *
                     *                                                              if (((LastPosition[j].x_Center < gShapes[i].x_Center + 10) && (LastPosition[j].x_Center > gShapes[i].x_Center - 10)) && ((LastPosition[j].y_Center < gShapes[i].y_Center + 10) && (LastPosition[j].y_Center > gShapes[i].y_Center - 10)))
                     *                                                              {
                     *                                                                      tEnd = j;
                     *                                                              }
                     *                                                              end = tEnd;
                     *                                                              start = (end - foundShape.Count);
                     *
                     *                                                      }
                     *
                     *                                                      for (int j = 0; j < gShapes.Count; j++)
                     *                                                      {
                     *
                     *                                                              if (((foundShape[j].x_Center < gShapes[i].x_Center + 20) && (foundShape[j].x_Center > gShapes[i].x_Center - 20)) && ((foundShape[j].y_Center < gShapes[i].y_Center + 20) && (foundShape[j].y_Center > gShapes[i].y_Center - 20)))
                     *                                                              {
                     *                                                                      key[i + start] = shapeType[j];
                     *                                                              }
                     *
                     *                                                      }
                     *                                              }
                     *                                      }
                     */
                }                 // Grid analysis

                // Display Images
                pictureBox0.Image = frame.Bitmap;
                pictureBox1.Image = coralNestDrawn.Bitmap;
                pictureBox2.Image = coralOutcropDrawn.Bitmap;
                pictureBox3.Image = starfishDrawn.Bitmap;
                pictureBox4.Image = drawnImage.Bitmap;
                pictureBox5.Image = laneImg.Bitmap;
                GridBox.Image     = gridDrawn.Bitmap;

                if (!Sleep(20))
                {
                    this.Invoke(new Action(() => { Close(); }));
                    break;
                }
            }
        }
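Example #2 stores contour centroids and areas in a Shape class that is not part of the snippet; a minimal sketch consistent with the fields the loops assign (the original project may carry more members):

        // Minimal Shape container assumed by the contour loops above.
        public class Shape
        {
            public double x_Center; // centroid X = M10 / M00
            public double y_Center; // centroid Y = M01 / M00
            public double area;     // contour area in pixels
        }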
Example #3
        private void FindLicensePlate(
            VectorOfVectorOfPoint contours,
            int[,] hierachy,
            int idx,
            IInputArray gray,
            IInputArray canny,
            List <IInputOutputArray> licensePlateImagesList,
            List <IInputOutputArray> filteredLicensePlateImagesList,
            List <RotatedRect> detectedLicensePlateRegionList,
            List <String> licenses)
        {
            if (hierachy.Length != 0)
            {
                for (; idx >= 0; idx = hierachy[idx, 0])
                {
                    int numberOfChildren = GetNumberOfChildren(hierachy, idx);
                    //if it does not contain any children (characters), it is not a license plate region
                    if (numberOfChildren == 0)
                    {
                        continue;
                    }

                    using (VectorOfPoint contour = contours[idx])
                    {
                        if (CvInvoke.ContourArea(contour) > 400)
                        {
                            if (numberOfChildren < 3)
                            {
                                //If the contour has fewer than 3 children, it is not a license plate (assuming a license plate has at least 3 characters)
                                //However we should search the children of this contour to see if any of them is a license plate
                                FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                                                 filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                                continue;
                            }

                            RotatedRect box = CvInvoke.MinAreaRect(contour);
                            if (box.Angle < -45.0)
                            {
                                float tmp = box.Size.Width;
                                box.Size.Width  = box.Size.Height;
                                box.Size.Height = tmp;
                                box.Angle      += 90.0f;
                            }
                            else if (box.Angle > 45.0)
                            {
                                float tmp = box.Size.Width;
                                box.Size.Width  = box.Size.Height;
                                box.Size.Height = tmp;
                                box.Angle      -= 90.0f;
                            }

                            double whRatio = (double)box.Size.Width / box.Size.Height;
                            if (!(3.0 < whRatio && whRatio < 10.0))
                            //if (!(1.0 < whRatio && whRatio < 2.0))
                            {
                                //if the width/height ratio is not in the specified range, it is not a license plate
                                //However we should search the children of this contour to see if any of them is a license plate
                                //Contour<Point> child = contours.VNext;
                                if (hierachy[idx, 2] > 0)
                                {
                                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                                                     filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                                }
                                continue;
                            }

                            using (UMat tmp1 = new UMat())
                                using (UMat tmp2 = new UMat())
                                {
                                    PointF[] srcCorners = box.GetVertices();

                                    PointF[] destCorners = new PointF[] {
                                        new PointF(0, box.Size.Height - 1),
                                        new PointF(0, 0),
                                        new PointF(box.Size.Width - 1, 0),
                                        new PointF(box.Size.Width - 1, box.Size.Height - 1)
                                    };

                                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                                    {
                                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                                    }

                                    //resize the license plate so the font height is roughly 10-12 pixels; this size gives better accuracy from Tesseract
                                    Size   approxSize = new Size(240, 180);
                                    double scale      = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                                    Size   newSize    = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);

                                    //removes some pixels from the edge
                                    int       edgePixelSize = 3;
                                    Rectangle newRoi        = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                                                                            tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                                    UMat plate = new UMat(tmp2, newRoi);

                                    UMat filteredPlate = FilterPlate(plate);

                                    //Tesseract.Character[] words;
                                    StringBuilder strBuilder = new StringBuilder();
                                    using (UMat tmp = filteredPlate.Clone())
                                    {
                                        Emgu.CV.OCR.Tesseract.Character[] words;

                                        _ocr.Recognize(tmp);
                                        strBuilder.Append(_ocr.GetText());

                                        words = _ocr.GetCharacters();

                                        if (words.Length == 0)
                                        {
                                            continue;
                                        }

                                        for (int i = 0; i < words.Length; i++)
                                        {
                                            strBuilder.Append(words[i].Text);
                                        }
                                    }

                                    licenses.Add(strBuilder.ToString());

                                    //license plate images
                                    licensePlateImagesList.Add(plate);
                                    filteredLicensePlateImagesList.Add(filteredPlate);
                                    detectedLicensePlateRegionList.Add(box);
                                }
                        }
                    }
                }
            }
        }
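The recursion above relies on GetNumberOfChildren, which is not shown; a sketch consistent with how the hierarchy array is indexed in this method (column 0 = next sibling, column 2 = first child):

        // Sketch of GetNumberOfChildren: hierachy[idx, 2] is the first child, hierachy[idx, 0] the next sibling.
        private static int GetNumberOfChildren(int[,] hierachy, int idx)
        {
            idx = hierachy[idx, 2]; // first child
            if (idx < 0)
            {
                return 0;           // no children at all
            }

            int count = 1;
            while (hierachy[idx, 0] > 0)
            {
                count++;
                idx = hierachy[idx, 0]; // move to the next sibling
            }
            return count;
        }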
Example #4
        public static void Run(Options options)
        {
            //load the image and compute the ratio of the old height
            //to the new height, clone it, and resize it
            using (var disposer = new Disposer())
            {
                var image = new Image <Bgr, byte>(options.Image);
                disposer.Add(image);
                Image <Bgr, byte> orig = image.Clone();
                disposer.Add(orig);
                double ratio = image.Height / 500.0;
                image = ImageUtil.Resize(image, height: 500);
                disposer.Add(image);

                Image <Gray, byte> gray = image.Convert <Gray, byte>();
                disposer.Add(gray);

                gray = gray.SmoothGaussian(5);
                disposer.Add(gray);

                Image <Gray, byte> edged = gray.Canny(75, 200);
                disposer.Add(edged);

                Console.WriteLine("STEP 1: Edge Detection");

                CvInvoke.Imshow("Image", image);
                CvInvoke.Imshow("Edged", edged);
                CvInvoke.WaitKey();
                CvInvoke.DestroyAllWindows();

                //find the contours in the edged image, keeping only the
                //largest ones, and initialize the screen contour
                VectorOfVectorOfPoint cnts = new VectorOfVectorOfPoint();
                disposer.Add(cnts);

                using (Image <Gray, byte> edgedClone = edged.Clone())
                {
                    CvInvoke.FindContours(edgedClone, cnts, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                }


                Point[] screenCnt = null;
                foreach (VectorOfPoint c in
                         Enumerable.Range(0, cnts.Size).Select(i => cnts[i]).OrderByDescending(c => CvInvoke.ContourArea(c)).Take(5))
                {
                    //approximate the contour
                    double peri = CvInvoke.ArcLength(c, true);
                    using (VectorOfPoint approx = new VectorOfPoint())
                    {
                        CvInvoke.ApproxPolyDP(c, approx, 0.02 * peri, true);
                        if (approx.Size == 4)
                        {
                            screenCnt = approx.ToArray();
                            break;
                        }
                    }
                }
                if (screenCnt == null)
                {
                    Console.WriteLine("Failed to find polygon with four points");
                    return;
                }

                //show the contour (outline) of the piece of paper
                Console.WriteLine("STEP 2: Find contours of paper");
                image.Draw(screenCnt, new Bgr(0, 255, 0), 2);
                CvInvoke.Imshow("Outline", image);
                CvInvoke.WaitKey();
                CvInvoke.DestroyAllWindows();

                //apply the four point transform to obtain a top-down
                //view of the original image
                Image <Bgr, byte> warped = FourPointTransform(orig, screenCnt.Select(pt => new PointF((int)(pt.X * ratio), (int)(pt.Y * ratio))));
                disposer.Add(warped);

                //convert the warped image to grayscale, then threshold it
                //to give it that 'black and white' paper effect
                Image <Gray, byte> warpedGray = warped.Convert <Gray, byte>();
                disposer.Add(warpedGray);

                warpedGray = warpedGray.ThresholdAdaptive(new Gray(251), AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 251, new Gray(10));
                disposer.Add(warpedGray);

                Console.WriteLine("STEP 3: Apply perspective transform");
                Image <Bgr, byte> origResized = ImageUtil.Resize(orig, height: 650);
                disposer.Add(origResized);
                CvInvoke.Imshow("Original", origResized);
                Image <Gray, byte> warpedResized = ImageUtil.Resize(warpedGray, height: 650);
                disposer.Add(warpedResized);
                CvInvoke.Imshow("Scanned", warpedResized);
                CvInvoke.WaitKey();
                CvInvoke.DestroyAllWindows();
            }
        }
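Run() also depends on FourPointTransform and ImageUtil.Resize, which are defined elsewhere. Below is a hedged sketch of FourPointTransform only, assuming it performs the usual four-point perspective warp (order the corners, size the output from the opposing edges, then warp):

        // Hedged sketch of FourPointTransform: warps the quadrilateral described by 'corners' to a top-down view.
        private static Image <Bgr, byte> FourPointTransform(Image <Bgr, byte> image, IEnumerable <PointF> corners)
        {
            PointF[] pts = corners.ToArray();

            // order the corners: top-left/bottom-right by x+y, top-right/bottom-left by y-x
            PointF tl = pts.OrderBy(p => p.X + p.Y).First();
            PointF br = pts.OrderBy(p => p.X + p.Y).Last();
            PointF tr = pts.OrderBy(p => p.Y - p.X).First();
            PointF bl = pts.OrderBy(p => p.Y - p.X).Last();

            // output size taken from the longer of each pair of opposing edges
            int width  = (int)Math.Max(Distance(br, bl), Distance(tr, tl));
            int height = (int)Math.Max(Distance(tr, br), Distance(tl, bl));

            PointF[] src = { tl, tr, br, bl };
            PointF[] dst =
            {
                new PointF(0, 0), new PointF(width - 1, 0),
                new PointF(width - 1, height - 1), new PointF(0, height - 1)
            };

            Image <Bgr, byte> warped = new Image <Bgr, byte>(width, height);
            using (Mat transform = CvInvoke.GetPerspectiveTransform(src, dst))
            {
                CvInvoke.WarpPerspective(image, warped, transform, new Size(width, height));
            }
            return warped;
        }

        private static double Distance(PointF a, PointF b)
        {
            return Math.Sqrt(Math.Pow(a.X - b.X, 2) + Math.Pow(a.Y - b.Y, 2));
        }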
Example #5
    // Update is called once per frame
    void Update()
    {
        //CvInvoke.CvtColor(image, imgGray, ColorConversion.Bgr2Gray);

        Mat image;

        Mat structElt = CvInvoke.GetStructuringElement(eltShape, new Size(sizeX, sizeY), new Point(coordX, coordY));

        //Query the frame
        if (video.IsOpened)
        {
            image = video.QueryFrame();

            //HSV img
            Mat imgHSV = image.Clone();
            CvInvoke.CvtColor(image, imgHSV, ColorConversion.Bgr2Hsv);

            //Blur
            Mat imgHSVBlur = imgHSV.Clone();
            CvInvoke.MedianBlur(imgHSVBlur, imgHSVBlur, 21);

            //New Img
            Image <Hsv, Byte> newImg = imgHSVBlur.ToImage <Hsv, Byte>();

            Hsv lowerBound  = new Hsv(cMin, sMin, vMin);
            Hsv higherBound = new Hsv(cMax, sMax, vMax);

            Image <Gray, Byte> thresholdImg = newImg.InRange(lowerBound, higherBound);

            Image <Gray, Byte> thresholdImgErode = thresholdImg.Clone();

            CvInvoke.Dilate(thresholdImgErode, thresholdImgErode, structElt, new Point(-1, -1), nbrIteration, border, new MCvScalar(0));
            CvInvoke.Erode(thresholdImgErode, thresholdImgErode, structElt, new Point(-1, -1), nbrIteration, border, new MCvScalar(0));


            VectorOfVectorOfPoint contours       = new VectorOfVectorOfPoint();
            VectorOfPoint         biggestContour = new VectorOfPoint();
            int    biggestContourIndex           = 0;
            double biggestContourArea            = 0;

            Mat hierarchy = new Mat();
            CvInvoke.FindContours(thresholdImgErode, contours, hierarchy, retraitType, chainApprox);


            for (int i = 0; i < contours.Size; i++)
            {
                if (biggestContourArea < CvInvoke.ContourArea(contours[i]))
                {
                    biggestContourArea  = CvInvoke.ContourArea(contours[i]);
                    biggestContour      = contours[i];
                    biggestContourIndex = i;
                }
            }

            /*
             * if (biggestContour.Size > 0)
             * {
             *  int x = 0;
             *  int y = 0;
             *
             *  for (int i = 0; i < biggestContour.Size; i++)
             *  {
             *      x += biggestContour[i].X;
             *      y += biggestContour[i].Y;
             *  }
             *
             *  x /= biggestContour.Size;
             *  y /= biggestContour.Size;
             *
             *  Point centroid = new Point(x, y);
             *
             *  CvInvoke.Circle(image, centroid, 10, new MCvScalar(0, 0, 255));
             * }*/

            //Centroid (guard against an empty contour so M00 is never zero)
            if (biggestContour.Size > 0)
            {
                var   moments  = CvInvoke.Moments(biggestContour);
                int   cx       = (int)(moments.M10 / moments.M00);
                int   cy       = (int)(moments.M01 / moments.M00);
                Point centroid = new Point(cx, cy);
                CvInvoke.Circle(image, centroid, 10, new MCvScalar(0, 0, 255));

                CvInvoke.DrawContours(image, contours, biggestContourIndex, new MCvScalar(0, 0, 0));
            }


            //Invoke C++ interface function "Imshow"
            CvInvoke.Imshow("Video view", image);

            //Invoke C++ interface function "Imshow"
            CvInvoke.Imshow("Video view hsv", imgHSVBlur);

            //Invoke C++ interface function "Imshow"
            CvInvoke.Imshow("Video view threshold", thresholdImg);

            //Invoke C++ interface function "Imshow"
            CvInvoke.Imshow("Video view threshold erode + dilate", thresholdImgErode);

            //Block thread for 24 milliseconds
            CvInvoke.WaitKey(24);
        }
    }
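Update() references several fields that are declared elsewhere in this MonoBehaviour. The declarations below are hypothetical: the names come from the snippet, while the types and initial values are illustrative assumptions only:

    // Hypothetical field declarations assumed by Update(); values are examples only.
    VideoCapture video = new VideoCapture(0);                   // webcam, typically opened in Start()
    ElementShape eltShape = ElementShape.Rectangle;             // structuring element shape
    int sizeX = 3, sizeY = 3;                                   // structuring element size
    int coordX = -1, coordY = -1;                               // structuring element anchor
    double cMin = 0, sMin = 0, vMin = 0;                        // lower HSV bound
    double cMax = 180, sMax = 255, vMax = 255;                  // upper HSV bound
    int nbrIteration = 2;                                       // erode/dilate iterations
    BorderType border = BorderType.Default;
    RetrType retraitType = RetrType.External;
    ChainApproxMethod chainApprox = ChainApproxMethod.ChainApproxSimple;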
Example #6
        private void findShapes(Image <Bgr, byte> imgInput)
        {
            Image <Gray, byte> imgCanny = new Image <Gray, byte>(imgInput.Width, imgInput.Height, new Gray(0));

            imgCanny = imgInput.Canny(20, 50);

            //pictureBox1.Image = imgCanny.Bitmap;

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat mat = new Mat();

            CvInvoke.FindContours(imgCanny, contours, mat, Emgu.CV.CvEnum.RetrType.Tree, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

            int[] shapes = new int[4];

            /* [0] . . . Triangle
             * [1] . . . Square
             * [2] . . . Rectangle
             * [3] . . . Circle
             */

            for (int i = 0; i < contours.Size; i++)
            {
                double        perimeter = CvInvoke.ArcLength(contours[i], true);
                VectorOfPoint approx    = new VectorOfPoint();
                CvInvoke.ApproxPolyDP(contours[i], approx, 0.04 * perimeter, true);

                CvInvoke.DrawContours(imgInput, contours, i, new MCvScalar(0, 0, 255), 2);
                pictureBox1.Image = imgInput.Bitmap;
                //moments: center of the shape

                var moments = CvInvoke.Moments(contours[i]);
                int x       = (int)(moments.M10 / moments.M00);
                int y       = (int)(moments.M01 / moments.M00);

                if (CvInvoke.ContourArea(approx) > 250) //only consider shapes with area greater than 250
                {
                    if (approx.Size == 3)
                    {
                        shapes[0]++;
                        CvInvoke.PutText(imgInput, "Triangle", new Point(x, y),
                                         Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 0), 1);
                    }

                    if (approx.Size == 4)
                    {
                        Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);

                        double ar = (double)rect.Width / rect.Height;

                        if (ar >= 0.95 && ar <= 1.05)
                        {
                            shapes[1]++;
                            CvInvoke.PutText(imgInput, "Square", new Point(x, y),
                                             Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 0), 1);
                        }
                        else
                        {
                            shapes[2]++;
                            CvInvoke.PutText(imgInput, "Rectangle", new Point(x, y),
                                             Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 0), 1);
                        }
                    }

                    if (approx.Size > 6)
                    {
                        CvInvoke.PutText(imgInput, "Circle", new Point(x, y),
                                         Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 0), 1);
                        shapes[3]++;
                    }

                    pictureBox1.Image = imgInput.Bitmap;
                }
            }
            textBox8.Text = shapes[0].ToString();
            textBox6.Text = shapes[1].ToString();
            textBox4.Text = shapes[2].ToString();
            textBox2.Text = shapes[3].ToString();
        }
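findShapes expects a BGR input image; a hypothetical caller (the button handler name and file path are assumptions):

        // Hypothetical caller; "shapes.png" is a placeholder path.
        private void buttonDetect_Click(object sender, EventArgs e)
        {
            Image <Bgr, byte> imgInput = new Image <Bgr, byte>("shapes.png");
            findShapes(imgInput);
        }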
Example #7
        private Mat DetectObject(Mat detectionFrame, Mat displayFrame, Rectangle box)
        {
            Image <Bgr, Byte> buffer_im = displayFrame.ToImage <Bgr, Byte>();

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                IOutputArray hierarchy = null;
                // Build the list of contours
                CvInvoke.FindContours(detectionFrame, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxSimple);

                // select the largest contour
                if (contours.Size > 0)
                {
                    double        maxArea = 0;
                    int           chosen  = 0;
                    VectorOfPoint contour = null;
                    for (int i = 0; i < contours.Size; i++)
                    {
                        contour = contours[i];

                        double area = CvInvoke.ContourArea(contour);
                        if (area > maxArea)
                        {
                            maxArea = area;
                            chosen  = i;
                        }
                    }


                    VectorOfPoint hullPoints = new VectorOfPoint();
                    VectorOfInt   hullInt    = new VectorOfInt();

                    CvInvoke.ConvexHull(contours[chosen], hullPoints, true);
                    CvInvoke.ConvexHull(contours[chosen], hullInt, false);

                    Mat defects = new Mat();


                    if (hullInt.Size > 3)
                    {
                        CvInvoke.ConvexityDefects(contours[chosen], hullInt, defects);
                    }

                    box = CvInvoke.BoundingRectangle(hullPoints);
                    CvInvoke.Rectangle(displayFrame, box, drawingColor);               //rectangle enclosing the largest area

                    center = new Point(box.X + box.Width / 2, box.Y + box.Height / 2); //center of the rectangle (used as the mouse position)

                    var infoCentro = new string[] { $"Centro", $"Posicion: {center.X}, {center.Y}" };
                    WriteMultilineText(displayFrame, infoCentro, new Point(center.X + 30, center.Y));
                    CvInvoke.Circle(displayFrame, new Point(center.X, center.Y), 2, new MCvScalar(0, 100, 0), 4);
                    detectGesture = true;

                    buffer_im.Dispose();
                    defects.Dispose();
                    detectionFrame.Dispose();
                    return(displayFrame);
                }
                buffer_im.Dispose();
                detectionFrame.Dispose();
                detectGesture = false;
                return(displayFrame);
            }
        }
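DetectObject calls a WriteMultilineText helper that is not part of the snippet; a minimal sketch (the font and the 15-pixel line spacing are assumptions):

        // Minimal sketch of WriteMultilineText: draws each string one text line below the previous.
        private void WriteMultilineText(Mat frame, string[] lines, Point origin)
        {
            for (int i = 0; i < lines.Length; i++)
            {
                CvInvoke.PutText(frame, lines[i], new Point(origin.X, origin.Y + 15 * (i + 1)),
                                 FontFace.HersheyPlain, 0.8, new MCvScalar(0, 100, 0), 1);
            }
        }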
Example #8
        private void button2_Click(object sender, EventArgs e)
        {
            Image         triangleRectangleImageBox1;
            Image         circleImageBox1;
            Image         lineImageBox1;
            StringBuilder msgBuilder = new StringBuilder("Performance: ");
            Bitmap        clip_bmp   = new Bitmap(cliped_image);
            //Load the image from file and resize it for display
            Image <Bgr, Byte> img = new Image <Bgr, byte>(clip_bmp).Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

            //Convert the image to grayscale and filter out the noise
            UMat uimage = new UMat();

            CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

            //use image pyr to remove noise
            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);

            #region circle detection
            Stopwatch watch                      = Stopwatch.StartNew();
            double    cannyThreshold             = 180.0; // 180
            double    circleAccumulatorThreshold = 120.0; // 120
            CircleF[] circles                    = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);

            watch.Stop();
            msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
            #endregion

            #region Canny and edge detection
            watch.Reset(); watch.Start();
            double cannyThresholdLinking = 120.0; // 120
            UMat   cannyEdges            = new UMat();
            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

            LineSegment2D[] lines = CvInvoke.HoughLinesP(
                cannyEdges,
                1,              //Distance resolution in pixel-related units
                Math.PI / 45.0, //Angle resolution measured in radians
                0,              //threshold (reference value: 20)
                0,              //min line length (reference value: 30)
                10);            //max allowed gap between line segments (reference value: 10)

            watch.Stop();
            msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
            #endregion

            #region Find triangles and rectangles
            watch.Reset(); watch.Start();
            List <Triangle2DF> triangleList = new List <Triangle2DF>();
            List <RotatedRect> boxList      = new List <RotatedRect>(); //a box is a rotated rectangle

            using (Emgu.CV.Util.VectorOfVectorOfPoint contours = new Emgu.CV.Util.VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                int count = contours.Size;
                for (int i = 0; i < count; i++)
                {
                    using (Emgu.CV.Util.VectorOfPoint contour = contours[i])
                        using (Emgu.CV.Util.VectorOfPoint approxContour = new Emgu.CV.Util.VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                            if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                            {
                                if (approxContour.Size == 3)                      //The contour has 3 vertices, it is a triangle
                                {
                                    Point[] pts = approxContour.ToArray();
                                    triangleList.Add(new Triangle2DF(
                                                         pts[0],
                                                         pts[1],
                                                         pts[2]
                                                         ));
                                }
                                else if (approxContour.Size == 4) //The contour has 4 vertices.
                                {
                                    #region determine if all the angles in the contour are within [80, 100] degree
                                    bool            isRectangle = true;
                                    Point[]         pts         = approxContour.ToArray();
                                    LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                    for (int j = 0; j < edges.Length; j++)
                                    {
                                        double angle = Math.Abs(
                                            edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                        if (angle < 80 || angle > 100)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    #endregion

                                    if (isRectangle)
                                    {
                                        boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                    }
                                }
                            }
                        }
                }
            }

            watch.Stop();
            msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
            #endregion

            //originalImageBox.Image = img.ToBitmap();
            this.Text = msgBuilder.ToString();

            #region draw triangles and rectangles
            Image <Bgr, Byte> triangleRectangleImage = img.CopyBlank();
            foreach (Triangle2DF triangle in triangleList)
            {
                triangleRectangleImage.Draw(triangle, new Bgr(Color.DarkBlue), 2);
            }
            foreach (RotatedRect box in boxList)
            {
                triangleRectangleImage.Draw(box, new Bgr(Color.DarkOrange), 2);
            }
            triangleRectangleImageBox1 = triangleRectangleImage.ToBitmap();
            #endregion

            #region draw circles
            Image <Bgr, Byte> circleImage = img.CopyBlank();
            foreach (CircleF circle in circles)
            {
                circleImage.Draw(circle, new Bgr(Color.Brown), 2);
            }
            circleImageBox1 = circleImage.ToBitmap();
            #endregion

            #region draw lines
            Image <Bgr, Byte> lineImage = img.CopyBlank();
            foreach (LineSegment2D line in lines)
            {
                lineImage.Draw(line, new Bgr(Color.Green), 2);
            }
            lineImageBox1 = lineImage.ToBitmap();
            #endregion

            cv_image          = lineImageBox1;
            pictureBox2.Image = lineImageBox1;
        }
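The right-angle test above reappears in several later snippets with slightly different bounds. A small sketch, not taken from the original source, that factors it into a reusable helper (the method name and parameters are mine):

        // Sketch of a reusable right-angle test for an approximated contour (assumed helper, not original code).
        private static bool IsApproximatelyRectangular(Point[] pts, double minAngle = 80, double maxAngle = 100)
        {
            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
            for (int j = 0; j < edges.Length; j++)
            {
                // exterior angle between consecutive edges, in degrees
                double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                if (angle < minAngle || angle > maxAngle)
                {
                    return false;
                }
            }
            return true;
        }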
示例#9
0
        /// <summary>
        /// Detects road signs in the input image
        /// </summary>
        private void WykryjZnaki()
        {
            // list of triangles
            List <Triangle2DF> triangleList = new List <Triangle2DF>();

            // list of rectangles and squares
            List <RotatedRect> boxList = new List <RotatedRect>();

            ZwiekszProgressBar(1);

            // process the image so that only white contours remain on a black background
            Image <Gray, byte> canny_zdj = imgInput.Canny(300, 250);

            // assign canny_zdj to the pictureBox and stretch it to fit
            zdjecieCannyBox.Image    = canny_zdj.Bitmap;
            zdjecieCannyBox.SizeMode = PictureBoxSizeMode.StretchImage;
            ZwiekszProgressBar(2);

            LineSegment2D[] lines = CvInvoke.HoughLinesP(
                canny_zdj,
                1,
                Math.PI / 45.0,
                20,
                30,
                10);

            Image <Gray, byte>    imgOut   = canny_zdj.Convert <Gray, byte>().ThresholdBinary(new Gray(50), new Gray(200));
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat hier = new Mat();

            ZwiekszProgressBar(1);

            // smooth the image
            imgSmooth = imgInput.PyrDown().PyrUp();
            imgSmooth._SmoothGaussian(3);

            // limit detection to the yellow range of the BGR scale
            imgOut = imgSmooth.InRange(new Bgr(0, 140, 150), new Bgr(80, 255, 255));
            imgOut = imgOut.PyrDown().PyrUp();
            imgOut._SmoothGaussian(3);

            ZwiekszProgressBar(2);

            Dictionary <int, double> dict = new Dictionary <int, double>();

            // find contours that meet the requirements (color, among others)
            CvInvoke.FindContours(imgOut, contours, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
            label1.Text = contours.Size.ToString();

            // if at least one contour was found
            if (contours.Size > 0)
            {
                // loop over all detected contours
                for (int i = 0; i < contours.Size; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);

                            // filter on the area of the detected contour
                            if (CvInvoke.ContourArea(approxContour, false) > 50)
                            {
                                // if it is a triangle
                                if (approxContour.Size == 3)
                                {
                                    // collect the points and add them to the triangle list
                                    Point[] pts = approxContour.ToArray();
                                    triangleList.Add(new Triangle2DF(
                                                         pts[0],
                                                         pts[1],
                                                         pts[2]
                                                         ));

                                    // check whether the detected triangle points downwards (one vertex at the bottom)
                                    if (pts[1].X > pts[0].X && pts[1].Y > pts[0].Y)
                                    {
                                        // mark the A-7 sign as detected
                                        UstawWykrytyZnak(2);
                                        double area = CvInvoke.ContourArea(contours[i]);
                                        // add to the main dictionary
                                        dict.Add(i, area);
                                    }
                                }

                                // if it is a quadrilateral
                                else if (approxContour.Size == 4)
                                {
                                    bool isRectangle = true;

                                    // split the shape into individual edges
                                    Point[]         pts   = approxContour.ToArray();
                                    LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

                                    // loop over all edges
                                    for (int j = 0; j < edges.Length; j++)
                                    {
                                        // check the angle between consecutive edges
                                        double angle = Math.Abs(
                                            edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                        // stop if any angle is smaller than 80 or larger than 100 degrees
                                        if (angle < 80 || angle > 100)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }

                                    if (isRectangle)
                                    {
                                        RotatedRect rrect = CvInvoke.MinAreaRect(contours[i]);

                                        // final check whether the detected shape is rotated about its center by 40 to 50 degrees
                                        // the D-1 sign is a square rotated 45 degrees about its center
                                        if ((rrect.Angle < -40 && rrect.Angle > -50) || (rrect.Angle > 40 && rrect.Angle < 50))
                                        {
                                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                            double area = CvInvoke.ContourArea(contours[i]);
                                            dict.Add(i, area);
                                            UstawWykrytyZnak(1);
                                        }
                                    }
                                }
                            }
                        }
                }
            }

            ZwiekszProgressBar(2);

            var item = dict.OrderByDescending(v => v.Value);

            foreach (var it in item)
            {
                int key = it.Key;

                // get the bounding rectangle of the corresponding contour
                Rectangle rect = CvInvoke.BoundingRectangle(contours[key]);

                // draw a red rectangle around the detected sign
                CvInvoke.Rectangle(imgInput, rect, new MCvScalar(0, 0, 255), 1);
            }

            ZwiekszProgressBar(2);

            pictureBox2.Image    = imgInput.Bitmap;
            pictureBox2.SizeMode = PictureBoxSizeMode.StretchImage;

            // create an image showing ALL the contours in the original image - red lines
            Image <Bgr, Byte> lineImage = imgInput.CopyBlank();

            foreach (LineSegment2D line in lines)
            {
                lineImage.Draw(line, new Bgr(Color.Red), 1);
            }
            zdjecieWykrytyZnak.Image    = lineImage.Bitmap;
            zdjecieWykrytyZnak.SizeMode = PictureBoxSizeMode.StretchImage;
        }
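WykryjZnaki relies on the helpers ZwiekszProgressBar and UstawWykrytyZnak, which are not part of the snippet. A minimal sketch, assuming the first simply advances a ProgressBar and the second writes the detected sign to a label; the control names below are placeholders, not the original form's controls:

        // Assumed helpers for the snippet above; the real implementations are not shown in the source.
        private void ZwiekszProgressBar(int step)
        {
            progressBar1.Value = Math.Min(progressBar1.Maximum, progressBar1.Value + step);
        }

        private void UstawWykrytyZnak(int signId)
        {
            // 1 = D-1 (priority road), 2 = A-7 (give way) - mapping inferred from the comments above
            labelZnak.Text = signId == 1 ? "D-1" : "A-7";
        }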
示例#10
0
        //
        //EDDIE'S FUNCTIONS START HERE
        //
        private void DetectObject(Mat detectionFrame, Mat displayFrame)
        {
            System.Drawing.Rectangle box  = new System.Drawing.Rectangle();
            Image <Bgr, byte>        temp = detectionFrame.ToImage <Bgr, byte>();

            temp = temp.Rotate(180, new Bgr(0, 0, 0));
            Image <Bgr, Byte> buffer_im = displayFrame.ToImage <Bgr, Byte>();
            float             a         = buffer_im.Width;
            float             b         = buffer_im.Height;

            MessageBox.Show("El tamano camara es  W: " + a.ToString() + " y H:" + b.ToString());

            boxList.Clear();
            rect.Clear();
            triangleList.Clear();
            circleList.Clear();
            ellipseList.Clear();

            //transform the image
            //UMat uimage = new UMat();
            // CvInvoke.CvtColor(displayFrame, uimage, ColorConversion.Bgr2Gray);
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                ///  IOutputArray hirarchy = null;
                /// CvInvoke.FindContours(detectionFrame, contours, hirarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
                ///CvInvoke.Polylines(detectionFrame, contours, true, new MCvScalar(255, 0, 0), 2, LineType.FourConnected);
                Image <Bgr, Byte> resultadoFinal = displayFrame.ToImage <Bgr, byte>();

                resultadoFinal = resultadoFinal.Rotate(180, new Bgr(0, 0, 0));

                //Circles
                //double cannyThreshold = 180.0;
                //double circleAccumulatorThreshold = 120;
                //CircleF[] circles = CvInvoke.HoughCircles(detectionFrame, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);

                /// if (contours.Size > 0)
                ///{
                double        maxArea = 1000;
                int           chosen  = 0;
                VectorOfPoint contour = null;

                /*
                 * for (int i = 0; i < contours.Size; i++)
                 * {
                 *  contour = contours[i];
                 *
                 *  double area = CvInvoke.ContourArea(contour);
                 *  if (area > maxArea)
                 *  {
                 *      System.Drawing.Rectangle rect = new System.Drawing.Rectangle();
                 *      //  maxArea = area;
                 *      chosen = i;
                 *      //}
                 *      //}
                 *
                 *      //Boxes
                 *      VectorOfPoint hullPoints = new VectorOfPoint();
                 *      VectorOfInt hullInt = new VectorOfInt();
                 *
                 *      CvInvoke.ConvexHull(contours[chosen], hullPoints, true);
                 *      CvInvoke.ConvexHull(contours[chosen], hullInt, false);
                 *
                 *      Mat defects = new Mat();
                 *
                 *      if (hullInt.Size > 3)
                 *          CvInvoke.ConvexityDefects(contours[chosen], hullInt, defects);
                 *
                 *      box = CvInvoke.BoundingRectangle(hullPoints);
                 *      CvInvoke.Rectangle(displayFrame, box, drawingColor);//Box rectangulo que encierra el area mas grande
                 *                                                          // cropbox = crop_color_frame(displayFrame, box);
                 *
                 *      buffer_im.ROI = box;
                 *
                 *      Image<Bgr, Byte> cropped_im = buffer_im.Copy();
                 *      //pictureBox8.Image = cropped_im.Bitmap;
                 *      System.Drawing.Point center = new System.Drawing.Point(box.X + box.Width / 2, box.Y + box.Height / 2);//centro  rectangulo MOUSE
                 *      System.Drawing.Point esquina_superiorI = new System.Drawing.Point(box.X, box.Y);
                 *      System.Drawing.Point esquina_superiorD = new System.Drawing.Point(box.Right, box.Y);
                 *      System.Drawing.Point esquina_inferiorI = new System.Drawing.Point(box.X, box.Y + box.Height);
                 *      System.Drawing.Point esquina_inferiorD = new System.Drawing.Point(box.Right, box.Y + box.Height);
                 *      CvInvoke.Circle(displayFrame, esquina_superiorI, 5, new MCvScalar(0, 0, 255), 2);
                 *      CvInvoke.Circle(displayFrame, esquina_superiorD, 5, new MCvScalar(0, 0, 255), 2);
                 *      CvInvoke.Circle(displayFrame, esquina_inferiorI, 5, new MCvScalar(0, 0, 255), 2);
                 *      CvInvoke.Circle(displayFrame, esquina_inferiorD, 5, new MCvScalar(0, 0, 255), 2);
                 *      CvInvoke.Circle(displayFrame, center, 5, new MCvScalar(0, 0, 255), 2);
                 *      VectorOfPoint start_points = new VectorOfPoint();
                 *      VectorOfPoint far_points = new VectorOfPoint();
                 *
                 *
                 *
                 *
                 *
                 *
                 *  }
                 * }
                 */
                //Draw the red border
                var temp2 = temp.SmoothGaussian(5).Convert <Gray, byte>().ThresholdBinary(new Gray(20), new Gray(255));
                temp2 = temp2.Rotate(180, new Gray(0));
                VectorOfVectorOfPoint contorno = new VectorOfVectorOfPoint();
                Mat mat = new Mat();
                CvInvoke.FindContours(temp2, contorno, mat, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);


                for (int i = 0; i < contorno.Size; i++)
                {
                    VectorOfPoint approxContour = new VectorOfPoint();
                    double        perimetro     = CvInvoke.ArcLength(contorno[i], true);
                    VectorOfPoint approx        = new VectorOfPoint();

                    VectorOfPointF approxF = new VectorOfPointF();
                    double         area    = CvInvoke.ContourArea(contorno[i]);
                    if (area > 5000)
                    {
                        CvInvoke.ApproxPolyDP(contorno[i], approx, 0.04 * perimetro, true);
                        // CvInvoke.DrawContours(displayFrame, contorno, i, new MCvScalar(255, 0, 0), 2);

                        //pictureBox4.Image = temp2.Bitmap;

                        var moments = CvInvoke.Moments(contorno[i]);
                        int x       = (int)(moments.M10 / moments.M00);
                        int y       = (int)(moments.M01 / moments.M00);



                        resultados.Add(approx);
                        bool isShape;
                        if (approx.Size == 3)     //The contour has 3 vertices, it is a triangle
                        {
                            System.Drawing.Point[] pts = approx.ToArray();
                            double perimetro2          = CvInvoke.ArcLength(contorno[i], true);

                            double area2        = CvInvoke.ContourArea(contorno[i]);
                            double circularidad = 4 * Math.PI * area2 / Math.Pow(perimetro2, 2);
                            MessageBox.Show("circularidad triangulo" + circularidad);
                            MessageBox.Show("Es triangulo ");

                            /*Triangle2DF triangle = new Triangle2DF(pts[0], pts[1], pts[2]);
                             * resultadoFinal.Draw(triangle, new Bgr(System.Drawing.Color.Cyan), 1);
                             * CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                             * CvInvoke.PutText(resultadoFinal, "Triangle", new System.Drawing.Point(x, y),
                             * Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                             * resTri.Add(approx);*/

                            //MessageBox.Show("No es triangulo ");
                            //Triangle2DF triangle = new Triangle2DF(pts[0], pts[1], pts[2]);
                            //resultadoFinal.Draw(triangle, new Bgr(System.Drawing.Color.Red), 2);
                            RotatedRect rectangle = CvInvoke.MinAreaRect(approx);
                            CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                            resultadoFinal.Draw(rectangle, new Bgr(System.Drawing.Color.Cyan), 1);
                            rect.Add(CvInvoke.BoundingRectangle(approx));
                        }
                        if (approx.Size == 4)     //The contour has 4 vertices.
                        {
                            //RotatedRect tt = new RotatedRect(CvInvoke.MinAreaRect(approx).Center, CvInvoke.MinAreaRect(approx).Size, 270) ;
                            //boxList.Add(tt);

                            //Check whether it is a square
                            System.Drawing.Rectangle rectAux = CvInvoke.BoundingRectangle(contorno[i]);
                            double ar = (double)rectAux.Width / rectAux.Height;

                            //Compute circularity
                            double perimetro2   = CvInvoke.ArcLength(contorno[i], true);
                            double area2        = CvInvoke.ContourArea(contorno[i]);
                            double circularidad = 4 * Math.PI * area2 / Math.Pow(perimetro2, 2);

                            MessageBox.Show("circularidad rect " + circularidad);
                            if (circularidad > 0.69)
                            {
                                //If circularity > 0.6 and the proportion is right, it is a square
                                if (ar >= 0.8 && ar <= 1.0)
                                {
                                    MessageBox.Show("Cuadrado ");
                                    RotatedRect rectangle = CvInvoke.MinAreaRect(contorno[i]);
                                    CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                                    resultadoFinal.Draw(rectangle, new Bgr(System.Drawing.Color.Cyan), 1);
                                    //CvInvoke.PutText(resultadoFinal, "Rectangle", new System.Drawing.Point(x, y),
                                    //Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                                    rect.Add(CvInvoke.BoundingRectangle(approx));
                                }
                                //It is an ellipse
                                else
                                {
                                    MessageBox.Show("parecia rectangulo pero era elipse ");
                                    Ellipse final_ellipse       = new Ellipse(CvInvoke.MinAreaRect(contorno[i]).Center, CvInvoke.MinAreaRect(contorno[i]).Size, 0);
                                    Ellipse final_ellipseDibujo = new Ellipse(CvInvoke.MinAreaRect(contorno[i]).Center, CvInvoke.MinAreaRect(contorno[i]).Size, 90);
                                    ellipseList.Add(final_ellipse);

                                    //IConvexPolygonF poligono = CvInvoke.MinAreaRect(approx);
                                    //resultadoFinal.Draw(poligono, new Bgr(Color.Cyan), 1);
                                    resultadoFinal.Draw(final_ellipseDibujo, new Bgr(System.Drawing.Color.Cyan), 1);
                                    CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                                    //CvInvoke.PutText(resultadoFinal, "Figura circular", new System.Drawing.Point(x, y),
                                    //      Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                                }
                            }
                            //It is a rectangle
                            else
                            {
                                RotatedRect rectangle = CvInvoke.MinAreaRect(contorno[i]);
                                CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                                resultadoFinal.Draw(rectangle, new Bgr(System.Drawing.Color.Cyan), 1);
                                //CvInvoke.PutText(resultadoFinal, "Rectangle", new System.Drawing.Point(x, y),
                                //Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                                rect.Add(CvInvoke.BoundingRectangle(approx));
                            }


                            /* //prueba imagen de rectangulo
                             * //--------------------------------------PART 1 : DRAWING STUFF IN A BITMAP------------------------------------------------------------------------------------
                             * System.Drawing.Point[] pts = approx.ToArray();
                             *
                             * System.Drawing.PointF[] mypoints = Array.ConvertAll(
                             *   pts.ToArray<System.Drawing.Point>(),
                             *   value => new System.Drawing.PointF(value.X, value.Y)
                             * );
                             *
                             * System.Drawing.Rectangle r = new System.Drawing.Rectangle(0, 0, CvInvoke.BoundingRectangle(approx).Width, CvInvoke.BoundingRectangle(approx).Height);
                             * Pen blackPen = new Pen(System.Drawing.Color.FromArgb(255, 255, 0, 0), 1);
                             * bmp = new Bitmap(r.Width+100,r.Height+10, PixelFormat.Format32bppArgb);
                             * Graphics g = Graphics.FromImage(bmp);
                             * g.DrawRectangle(blackPen, r); //rectangle 1
                             * g.DrawPolygon(blackPen,mypoints);
                             * System.Drawing.Rectangle rcrop = new System.Drawing.Rectangle(r.X, r.Y, r.Width + 10, r.Height + 10);//This is the cropping rectangle (bonding box adding 10 extra units width and height)
                             *
                             * //Crop the model from the bmp
                             * Bitmap src = bmp;
                             * // Bitmap target = new Bitmap(r.Width, r.Height);
                             * //using (Graphics gs = Graphics.FromImage(target))
                             * //{
                             * //  gs.DrawImage(src, rcrop, r, GraphicsUnit.Pixel);
                             * // gs.Dispose();
                             * //}
                             * //--------------------------------------PART 2 : SAVING THE BMP AS JPG------------------------------------------------------------------------------------
                             * src.Save("testOJO.jpg");*/
                        }

                        /* ELIMINAR
                         * if (approx.Size == 5 )
                         * {
                         *  System.Drawing.Point[] pts = approx.ToArray();
                         *
                         *  //MessageBox.Show("Cantidad puntos poligono "+pts.Length);
                         *  //IConvexPolygonF poligono = CvInvoke.MinAreaRect(approx);
                         *  //resultadoFinal.Draw(poligono, new Bgr(Color.Cyan), 1);
                         *  CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 0), 1, LineType.AntiAlias);
                         *  CvInvoke.PutText(resultadoFinal, "Pentagon", new System.Drawing.Point(x, y),
                         *      Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                         * }*/
                        if (approx.Size >= 5)
                        {
                            double perimetro2   = CvInvoke.ArcLength(contorno[i], true);
                            double area2        = CvInvoke.ContourArea(contorno[i]);
                            double circularidad = 4 * Math.PI * area2 / Math.Pow(perimetro2, 2);
                            MessageBox.Show("circularidad elipse " + circularidad);

                            Ellipse final_ellipse       = new Ellipse(CvInvoke.MinAreaRect(contorno[i]).Center, CvInvoke.MinAreaRect(contorno[i]).Size, 0);
                            Ellipse final_ellipseDibujo = new Ellipse(CvInvoke.MinAreaRect(contorno[i]).Center, CvInvoke.MinAreaRect(contorno[i]).Size, 90);
                            ellipseList.Add(final_ellipse);

                            //IConvexPolygonF poligono = CvInvoke.MinAreaRect(approx);
                            //resultadoFinal.Draw(poligono, new Bgr(Color.Cyan), 1);
                            resultadoFinal.Draw(final_ellipseDibujo, new Bgr(System.Drawing.Color.Cyan), 1);
                            CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                            //CvInvoke.PutText(resultadoFinal, "Figura circular", new System.Drawing.Point(x, y),
                            //      Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                        }


                        /* _Eliminar
                         * if (approx.Size > 6)
                         * {
                         *
                         *      double circularidad = 4 * Math.PI * area / (Math.Pow(2, perimetro));
                         *  MessageBox.Show("circularidad circulo "+circularidad);
                         *      CvInvoke.PutText(resultadoFinal, "Circle", new System.Drawing.Point(x, y),
                         *      Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(0, 255, 255), 2);
                         *      CircleF circle = CvInvoke.MinEnclosingCircle(approx);
                         *      circleList.Add(circle);
                         *      CvInvoke.DrawContours(resultadoFinal, contorno, i, new MCvScalar(255, 255, 255), 1, LineType.AntiAlias);
                         *      resultadoFinal.Draw(circle, new Bgr(System.Drawing.Color.Cyan), 1);
                         *
                         *
                         *
                         * }*/
                    }
                }

                pictureBox2.Image = resultadoFinal.Bitmap;
                button2.Enabled   = true;

                ///}
            }
        }
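The circularity value 4 * pi * area / perimeter^2 is computed inline three times in the snippet above. A short sketch, not from the original code, of the same calculation as a helper (it yields 1.0 for an ideal circle and smaller values for elongated shapes):

        // Assumed helper: circularity of a contour, 1.0 for an ideal circle.
        private static double Circularity(VectorOfPoint contour)
        {
            double perimeter = CvInvoke.ArcLength(contour, true);
            double area      = CvInvoke.ContourArea(contour);
            return 4 * Math.PI * area / Math.Pow(perimeter, 2);
        }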
示例#11
0
        /// <summary>
        /// Get the piece corners by finding the biggest rectangle of the contour points
        /// </summary>
        /// <param name="pieceID">ID of the piece</param>
        /// <param name="pieceImgBw">Black white image of piece</param>
        /// <param name="pieceImgColor">Color image of piece</param>
        /// <returns>List with corner points</returns>
        public override List <Point> FindCorners(string pieceID, Bitmap pieceImgBw, Bitmap pieceImgColor)
        {
            PluginFactory.LogHandle.Report(new LogEventInfo(pieceID + " Finding corners by finding the maximum rectangle within candidate points"));

            List <Point> corners = new List <Point>();

            // Find all dominant corner points using the GFTTDetector (this uses the Harris corner detector)
            GFTTDetector detector = new GFTTDetector(500, 0.01, 5, 2, true, 0.04);

            MKeyPoint[]  keyPoints       = detector.Detect(new Image <Gray, byte>(pieceImgBw));
            List <Point> possibleCorners = keyPoints.Select(k => Point.Round(k.Point)).ToList();

            if (possibleCorners.Count > 0)
            {
                // Sort the dominant corners by the distance to upper left corner of the bounding rectangle (0, 0) and keep only the corners that are near enough to this point
                List <Point> possibleCornersSortedUpperLeft = new List <Point>(possibleCorners);
                possibleCornersSortedUpperLeft.Sort(new DistanceToPointComparer(new Point(0, 0), DistanceOrders.NEAREST_FIRST));
                double minCornerDistUpperLeft = Utils.Distance(possibleCornersSortedUpperLeft[0], new PointF(0, 0));
                possibleCornersSortedUpperLeft = possibleCornersSortedUpperLeft.Where(c => Utils.Distance(c, new PointF(0, 0)) < minCornerDistUpperLeft * PieceFindCornersMaxCornerDistRatio).ToList();

                // Sort the dominant corners by the distance to upper right corner of the bounding rectangle (ImageWidth, 0) and keep only the corners that are near enough to this point
                List <Point> possibleCornersSortedUpperRight = new List <Point>(possibleCorners);
                possibleCornersSortedUpperRight.Sort(new DistanceToPointComparer(new Point(pieceImgBw.Width, 0), DistanceOrders.NEAREST_FIRST));
                double minCornerDistUpperRight = Utils.Distance(possibleCornersSortedUpperRight[0], new PointF(pieceImgBw.Width, 0));
                possibleCornersSortedUpperRight = possibleCornersSortedUpperRight.Where(c => Utils.Distance(c, new PointF(pieceImgBw.Width, 0)) < minCornerDistUpperRight * PieceFindCornersMaxCornerDistRatio).ToList();

                // Sort the dominant corners by the distance to lower right corner of the bounding rectangle (ImageWidth, ImageHeight) and keep only the corners that are near enough to this point
                List <Point> possibleCornersSortedLowerRight = new List <Point>(possibleCorners);
                possibleCornersSortedLowerRight.Sort(new DistanceToPointComparer(new Point(pieceImgBw.Width, pieceImgBw.Height), DistanceOrders.NEAREST_FIRST));
                double minCornerDistLowerRight = Utils.Distance(possibleCornersSortedLowerRight[0], new PointF(pieceImgBw.Width, pieceImgBw.Height));
                possibleCornersSortedLowerRight = possibleCornersSortedLowerRight.Where(c => Utils.Distance(c, new PointF(pieceImgBw.Width, pieceImgBw.Height)) < minCornerDistLowerRight * PieceFindCornersMaxCornerDistRatio).ToList();

                // Sort the dominant corners by the distance to lower left corner of the bounding rectangle (0, ImageHeight) and keep only the corners that are near enough to this point
                List <Point> possibleCornersSortedLowerLeft = new List <Point>(possibleCorners);
                possibleCornersSortedLowerLeft.Sort(new DistanceToPointComparer(new Point(0, pieceImgBw.Height), DistanceOrders.NEAREST_FIRST));
                double minCornerDistLowerLeft = Utils.Distance(possibleCornersSortedLowerLeft[0], new PointF(0, pieceImgBw.Height));
                possibleCornersSortedLowerLeft = possibleCornersSortedLowerLeft.Where(c => Utils.Distance(c, new PointF(0, pieceImgBw.Height)) < minCornerDistLowerLeft * PieceFindCornersMaxCornerDistRatio).ToList();

                // Combine all possibleCorners from the four lists and discard all combination with too bad angle differences
                List <FindCornerRectangleScore> scores = new List <FindCornerRectangleScore>();
                for (int indexUpperLeft = 0; indexUpperLeft < possibleCornersSortedUpperLeft.Count; indexUpperLeft++)
                {
                    for (int indexUpperRight = 0; indexUpperRight < possibleCornersSortedUpperRight.Count; indexUpperRight++)
                    {
                        for (int indexLowerRight = 0; indexLowerRight < possibleCornersSortedLowerRight.Count; indexLowerRight++)
                        {
                            for (int indexLowerLeft = 0; indexLowerLeft < possibleCornersSortedLowerLeft.Count; indexLowerLeft++)
                            {
                                if (PluginFactory.CancelToken.IsCancellationRequested)
                                {
                                    PluginFactory.CancelToken.ThrowIfCancellationRequested();
                                }

                                // Possible corner combination
                                Point[] tmpCorners = new Point[]
                                {
                                    possibleCornersSortedUpperLeft[indexUpperLeft],         // the corners are ordered beginning in the upper left corner and going counter-clockwise
                                    possibleCornersSortedLowerLeft[indexLowerLeft],
                                    possibleCornersSortedLowerRight[indexLowerRight],
                                    possibleCornersSortedUpperRight[indexUpperRight]
                                };
                                double angleDiff = RectangleDifferenceAngle(tmpCorners);
                                if (angleDiff > PieceFindCornersMaxAngleDiff)
                                {
                                    continue;
                                }

                                double area = CvInvoke.ContourArea(new VectorOfPoint(tmpCorners));
                                FindCornerRectangleScore score = new FindCornerRectangleScore()
                                {
                                    AngleDiff = angleDiff, RectangleArea = area, PossibleCorners = tmpCorners
                                };
                                scores.Add(score);
                            }
                        }
                    }
                }

                // Order the scores by rectangle area (biggest first) and take the PossibleCorners of the biggest rectangle as corners
                scores = scores.OrderByDescending(s => s.RectangleArea).ToList();
                if (scores.Count > 0)
                {
                    corners.AddRange(scores[0].PossibleCorners);
                }
            }

            if (corners.Count != 4)
            {
                PluginFactory.LogHandle.Report(new LogEventError(pieceID + " Failed to find correct number of corners. " + corners.Count + " found."));
            }

            if (PluginFactory.GetGeneralSettingsPlugin().SolverShowDebugResults)
            {
                using (Image <Rgb, byte> imgCorners = new Image <Rgb, byte>(pieceImgColor))
                {
                    Features2DToolbox.DrawKeypoints(imgCorners, new VectorOfKeyPoint(keyPoints), imgCorners, new Bgr(0, 0, 255));       // Draw the dominant key points

                    for (int i = 0; i < corners.Count; i++)
                    {
                        CvInvoke.Circle(imgCorners, Point.Round(corners[i]), 4, new MCvScalar(0, Math.Max(255 - i * 50, 50), 0), 3);
                    }
                    PluginFactory.LogHandle.Report(new LogEventImage(pieceID + " Corners", imgCorners.Bitmap));
                }
            }
            return(corners);
        }
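RectangleDifferenceAngle is used above but not defined in this snippet. A sketch under the assumption that it sums how far each corner of the candidate quadrilateral deviates from a right angle; the real plugin may compute something slightly different:

        // Assumed implementation: total deviation of the four corner angles from 90 degrees.
        private static double RectangleDifferenceAngle(Point[] corners)
        {
            double totalDiff = 0;
            for (int i = 0; i < corners.Length; i++)
            {
                Point prev = corners[(i + corners.Length - 1) % corners.Length];
                Point curr = corners[i];
                Point next = corners[(i + 1) % corners.Length];

                // interior angle at the current corner, in degrees
                double a1    = Math.Atan2(prev.Y - curr.Y, prev.X - curr.X);
                double a2    = Math.Atan2(next.Y - curr.Y, next.X - curr.X);
                double angle = Math.Abs((a1 - a2) * 180.0 / Math.PI);
                if (angle > 180) { angle = 360 - angle; }

                totalDiff += Math.Abs(90 - angle);
            }
            return totalDiff;
        }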
示例#12
0
        public CircleF FindCircle(Image <Gray, Byte> image, int estimatedRadius, int patternType, int error = 30)
        {
            circles.Clear();
            Image <Gray, Byte> bilateralFilteredImage, edgeDetectedImage, eroded, img;

            img = image.Clone();
            bilateralFilteredImage = new Mat().ToImage <Gray, byte>();
            edgeDetectedImage      = new Mat().ToImage <Gray, byte>();
            eroded = new Mat().ToImage <Gray, byte>();
            Mat hierarchy = new Mat();
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            //Mat

            CvInvoke.MorphologyEx(img, img, MorphOp.Close, GenerateEllipseKernel(13), new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
            CvInvoke.BilateralFilter(img, bilateralFilteredImage, 9, 30, 30);
            CvInvoke.Canny(bilateralFilteredImage, edgeDetectedImage, 25, 25);
            CvInvoke.MorphologyEx(edgeDetectedImage, eroded, MorphOp.Close, GenerateEllipseKernel(11), new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
            CvInvoke.FindContours(eroded, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxSimple);

            for (int i = 0; i < contours.Size; i++)
            {
                using (VectorOfPoint contour = contours[i])
                {
                    Rectangle r = CvInvoke.BoundingRectangle(contour);
                    double    w, h;
                    if (IsSquare(r.Width, r.Height))
                    {
                        w = r.Width;
                        h = r.Height;

                        double  rect_area   = ((w * w) / 4) * Math.PI; // area of a circle whose diameter equals the square's side
                        CircleF circle      = CvInvoke.MinEnclosingCircle(contour);
                        double  circle_area = circle.Radius * circle.Radius * Math.PI;

                        if ((Math.Abs(rect_area - circle_area) < rect_area / 10) &&
                            (Math.Abs(Math.Sqrt(circle_area / Math.PI) - estimatedRadius) < error) && (w > 21) && (h > 21))
                        {
                            CircleFWithScore temp = new CircleFWithScore(circle, CvInvoke.ContourArea(contour) / circle.Area);
                            circles.Add(temp);
                        }
                    }
                }
            }
            //CvInvoke.MatchTemplate(img,templ:templ,)
            //CvInvoke.Imshow("edge", eroded);
            //var watch = System.Diagnostics.Stopwatch.StartNew();
            CircleF result = FindHighestScoreCircle();

            if (MatchPattern(image, result, patternType))
            {
                //watch.Stop();
                //var elapsedMs = watch.ElapsedMilliseconds;
                //Console.WriteLine("\nFinished pattern matching in " + elapsedMs + "ms");
                return(result);
            }
            else
            {
                //watch.Stop();
                //var elapsedMs = watch.ElapsedMilliseconds;
                //Console.WriteLine("\nFinished pattern matching in " + elapsedMs + "ms");
                throw new IndexOutOfRangeException();
            }
        }
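GenerateEllipseKernel above is not shown either. A likely minimal version, assuming it just wraps GetStructuringElement with an elliptical shape of the given size:

        // Assumed helper: an elliptical structuring element of size x size pixels.
        private static Mat GenerateEllipseKernel(int size)
        {
            return CvInvoke.GetStructuringElement(ElementShape.Ellipse,
                                                  new Size(size, size),
                                                  new Point(-1, -1));
        }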
示例#13
0
        public Bitmap FormatImage(Bitmap bitmap)
        {
            if (bitmap.Width > bitmap.Height)
            {
                bitmap.RotateFlip(RotateFlipType.Rotate90FlipNone);
            }

            int originalWidth  = bitmap.Width;
            int originalHeight = bitmap.Height;

            Image <Bgr, Byte> img =
                new Image <Bgr, byte>(bitmap).Resize(400, 400, Inter.Linear, true); //resizing is needed for better rectangle detection

            int resizedWidth  = img.Width;
            int resizedHeight = img.Height;

            //Convert the image to grayscale and filter out the noise
            UMat uimage = new UMat();

            CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

            //use image pyr to remove noise
            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);

            // These values work best
            double cannyThreshold        = 180.0;
            double cannyThresholdLinking = 120.0;
            UMat   cannyEdges            = new UMat();

            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

            List <Bitmap> cropedImagesList = new List <Bitmap>();

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                int count = contours.Size;
                for (int i = 0; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                            if (CvInvoke.ContourArea(approxContour, false) > (resizedHeight * resizedWidth) / 3) //only consider contours with area greater than the third of the whole image
                            {
                                if (approxContour.Size == 4)                                                     //The contour has 4 vertices.
                                {
                                    //determine if all the angles in the contour are within [70, 110] degrees
                                    bool            isRectangle = true;
                                    Point[]         pts         = approxContour.ToArray();
                                    LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                    for (int j = 0; j < edges.Length; j++)
                                    {
                                        double angle = Math.Abs(
                                            edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                        if (angle < 70 || angle > 110) // the angle must be roughly a right angle
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }

                                    if (isRectangle)
                                    {
                                        double[] corners = new double[8];
                                        for (int j = 0; j < 4; j++)
                                        {
                                            corners[2 * j]     = Convert.ToDouble(approxContour[j].X) * originalWidth / resizedWidth;
                                            corners[2 * j + 1] = Convert.ToDouble(approxContour[j].Y) * originalHeight / resizedHeight;
                                        }

                                        //crop only if X1 is to the left of X2
                                        if (corners[0] <= corners[2])
                                        {
                                            cropedImagesList.Add(Crop(bitmap, corners));
                                        }
                                    }
                                }
                            }
                        }
                }
            }

            var croppedResult = FilterCropedImages(cropedImagesList); //returns null if nothing was cropped
            if (croppedResult != null)
            {
                //use the cropped image and apply the filter
                var result = BradleyLocalThreshold(croppedResult);

                if (result.Width > result.Height)
                {
                    result.RotateFlip(RotateFlipType.Rotate90FlipNone);
                }
                return(result);
            }
            else
            {
                //apply only the filter on the original image
                var result = BradleyLocalThreshold(bitmap);
                return(result);
            }
        }
示例#14
0
    public void DetectShapes()
    {
        StringBuilder msgBuilder     = new StringBuilder("Performance: ");
        double        cannyThreshold = 180.0;

        //Load the image from file and resize it for display
        var fileImage = _SavePath + _CaptureCounter.ToString() + ".png";

        Mat image = CvInvoke.Imread(_SavePath + (_CaptureCounter - 1).ToString() + ".png", Emgu.CV.CvEnum.LoadImageType.AnyColor);

        //Convert the image to grayscale and filter out the noise
        UMat uimage = new UMat();

        CvInvoke.CvtColor(image, uimage, ColorConversion.Bgr2Gray);

        //use image pyr to remove noise
        UMat pyrDown = new UMat();

        CvInvoke.PyrDown(uimage, pyrDown);
        CvInvoke.PyrUp(pyrDown, uimage);

        //Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();
        Stopwatch watch = Stopwatch.StartNew();

        #region circle detection

        #endregion

        #region Canny and edge detection
        //watch.Reset(); watch.Start();
        double cannyThresholdLinking = 120.0;
        UMat   cannyEdges            = new UMat();
        CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
        #endregion

        #region Find triangles and rectangles
        watch.Reset(); watch.Start();
        List <Triangle2DF> triangleList = new List <Triangle2DF>();
        List <Emgu.CV.Structure.RotatedRect> boxList = new List <Emgu.CV.Structure.RotatedRect>(); //a box is a rotated rectangle

        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                    using (VectorOfPoint approxContour = new VectorOfPoint())
                    {
                        CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                        if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                        {
                            if (approxContour.Size == 4)                      //The contour has 4 vertices.
                            {
                                #region determine if all the angles in the contour are within [80, 100] degree
                                bool isRectangle             = true;
                                System.Drawing.Point[] pts   = approxContour.ToArray();
                                LineSegment2D[]        edges = PointCollection.PolyLine(pts, true);

                                for (int j = 0; j < edges.Length; j++)
                                {
                                    double angle = Math.Abs(
                                        edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                    // NOTE: the angle check is commented out, so isRectangle is forced to
                                    // false and the sound/scene change fire once per edge.
                                    //if (angle < 80 || angle > 100)
                                    //{
                                    isRectangle = false;
                                    m_MyAudioSource.Play(0);
                                    SceneManager.LoadScene("Acertou");

                                    //}
                                }
                                #endregion

                                if (isRectangle)
                                {
                                    boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                }
                            }
                        }
                    }
            }
        }

        watch.Stop();
        #endregion
    }
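As the comments in the snippet note, the angle test is disabled, so isRectangle never survives the loop and the sound/scene change fire for every edge. A sketch of the inner block with the check re-enabled, under the assumption that the intent was to react only once a genuine rectangle is confirmed; this is an interpretation, not the author's code:

                // Sketch: re-enabled right-angle test for the 4-vertex contour (assumed intent).
                bool isRectangle = true;
                for (int j = 0; j < edges.Length; j++)
                {
                    double angle = Math.Abs(
                        edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                    if (angle < 80 || angle > 100) // reject non right angles
                    {
                        isRectangle = false;
                        break;
                    }
                }

                if (isRectangle)
                {
                    boxList.Add(CvInvoke.MinAreaRect(approxContour));
                    m_MyAudioSource.Play(0);            // react once, after the shape is confirmed
                    SceneManager.LoadScene("Acertou");
                }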
示例#15
0
        private void button1_Click(object sender, EventArgs e)
        {
            DialogResult   drChosenFile;
            OpenFileDialog opf = new OpenFileDialog();
            //drChosenFile = ofdOpenFile.ShowDialog(); // open file dialog

            /* if (drChosenFile != DialogResult.OK || ofdOpenFile.FileName == "")
             * { // if user chose Cancel or filename is blank . . .
             *   lblChosenFile.Text = "file not chosen"; // show error message on label
             *   return; // and exit function
             * }*/

            Mat imgTrainingNumbers;

            try
            {
                imgTrainingNumbers = CvInvoke.Imread("E:\\Emgu\\EmguC\\EmguC\\training_chars.png", LoadImageType.AnyColor);
            }
            catch (Exception ex)
            {                                                               // if error occurred
                label1.Text = "unable to open image, error: " + ex.Message; // show error message on label
                return;                                                     // and exit function
            }

            if (imgTrainingNumbers == null)
            {                                         // if image could not be opened
                label1.Text = "unable to open image"; // show error message on label
                return;                               // and exit function
            }

            label1.Text = opf.FileName; //update label with file name

            Mat imgGrayscale  = new Mat();
            Mat imgBlurred    = new Mat(); // declare various images
            Mat imgThresh     = new Mat();
            Mat imgThreshCopy = new Mat();
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            //Matrix<Single> mtxClassifications = new Matrix<Single>();
            //Matrix<Single> mtxTrainingImages = new Matrix<Single>();
            Mat matTrainingImagesAsFlattenedFloats = new Mat();

            //possible chars we are interested in are digits 0 through 9 and capital letters A through Z, put these in list intValidChars
            var intValidChars = new List <int>(new int[] {
                '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
                'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J',
                'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T',
                'U', 'V', 'W', 'X', 'Y', 'Z'
            });

            CvInvoke.CvtColor(imgTrainingNumbers, imgGrayscale, ColorConversion.Bgr2Gray);       //convert to grayscale
            CvInvoke.GaussianBlur(imgGrayscale, imgBlurred, new Size(5, 5), 0);                  //blur

            //threshold image from grayscale to black and white
            CvInvoke.AdaptiveThreshold(imgBlurred, imgThresh, 255.0, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 11, 2);
            CvInvoke.Imshow("imgThresh", imgThresh);        //show threshold image for reference
            imgThreshCopy = imgThresh.Clone();              //make a copy of the thresh image, this is necessary because FindContours modifies the image

            //get external contours only
            CvInvoke.FindContours(imgThreshCopy, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            int             intNumberOfTrainingSamples = contours.Size;
            Matrix <Single> mtxClassifications         = new Matrix <Single>(intNumberOfTrainingSamples, 1); //this is our classifications data structure

            //this is our training images data structure, note we will have to perform some conversions to write to this later
            Matrix <Single> mtxTrainingImages = new Matrix <Single>(intNumberOfTrainingSamples, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT);

            //this keeps track of which row we are on in both classifications and training images;
            //note that each sample will correspond to one row in both the classifications XML file
            //and the training images XML file
            int intTrainingDataRowToAdd = 0;

            for (int i = 0; i <= contours.Size - 1; ++i)
            {                                                                                                //for each contour
                if (CvInvoke.ContourArea(contours[i]) > MIN_CONTOUR_AREA)
                {                                                                                            //if contour is big enough to consider
                    Rectangle boundingRect = CvInvoke.BoundingRectangle(contours[i]);                        //get the bounding rect
                    CvInvoke.Rectangle(imgTrainingNumbers, boundingRect, new MCvScalar(0.0, 0.0, 255.0), 2); //draw red rectangle around each contour as we ask user for input

                    Mat imgROItoBeCloned = new Mat(imgThresh, boundingRect);                                 //get ROI image of current char
                    Mat imgROI           = imgROItoBeCloned.Clone();                                         //make a copy so we do not change the ROI area of the original image
                    Mat imgROIResized    = new Mat();

                    //resize image, this is necessary for recognition and storage
                    CvInvoke.Resize(imgROI, imgROIResized, new Size(RESIZED_IMAGE_WIDTH, RESIZED_IMAGE_HEIGHT));

                    CvInvoke.Imshow("imgROI", imgROI);                         //show ROI image for reference
                    CvInvoke.Imshow("imgROIResized", imgROIResized);           //show resized ROI image for reference
                    CvInvoke.Imshow("imgTrainingNumbers", imgTrainingNumbers); //show training numbers image, this will now have red rectangles drawn on it

                    int intChar = CvInvoke.WaitKey(0);                         //get key press

                    if (intChar == 27)
                    {           //if esc key was pressed
                        CvInvoke.DestroyAllWindows();
                        return; //exit the function
                    }
                    else if (intValidChars.Contains(intChar))
                    {                                                                               //else if the char is in the list of chars we are looking for . . .
                        mtxClassifications[intTrainingDataRowToAdd, 0] = Convert.ToSingle(intChar); //write classification char to classifications Matrix

                        //now add the training image (some conversion is necessary first) . . .
                        //note that we have to convert the images to Matrix(Of Single) type, this is necessary to pass into the KNearest object call to train
                        Matrix <Single> mtxTemp         = new Matrix <Single>(imgROIResized.Size);
                        Matrix <Single> mtxTempReshaped = new Matrix <Single>(1, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT);
                        imgROIResized.ConvertTo(mtxTemp, DepthType.Cv32F);           //convert Image to a Matrix of Singles with the same dimensions

                        for (int intRow = 0; intRow <= RESIZED_IMAGE_HEIGHT - 1; ++intRow)
                        {          //flatten Matrix into one row by RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT number of columns
                            for (int intCol = 0; intCol <= RESIZED_IMAGE_WIDTH - 1; ++intCol)
                            {
                                mtxTempReshaped[0, (intRow * RESIZED_IMAGE_WIDTH) + intCol] = mtxTemp[intRow, intCol];
                            }
                        }

                        for (int intCol = 0; intCol <= (RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT) - 1; ++intCol)
                        {         //write flattened Matrix into one row of training images Matrix
                            mtxTrainingImages[intTrainingDataRowToAdd, intCol] = mtxTempReshaped[0, intCol];
                        }
                        intTrainingDataRowToAdd = intTrainingDataRowToAdd + 1; //increment which row, i.e. sample we are on
                    }
                }
            }

            label1.Text = label1.Text + "training complete !!" + "\n" + "\n";

            //save classifications to file
            XmlSerializer xmlSerializer = new XmlSerializer(mtxClassifications.GetType());
            StreamWriter  streamWriter;

            try
            {
                streamWriter = new StreamWriter("classifications.xml"); //attempt to open classifications file
            }
            catch (Exception ex)
            {  //if error is encountered, show error and return
                label1.Text = "\n" + label1.Text + "unable to open 'classifications.xml', error:" + "\n";
                label1.Text = label1.Text + ex.Message + "\n" + "\n";
                return;
            }

            xmlSerializer.Serialize(streamWriter, mtxClassifications);
            streamWriter.Close();

            //save training images to file
            xmlSerializer = new XmlSerializer(mtxTrainingImages.GetType());

            try
            {
                streamWriter = new StreamWriter("images.xml"); // attempt to open images file
            }
            catch (Exception ex)
            { // if error is encountered, show error and return
                label1.Text = "\n" + label1.Text + "unable to open 'images.xml', error:" + "\n";
                label1.Text = label1.Text + ex.Message + "\n" + "\n";
                return;
            }

            xmlSerializer.Serialize(streamWriter, mtxTrainingImages);
            streamWriter.Close();
            label1.Text = "\n" + label1.Text + "file writing done" + "\n";
            MessageBox.Show("Training complete, file writing done !!");
        }
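
The classifications.xml and images.xml files written above are intended to be read back later and fed to a k-nearest-neighbour classifier. A minimal sketch of that step, assuming the Emgu CV 3.x Emgu.CV.ML API (KNearest, MlEnum.DataLayoutType) and illustrative variable names:

        XmlSerializer serializer = new XmlSerializer(typeof(Matrix<Single>));

        Matrix<Single> mtxClassifications;
        using (StreamReader reader = new StreamReader("classifications.xml"))
            mtxClassifications = (Matrix<Single>)serializer.Deserialize(reader);   // one ASCII code per training sample

        Matrix<Single> mtxTrainingImages;
        using (StreamReader reader = new StreamReader("images.xml"))
            mtxTrainingImages = (Matrix<Single>)serializer.Deserialize(reader);    // one flattened character image per row

        KNearest kNearest = new KNearest();                                        // requires: using Emgu.CV.ML;
        kNearest.DefaultK = 1;
        kNearest.Train(mtxTrainingImages, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, mtxClassifications);
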
Example #16
File: Form1.cs Project: Kidneyman/EmguCV
        public void PerformShapeDetection()
        {
            if (textBox1.Text != String.Empty)
            {
                StringBuilder msgBuilder = new StringBuilder("Performance: ");

                //Load the image from file and resize it for display
                Image <Bgr, Byte> img =
                    new Image <Bgr, byte>(textBox1.Text)
                    .Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

                //Convert the image to grayscale and filter out the noise
                UMat uimage = new UMat();
                CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

                //use image pyr to remove noise
                UMat pyrDown = new UMat();
                CvInvoke.PyrDown(uimage, pyrDown);
                CvInvoke.PyrUp(pyrDown, uimage);

                //Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();

                #region circle detection
                Stopwatch watch                      = Stopwatch.StartNew();
                double    cannyThreshold             = 180.0;
                double    circleAccumulatorThreshold = 120;
                CircleF[] circles                    = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);

                watch.Stop();
                msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                #region Canny and edge detection
                watch.Reset(); watch.Start();
                double cannyThresholdLinking = 120.0;
                UMat   cannyEdges            = new UMat();
                CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

                LineSegment2D[] lines = CvInvoke.HoughLinesP(
                    cannyEdges,
                    1,              //Distance resolution in pixel-related units
                    Math.PI / 45.0, //Angle resolution measured in radians.
                    20,             //threshold
                    30,             //minimum line length
                    10);            //maximum allowed gap between line segments

                watch.Stop();
                msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                #region Find triangles and rectangles
                watch.Reset(); watch.Start();
                List <Triangle2DF> triangleList = new List <Triangle2DF>();
                List <RotatedRect> boxList      = new List <RotatedRect>(); //a box is a rotated rectangle

                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                    int count = contours.Size;
                    for (int i = 0; i < count; i++)
                    {
                        using (VectorOfPoint contour = contours[i])
                            using (VectorOfPoint approxContour = new VectorOfPoint())
                            {
                                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                                if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                                {
                                    if (approxContour.Size == 3)                      //The contour has 3 vertices, it is a triangle
                                    {
                                        Point[] pts = approxContour.ToArray();
                                        triangleList.Add(new Triangle2DF(
                                                             pts[0],
                                                             pts[1],
                                                             pts[2]
                                                             ));
                                    }
                                    else if (approxContour.Size == 4) //The contour has 4 vertices.
                                    {
                                        #region determine if all the angles in the contour are within [80, 100] degree
                                        bool            isRectangle = true;
                                        Point[]         pts         = approxContour.ToArray();
                                        LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                        for (int j = 0; j < edges.Length; j++)
                                        {
                                            double angle = Math.Abs(
                                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                            if (angle < 80 || angle > 100)
                                            {
                                                isRectangle = false;
                                                break;
                                            }
                                        }
                                        #endregion

                                        if (isRectangle)
                                        {
                                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                        }
                                    }
                                }
                            }
                    }
                }

                watch.Stop();
                msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                OriginalImageBox.Image = img;
                this.Text = msgBuilder.ToString();

                #region draw triangles and rectangles
                Mat triangleRectangleImage = new Mat(img.Size, DepthType.Cv8U, 3);
                triangleRectangleImage.SetTo(new MCvScalar(0));
                foreach (Triangle2DF triangle in triangleList)
                {
                    CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(triangle.GetVertices(), Point.Round), true, new Bgr(Color.DarkBlue).MCvScalar, 2);
                }
                foreach (RotatedRect box in boxList)
                {
                    CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.DarkOrange).MCvScalar, 2);
                }

                TriangleandRectangleImageBox.Image = triangleRectangleImage;
                #endregion

                #region draw circles
                Mat circleImage = new Mat(img.Size, DepthType.Cv8U, 3);
                circleImage.SetTo(new MCvScalar(0));
                foreach (CircleF circle in circles)
                {
                    CvInvoke.Circle(circleImage, Point.Round(circle.Center), (int)circle.Radius, new Bgr(Color.Brown).MCvScalar, 2);
                }

                CircleImageBox.Image = circleImage;
                #endregion

                #region draw lines
                Mat lineImage = new Mat(img.Size, DepthType.Cv8U, 3);
                lineImage.SetTo(new MCvScalar(0));
                foreach (LineSegment2D line in lines)
                {
                    CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(Color.Green).MCvScalar, 2);
                }

                LineImageBox.Image = lineImage;
                #endregion
            }
        }
Example #17
    // Update is called once per frame
    void Update()
    {
        imageOrig = webCam.QueryFrame();
        if (imageOrig != null)
        {
            imageGray = new Mat();
            imageHSV  = new Mat();
            Mat imageAverage   = new Mat();
            Mat imageMedian    = new Mat();
            Mat imageGaussian  = new Mat();
            Mat imageBilateral = new Mat();
            imageResult = new Mat();

            CvInvoke.Flip(imageOrig, imageOrig, FlipType.Horizontal);
            CvInvoke.Resize(imageOrig, imageOrig, new Size(imSize, imSize * webCam.Height / webCam.Width));

            CvInvoke.CvtColor(imageOrig, imageGray, ColorConversion.Bgr2Gray);
            CvInvoke.CvtColor(imageOrig, imageHSV, ColorConversion.Bgr2Hsv);
            // Draw original image (disabled)
            //CvInvoke.Imshow(imNameOrig, imageOrig);
            // Draw HSV image
            CvInvoke.Imshow(imNameColor, imageHSV);

            Mat filteredImg = new Mat();
            //CvInvoke.BilateralFilter( imageHSV, filteredImg, 3, 75, 75 );
            //CvInvoke.GaussianBlur( imageHSV, filteredImg, new Size(7,7), 0 );
            CvInvoke.MedianBlur(imageHSV, filteredImg, 7);
            Image <Hsv, System.Byte> rangeImg = filteredImg.ToImage <Hsv, System.Byte>();
            // Yellow: roughly 40-70 degrees of hue, i.e. about 20-35 in OpenCV's 0-179 hue range
            Hsv bottomHsv = new Hsv(18, 127, 127);             // hue 0-179, saturation 0-255, value 0-255
            Hsv topHsv    = new Hsv(35, 240, 240);
            if (detectColorBlue)
            {
                // Blue: roughly 180-230 degrees of hue (about 90-115 in OpenCV's 0-179 hue range)
                bottomHsv = new Hsv(80, 70, 70);                 // hue 0-179, saturation 0-255, value 0-255
                topHsv    = new Hsv(120, 250, 250);
            }
            //rangeImg.Data[0,0,0];
            Mat imagBW           = rangeImg.InRange(bottomHsv, topHsv).Mat;
            int elementSize      = 5;
            Mat structureElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(elementSize, elementSize), new Point(-1, -1));
            CvInvoke.Erode(imagBW, imagBW, structureElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar(0));
            CvInvoke.Dilate(imagBW, imagBW, structureElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar(0));
            // Contours
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            int    biggestContourIndex     = -1;
            double biggestContourArea      = -1;
            Mat    hierarchy = new Mat();
            CvInvoke.FindContours(imagBW, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);
            if (contours.Size > 0)
            {
                biggestContourIndex = 0;
                biggestContourArea  = CvInvoke.ContourArea(contours[biggestContourIndex]);
            }
            for (int i = 1; i < contours.Size; i++)
            {
                double currentArea = CvInvoke.ContourArea(contours[i]);
                if (currentArea > biggestContourArea)
                {
                    biggestContourIndex = i;
                    biggestContourArea  = currentArea;
                }
            }
            if (contours.Size > 0)
            {
                VectorOfPoint biggestContour = contours[biggestContourIndex];

                CvInvoke.DrawContours(imageOrig, contours, biggestContourIndex, new MCvScalar(255, 0, 0), 5);

                if (previousBiggestContourArea > 0)                     // Object entering
                {
                    if (biggestContourArea > previousBiggestContourArea * 1.6)
                    {
                        // Going forward
                        //Debug.Log( "Front" );
                        if (canon)
                        {
                            canon.Shoot();
                        }
                    }
                    else if (biggestContourArea * 1.6 < previousBiggestContourArea)
                    {
                        // Going backward
                        //Debug.Log( "Back" );
                    }
                }
                previousBiggestContourArea = biggestContourArea;

                //* Centroid
                MCvMoments moment = CvInvoke.Moments(contours[biggestContourIndex]);
                int        cx     = (int)(moment.M10 / moment.M00);
                int        cy     = (int)(moment.M01 / moment.M00);

                imageOrig = drawPoint(imageOrig, cx, cy, 5);

                if (canon)
                {
                    canon.setHorizontalPosition(cx / (float)imSize);
                }
                //*/

                //* Top Point
                Point top = biggestContour[0];
                for (int i = 1; i < biggestContour.Size; i++)
                {
                    Point p = biggestContour[i];
                    if (top.Y > p.Y)
                    {
                        top = p;
                    }
                }

                if (canon)
                {
                    canon.setVerticalPosition(((float)imageOrig.SizeOfDimemsion[0] - top.Y) / (float)imageOrig.SizeOfDimemsion[0]);
                }
                imageOrig = drawPoint(imageOrig, top.X, top.Y, 5, 255, 0, 0);
                //*/
            }
            else
            {
                // Object leaving
                previousBiggestContourArea = -1;
            }
            CvInvoke.Imshow("BW", imagBW);
            CvInvoke.Imshow(imNameOrig, imageOrig);

            // Filtering

            /*CvInvoke.Blur( imageHSV, imageAverage, new Size(5,5), new Point(-1,-1) );
             * CvInvoke.Imshow("Average", imageAverage);
             * CvInvoke.MedianBlur( imageHSV, imageMedian, 5 );
             * CvInvoke.Imshow("Median", imageMedian);
             * CvInvoke.GaussianBlur( imageHSV, imageGaussian, new Size(5,5), 0 );
             * CvInvoke.Imshow("Gaussian", imageGaussian);
             * CvInvoke.BilateralFilter( imageHSV, imageBilateral, 3, 75, 75 );
             * CvInvoke.Imshow("Bilateral", imageBilateral);*/

            //CvInvoke.Imshow(imNameResult, imageResult);

            // Storing
            writer.Write(imageOrig);
        }
        else
        {
            webCam = new VideoCapture(imAddress);
        }

        if (Input.GetKeyDown(KeyCode.Escape))
        {
            Application.Quit();
        }
    }
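
The drawPoint helper called above is not included in this snippet. A plausible sketch consistent with the two call sites (the parameter names, default colour and channel order are assumptions):

    Mat drawPoint(Mat image, int x, int y, int radius, int b = 0, int g = 255, int r = 0)
    {
        // draw a filled circle at (x, y); thickness -1 fills the circle
        CvInvoke.Circle(image, new Point(x, y), radius, new MCvScalar(b, g, r), -1);
        return image;
    }
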
Example #18
File: Form1.cs Project: GodSunJae/Emgu
        private void button1_Click(object sender, EventArgs e)
        {
            //Use StringBuilder, which is efficient when a single thread reads and modifies strings repeatedly (much faster than string)
            // see https://blog.naver.com/impressives2/221338797755
            StringBuilder msgBuilder = new StringBuilder("Performance: ");


            //Image<Bgr, Byte> img =
            //  new Image<Bgr, byte>(fileNameTextBox.Text)
            //  .Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);


            // create an OpenFileDialog so the image can be loaded from disk
            OpenFileDialog ofd = new OpenFileDialog();

            //// when the OpenFileDialog opens and the OK button is clicked
            if (ofd.ShowDialog() == DialogResult.OK)
            {
                //https://dic1224.blog.me/220841161411  Resize scales the image up or down
                //https://blog.naver.com/PostView.nhn?blogId=dic1224&logNo=220841171866&parentCategoryNo=&categoryNo=152&viewDate=&isShowPopularPosts=true&from=search
                //the link above has the source and figures
                //https://dic1224.blog.me/220841161411 structure of Emgu.CV.CvEnum.Inter.Linear
                img = new Image <Bgr, Byte>(ofd.FileName).Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);
            }
            UMat uimage = new UMat();

            // img is in Bgr format, so convert it to grayscale with Bgr2Gray and write the result into uimage
            CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);



            //use image pyr to remove noise
            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(uimage, pyrDown); // downsample the grayscale image to remove noise
            CvInvoke.PyrUp(pyrDown, uimage);   // then upsample it back to the original size

            Image <Gray, Byte> gray = img.Convert <Gray, Byte>().PyrDown().PyrUp();

            #region circle detection
            // create a stopwatch (starting at zero) to measure elapsed time accurately
            Stopwatch watch = Stopwatch.StartNew();

            //parameters for circle detection
            double cannyThreshold             = 180.0;
            double circleAccumulatorThreshold = 120;

            //uimage is now denoised, grayscale and pyramid-filtered
            //detect circles on uimage
            CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);

            // circle detection finished, stop the stopwatch
            watch.Stop();

            // report how long it took
            msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
            #endregion

            #region Canny and edge detection
            // reset to zero and restart
            watch.Reset(); watch.Start();
            double cannyThresholdLinking = 120.0;

            UMat cannyEdges = new UMat();
            //run the Canny algorithm into cannyEdges; the 3rd and 4th arguments are thresholds 1 and 2
            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);


            //run the probabilistic Hough line transform on cannyEdges
            LineSegment2D[] lines = CvInvoke.HoughLinesP(
                cannyEdges,
                1,              //Distance resolution in pixel-related units
                Math.PI / 45.0, //Angle resolution measured in radians.
                20,             //threshold
                30,             //minimum line length
                10);            //maximum allowed gap between line segments

            // stop timing
            watch.Stop();
            //report the elapsed time
            msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
            #endregion

            #region Find triangles and rectangles
            // reset the stopwatch for the next measurement and restart
            watch.Reset(); watch.Start();

            // list of detected triangles
            List <Triangle2DF> triangleList = new List <Triangle2DF>();
            // list of detected rectangles
            List <RotatedRect> boxList = new List <RotatedRect>(); //a box is a rotated rectangle


            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                int count = contours.Size;
                for (int i = 0; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                            if (CvInvoke.ContourArea(approxContour, false) > 100) //only consider contours with area greater than 100
                            {
                                if (approxContour.Size == 3)                      //The contour has 3 vertices, it is a triangle
                                {
                                    Point[] pts = approxContour.ToArray();
                                    triangleList.Add(new Triangle2DF(
                                                         pts[0],
                                                         pts[1],
                                                         pts[2]
                                                         ));
                                }
                                else if (approxContour.Size == 4) //The contour has 4 vertices.
                                {
                                    #region determine if all the angles in the contour are within [90, 110] degrees
                                    bool            isRectangle = true;
                                    Point[]         pts         = approxContour.ToArray();
                                    LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                    for (int j = 0; j < edges.Length; j++)
                                    {
                                        double angle = Math.Abs(
                                            edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                        if (angle < 90 || angle > 110)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    #endregion

                                    if (isRectangle)
                                    {
                                        boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                    }
                                }
                            }
                        }
                }
            }
            //stop timing
            watch.Stop();
            //append the result
            msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
            #endregion

            // show the original img unchanged in its picture box
            originalImageBox.Image = img;
            // set the form title to the measured performance data
            this.Text = msgBuilder.ToString();

            #region draw triangles and rectangles
            Image <Bgr, Byte> triangleRectangleImage = img.CopyBlank();
            foreach (Triangle2DF triangle in triangleList)
            {
                triangleRectangleImage.Draw(triangle, new Bgr(Color.DarkBlue), 2);
            }
            foreach (RotatedRect box in boxList)
            {
                triangleRectangleImage.Draw(box, new Bgr(Color.DarkOrange), 2);
            }
            // show the triangle/rectangle image in pictureBox2
            triangleRectangleImageBox.Image = triangleRectangleImage;
            #endregion

            Image <Bgr, Byte> circleImage = img.CopyBlank();
            #region draw circles
            foreach (CircleF circle in circles)
            {
                circleImage.Draw(circle, new Bgr(Color.Brown), 2);
            }
            // show the circle image in pictureBox3
            circleImageBox.Image = circleImage;
            #endregion

            #region draw lines
            Image <Bgr, Byte> lineImage = img.CopyBlank();
            foreach (LineSegment2D line in lines)
            {
                lineImage.Draw(line, new Bgr(Color.Green), 2);
            }
            // show the detected-lines image in pictureBox4
            lineImageBox.Image = lineImage;
            #endregion
        }
Example #19
        public void GetBoundries(Image <Gray, Byte> binaryBackground, out List <Point[]> boundries, out List <Point[]> artefacts, out List <RotatedRect> boxes)
        {
            //Find outer boundaries
            double minimumContourArea = 250;
            double minimumBoundryArea = 1000;
            //double approximationFactor = 0.001;
            List <Point[]>     allBoundries = new List <Point[]>();
            List <Point[]>     allObjects   = new List <Point[]>();
            List <RotatedRect> boxList      = new List <RotatedRect>();

            using (Image <Gray, Byte> filteredBinary = binaryBackground.SmoothMedian(7))
                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                    using (Mat hierarchy = new Mat())
                    {
                        CvInvoke.FindContours(filteredBinary, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);
                        var temp = hierarchy.ToImage <Bgra, Byte>();

                        int        count      = contours.Size;
                        List <int> boundryIds = new List <int>();
                        for (int i = 0; i < count; i++)
                        {
                            using (VectorOfPoint contour = contours[i])
                            {
                                double contourArea = CvInvoke.ContourArea(contour);

                                if (contourArea >= minimumBoundryArea)
                                {
                                    Bgra currentContour = temp[0, i];
                                    if (currentContour.Alpha == 0)
                                    {
                                        allBoundries.Add(contour.ToArray());
                                        boundryIds.Add(i);
                                    }
                                }
                            }
                        }

                        for (int i = 0; i < count; i++)
                        {
                            using (VectorOfPoint contour = contours[i])
                            {
                                double contourArea = CvInvoke.ContourArea(contour);

                                if (contourArea >= minimumContourArea)
                                {
                                    Bgra currentContour = temp[0, i];

                                    if (!boundryIds.Contains(i) && boundryIds.Contains((int)currentContour.Alpha))
                                    {
                                        bool isRectangle = true;
                                        bool isCircle    = false;
                                        //Can the object be approximated as a circle or rectangle?
                                        using (VectorOfPoint apxContour = new VectorOfPoint())
                                        {
                                            double epsilon = CvInvoke.ArcLength(contour, true) * 0.05;
                                            CvInvoke.ApproxPolyDP(contour, apxContour, epsilon, true);

                                            if (apxContour.Size == 4) //The contour has 4 vertices.
                                            {
                                                Point[]         pts   = apxContour.ToArray();
                                                LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

                                                for (int j = 0; j < edges.Length; j++)
                                                {
                                                    double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                                    if (angle < 70 || angle > 110)
                                                    {
                                                        isRectangle = false;
                                                        break;
                                                    }
                                                }

                                                if (isRectangle)
                                                {
                                                    boxList.Add(CvInvoke.MinAreaRect(apxContour));
                                                }
                                            }
                                            else
                                            {
                                                isRectangle = false;
                                            }
                                        }

                                        if (!isRectangle && !isCircle)
                                        {
                                            allObjects.Add(contour.ToArray());
                                        }
                                    }
                                }
                            }
                        }
                    }

            //Find mouse
            //mousePoints = null;
            //using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            //{
            //    CvInvoke.FindContours(binaryMouse, contours, null, RetrType.External, ChainApproxMethod.ChainApproxNone);

            //    int count = contours.Size;
            //    double maxArea = 0;
            //    for (int j = 0; j < count; j++)
            //    {
            //        using (VectorOfPoint contour = contours[j])
            //        {
            //            double contourArea = CvInvoke.ContourArea(contour);
            //            if (contourArea >= maxArea)
            //            {
            //                maxArea = contourArea;
            //                mousePoints = contour.ToArray();
            //            }
            //        }
            //    }
            //}

            boundries = allBoundries;
            artefacts = allObjects;
            boxes     = boxList;
            //Check if any contours can be approximated as shapes


            //We now have a list of boundaries; if there's more than one, it means something is sticking across the screen
            if (allBoundries.Count > 1)
            {
                //Need to find points from all boundaries that are effectively parallel
            }

            //Image<Bgr, Byte> allContourImage = FirstFrame.Clone();

            //allContourImage.DrawPolyline(mousePoints, true, new Bgr(Color.Yellow), 2);
            //allContourImage.DrawPolyline(allBoundries.ToArray(), true, new Bgr(Color.Red), 2);
            //allContourImage.DrawPolyline(allObjects.ToArray(), true, new Bgr(Color.LightGreen), 2);
            //foreach (var box in boxList)
            //{
            //    allContourImage.Draw(box.GetVertices().Select(x => new Point((int)x.X, (int)x.Y)).ToArray(), new Bgr(Color.Aqua), 2);
            //}
        }
Example #20
        public void FindLicensePlateAndCharacters(VectorOfVectorOfPoint contours, int[,] hierachy, int idx, Mat gray, Mat canny, List <string> licenses)
        {
            for (; idx >= 0; idx = hierachy[idx, 0])
            {
                int numberOfChildren = GetNumberOfChildren(hierachy, idx);
                //if it does not contain any children (characters), it is not a license plate region
                if (numberOfChildren == 0)
                {
                    continue;
                }

                using (VectorOfPoint contour = contours[idx])
                {
                    if (CvInvoke.ContourArea(contour) > 200)
                    {
                        if (numberOfChildren < 3)
                        {
                            //    If the contour has fewer than 3 children, it is not a license plate (assuming a license plate has at least 3 characters)
                            //    However we should search the children of this contour to see if any of them is a license plate
                            FindLicensePlateAndCharacters(contours, hierachy, hierachy[idx, 2], gray, canny, licenses);
                            continue;
                        }

                        RotatedRect box = CvInvoke.MinAreaRect(contour);
                        if (box.Angle < -45.0)
                        {
                            float tmp = box.Size.Width;
                            box.Size.Width  = box.Size.Height;
                            box.Size.Height = tmp;
                            box.Angle      += 90.0f;
                        }
                        else if (box.Angle > 45.0)
                        {
                            float tmp = box.Size.Width;
                            box.Size.Width  = box.Size.Height;
                            box.Size.Height = tmp;
                            box.Angle      -= 90.0f;
                        }

                        double whRatio = (double)box.Size.Width / box.Size.Height;
                        if (!(3.0 < whRatio && whRatio < 10.0))
                        //if (!(1.0 < whRatio && whRatio < 2.0))
                        {
                            //if the width/height ratio is not in the specified range, it is not a license plate
                            //However we should search the children of this contour to see if any of them is a license plate
                            //Contour<Point> child = contours.VNext;
                            if (hierachy[idx, 2] > 0)
                            {
                                FindLicensePlateAndCharacters(contours, hierachy, hierachy[idx, 2], gray, canny, licenses);
                            }
                            continue;
                        }

                        using (UMat tmp1 = new UMat())
                            using (UMat tmp2 = new UMat())
                            {
                                PointF[] srcCorners = box.GetVertices();

                                PointF[] destCorners = new PointF[] {
                                    new PointF(0, box.Size.Height - 1),
                                    new PointF(0, 0),
                                    new PointF(box.Size.Width - 1, 0),
                                    new PointF(box.Size.Width - 1, box.Size.Height - 1)
                                };

                                using (Mat rot = CameraCalibration.GetAffineTransform(srcCorners, destCorners))
                                    CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));

                                //resize the license plate such that the font is ~ 10-12 pt; this font size gives better accuracy from tesseract
                                Size   approxSize = new Size(240, 180);
                                double scale      = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                                Size   newSize    = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                                CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
                                //removes some pixels from the edge
                                int       edgePixelSize = 2;
                                Rectangle newRoi        = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                                                                        tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                                UMat        plate             = new UMat(tmp2, newRoi);
                                List <UMat> filteredCharaters = FilterPlate.GetCharacters(plate);


                                _plateViewModel.filteredCharatersInSinglePlate.Add(filteredCharaters);
                                _plateViewModel.DetectedPlates.Add(plate.Bitmap.ToBitmapImage());
                                foreach (var character in filteredCharaters)
                                {
                                    _plateViewModel.FilteredDetectedCharacters.Add(character.Bitmap.ToBitmapImage());
                                }
                            }
                    }
                }
            }
        }
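
GetNumberOfChildren is not shown in this snippet. It walks the contour hierarchy array, where each row follows OpenCV's [next, previous, first child, parent] convention, and counts the direct children of a contour. A sketch consistent with how hierachy is indexed above:

        private static int GetNumberOfChildren(int[,] hierachy, int idx)
        {
            idx = hierachy[idx, 2];     // index of the first child, or negative if there is none
            if (idx < 0)
                return 0;

            int count = 1;
            while (hierachy[idx, 0] >= 0)
            {
                count++;                // follow the "next sibling" links
                idx = hierachy[idx, 0];
            }
            return count;
        }
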
Example #21
        // fill defect contours
        private Image <Bgr, byte> ContourFilling3(Image <Bgr, byte> pic)
        {
            Image <Bgr, byte> outpic = new Image <Bgr, byte>(pic.Size);
            Image <Ycc, byte> ycc    = pic.Convert <Ycc, byte>();

            for (int i = 0; i < ycc.Height; i++)
            {
                for (int j = 0; j < ycc.Width; j++)
                {
                    if (ycc[i, j].Cr > 35 && ycc[i, j].Cr < 148 &&
                        ycc[i, j].Cb > 48 && ycc[i, j].Cb < 141)
                    {
                        ycc[i, j] = new Ycc(0, 0, 0);
                    }
                    else
                    {
                        ycc[i, j] = new Ycc(255, 255, 255);
                    }
                }
            }
            Image <Gray, byte> gray = ycc.Convert <Gray, byte>();

            gray = gray.ThresholdBinary(new Gray(100), new Gray(255));
            gray = gray.Canny(100, 60);
            Image <Gray, byte>    outcon = new Image <Gray, byte>(pic.Size);
            VectorOfVectorOfPoint con    = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(gray, con, outcon, RetrType.External, ChainApproxMethod.ChainApproxNone);
            int n = 0;

            for (int i = 0; i < con.Size; i++)
            {
                if (CvInvoke.ContourArea(con[i]) > 0)
                {
                    n++;
                }
            }
            textBox1.Text = "共" + n.ToString() + "个缺陷" + "      " + "\n";
            n             = 0;
            for (int i = 0; i < con.Size; i++)
            {
                if (CvInvoke.ContourArea(con[i]) > 0)
                {
                    CvInvoke.DrawContours(outpic, con, i, new MCvScalar(0, 255, 0), 5);
                    textBox1.Text = textBox1.Text + "第" + (++n).ToString() + "个缺陷的面积为" + CvInvoke.ContourArea(con[i]) + "    \n";
                }
            }
            CvInvoke.AddWeighted(outpic, 0.5, picture, 0.5, 0, outpic);
            return(outpic);
        }
Example #22
        private void Btn_anwserReg_Click(object sender, EventArgs e)
        {
            if (this.ib_middleCut.Image == null)
            {
                MessageBox.Show("裁剪图片不能为空");
                return;
            }

            CVHelper commonUse = new CVHelper();

            Mat src = new Image <Bgr, byte>(ib_middleCut.Image.Bitmap).Mat;// new Mat();

            //CvInvoke.PyrMeanShiftFiltering(src1, src, 25, 10, 1, new MCvTermCriteria(5, 1));
            //commonUse.SaveMat(src, "denoised");
            //commonUse.SaveMat(src1, "denoised-original");

            Mat dst      = new Mat();
            Mat src_gray = new Mat();

            CvInvoke.CvtColor(src, src_gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
            //save the grayscale image
            commonUse.SaveMat(src_gray, "grayscale");

            #region binarization
            //binarize
            Mat mat_threshold = new Mat();
            int myThreshold   = Convert.ToInt32(num_threshold.Value);
            CvInvoke.Threshold(src_gray, mat_threshold, myThreshold, 255, Emgu.CV.CvEnum.ThresholdType.BinaryInv);
            commonUse.SaveMat(mat_threshold, "binarized");
            //approach: close - erode - erode - dilate
            //morphological close
            Mat mat_dilate = commonUse.MyDilate(mat_threshold, Emgu.CV.CvEnum.MorphOp.Close);
            commonUse.SaveMat(mat_dilate, "morph-close");
            //mat_dilate = commonUse.MyDilate(mat_dilate, Emgu.CV.CvEnum.MorphOp.Close);
            //commonUse.SaveMat(mat_dilate, "morph-close-1");
            mat_dilate = commonUse.MyDilate(mat_dilate, Emgu.CV.CvEnum.MorphOp.Erode);
            commonUse.SaveMat(mat_dilate, "morph-erode-1");
            mat_dilate = commonUse.MyDilate(mat_dilate, Emgu.CV.CvEnum.MorphOp.Erode);
            commonUse.SaveMat(mat_dilate, "morph-erode-2");

            mat_dilate = commonUse.MyDilate(mat_dilate, Emgu.CV.CvEnum.MorphOp.Dilate);
            commonUse.SaveMat(mat_dilate, "morph-dilate");
            #endregion

            //edge detection
            CvInvoke.Canny(mat_dilate, dst, Convert.ToInt32(this.num_Min.Value), Convert.ToInt32(this.num_Max.Value), Convert.ToInt32(this.num_apertureSize.Value));
            commonUse.SaveMat(dst, "edges");

            //find the answer-sheet rectangle boundaries (all rectangles)
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();      //VectorOfVectorOfPoint used to store the contours

            VectorOfVectorOfPoint validContours = new VectorOfVectorOfPoint(); //valid contours, i.e. all the answer options

            CvInvoke.FindContours(dst, contours, null, Emgu.CV.CvEnum.RetrType.Ccomp,
                                  Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple, new Point(8, 8));//extract contours

            //print the area and perimeter of every contour
            int size = contours.Size;
            for (int i = 0; i < size; i++)
            {
                var item     = contours[i];
                var tempArea = CvInvoke.ContourArea(item);
                var tempArc  = CvInvoke.ArcLength(item, true);
                Console.WriteLine($"面积:{tempArea};周长:{tempArc}");;
                if (tempArea > 200 && tempArea < 2000)
                {
                    validContours.Push(item);
                }
            }

            //CvInvoke.ApproxPolyDP

            //draw all contours
            Mat middleMat = new Image <Bgr, byte>(this.ib_middleCut.Image.Bitmap).Mat;
            CvInvoke.DrawContours(middleMat, validContours, -1, new MCvScalar(0, 0, 255), 1);
            this.ib_result.Image = middleMat;
            commonUse.SaveMat(middleMat, "all-contours");

            //draw all rectangles
            Mat tmpMat = new Image <Bgr, byte>(this.ib_middleCut.Image.Bitmap).Mat;
            List <Rectangle> rectangles = commonUse.GetRectList(validContours, false);

            rectangles.ForEach(rect =>
            {
                CvInvoke.Rectangle(tmpMat, rect, new MCvScalar(0, 0, 255));
            });

            commonUse.SaveMat(tmpMat, "all-rectangles");

            this.ib_result.Image = tmpMat;
        }
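
CVHelper.GetRectList is not shown in this snippet. A plausible sketch (the method name comes from the call above, but the body and the meaning of the bool parameter are assumptions) that returns the axis-aligned bounding rectangle of each contour, optionally sorted top-to-bottom then left-to-right:

        public List<Rectangle> GetRectList(VectorOfVectorOfPoint contours, bool sort)
        {
            List<Rectangle> rects = new List<Rectangle>();
            for (int i = 0; i < contours.Size; i++)
                rects.Add(CvInvoke.BoundingRectangle(contours[i]));   // bounding box of each contour

            if (sort)
                rects.Sort((a, b) => a.Y != b.Y ? a.Y.CompareTo(b.Y) : a.X.CompareTo(b.X));
            return rects;
        }
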
Example #23
        FileOperation fileOperation = new FileOperation();//instantiate the text-file handling class

        /// <summary>
        /// Get the contours of each block
        /// </summary>
        public void getContours(TextBox txtTypeName, PictureBox ptb) //find the nearest contour
        {
            GLB.Match_success = false;                               //reset before re-detection
            Image <Gray, byte> dnc         = new Image <Gray, byte>(GLB.BUFW, GLB.BUFH);
            Image <Gray, byte> threshImage = new Image <Gray, byte>(GLB.BUFW, GLB.BUFH);

            CvInvoke.CvtColor(GLB.frame, threshImage, ColorConversion.Bgra2Gray);//convert to grayscale
            //CvInvoke.BilateralFilter(threshImage, threshImage, 10, 10, 4);//bilateral filter
            //CvInvoke.GaussianBlur(threshImage, threshImage, new Size(3, 3), 4);//Gaussian filter
            CvInvoke.BoxFilter(threshImage, threshImage, Emgu.CV.CvEnum.DepthType.Cv8U, new Size(3, 3), new Point(-1, -1));//box filter
            #region
            //var kernal1 = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
            //CvInvoke.Dilate(threshImage, threshImage, kernal1, new Point(-1, -1), 2, BorderType.Default, new MCvScalar());//dilate
            //var kernal1 = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
            //CvInvoke.Erode(threshImage, threshImage, kernal1, new Point(-1, -1), 2, BorderType.Default, new MCvScalar());//erode

            //Method 1
            //CvInvoke.Threshold(threshImage, threshImage, 100, 255, ThresholdType.BinaryInv | ThresholdType.Otsu);//binarize
            //if (Mainform.runMode == 6)//matching pallets
            //{
            //    var kernal1 = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(9, 9), new Point(-1, -1));
            //    CvInvoke.Erode(threshImage, threshImage, kernal1, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());//erode

            //}
            //else//matching boxes
            //{
            //    var kernal1 = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
            //    CvInvoke.Erode(threshImage, threshImage, kernal1, new Point(-1, -1), 2, BorderType.Default, new MCvScalar());//erode
            //}

            //Method 2
            //if (Mainform.runMode == 6)//matching pallets
            //{
            //    var kernal1 = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(9, 9), new Point(-1, -1));
            //    CvInvoke.Dilate(threshImage, threshImage, kernal1, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());//dilate
            //}
            //else //adding dilation makes the jitter worse
            //{
            //    var kernal1 = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1));
            //    CvInvoke.Dilate(threshImage, threshImage, kernal1, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());//dilate
            //}
            //ptb.Image = threshImage.ToBitmap();
            #endregion
            //detect connected regions; each region is represented by a series of points. The FindContours method only gets the first region:
            try
            {
                VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint(2000);                                       //collection of blocks
                CvInvoke.FindContours(threshImage, contours, dnc, RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple); //collection of contours
                GLB.block_num = 0;

                Dictionary <int, VectorOfPoint> mycontours = new Dictionary <int, VectorOfPoint>(100);//index, contour
                mycontours.Clear();
                for (int k = 0; k < contours.Size; k++)
                {
                    double area = CvInvoke.ContourArea(contours[k]); //get the area of each connected region
                    if (area > 100000 && area < 800000)              //filter by area (specified minimum and maximum):
                    {
                        if (!mycontours.ContainsKey(k))
                        {
                            mycontours.Add(k, contours[k]);
                        }
                    }
                }
                float my_depth_temp = GLB.myp3d[(GLB.BUFH / 2 * GLB.BUFW + GLB.BUFW / 2) * 3 + 2];
                if (mycontours.Count == 0 && Mainform.ProduceArrive == true && Mainform.CarryMode == 0 && Mainform.runMode == 1 && (my_depth_temp > 1400 || double.IsNaN(my_depth_temp)))//an empty cart has arrived, so the cart leaves automatically
                {
                    Mainform.ProduceArrive = false;
                    Mainform.SetCarryArrive(0);                                                                                                                                                                           //mark the product as not delivered
                    ArrayList array = new ArrayList();                                                                                                                                                                    //array of SQL statements
                    string    sql   = "update Agv_list set isworking =0,stowerid ='',pronum =0 where agvid in(select agvid from Agvmission_list where fstatus =7 and messionType =1 and stowerid='" + GLB.RobotId + "')"; //update the cart status
                    string    sql1  = "update Agvmission_list set fstatus =6 ,actionenddate=getdate() where fstatus =7 and messionType =1  and stowerid='" + GLB.RobotId + "'";                                           //update the task from waiting to completed
                    array.Add(sql);
                    array.Add(sql1);
                    bool isok = MyDataLib.transactionOp_list(array);
                    Mainform.SetRobotStatus(2, "waiting for delivery");//update the palletizing robot status
                }
                //sort by largest area and build a new dictionary
                Dictionary <int, VectorOfPoint> mycontours_SortedByKey = new Dictionary <int, VectorOfPoint>(100);//index, contour
                mycontours_SortedByKey.Clear();
                mycontours_SortedByKey = mycontours.OrderByDescending(o => CvInvoke.ContourArea(o.Value)).ToDictionary(p => p.Key, o => o.Value);
                GLB.obj.Clear();
                foreach (int k in mycontours_SortedByKey.Keys)
                {
                    OBJ obj = new OBJ();
                    {
                        if (!GLB.obj.ContainsKey(GLB.block_num))
                        {
                            GLB.obj.Add(GLB.block_num, obj);                                  //add it if not already present
                        }
                        GLB.obj[GLB.block_num].typName = txtTypeName.Text.Replace(" ", "");   // object name

                        if (getMinAreaRect(mycontours_SortedByKey[k], GLB.block_num) == true) //get the minimum-area bounding rect and compute related parameters
                        {
                            if (GLB.img_mode == 0)                                            //matching mode
                            {
                                if (Device_Macth(GLB.block_num) == true)                      //compare against the library to produce the workpiece position, normal vector and rotation angle
                                {
                                    Thread.Sleep(400);
                                    break;
                                }
                            }
                        }

                        GLB.TitleStr += "block_num=" + GLB.block_num;
                        GLB.block_num++;//block counter
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("发生错误: " + ex.Message);
                throw;
            }
        }
Example #24
        /// <summary>
        /// Detect basic elements (such as circle, line, rectangle and triangle)
        /// </summary>
        /// <param name="argPath"></param>
        /// <param name="argtMode"></param>
        /// <returns></returns>
        public static DetectBasicEleementResult DetectBasicElement(string argPath, DetectMode argtMode)
        {
            StringBuilder msgBuilder = new StringBuilder("Performance: ");

            //Load the image from file and resize it for display
            Image <Bgr, byte> img = new Image <Bgr, byte>(argPath).Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

            //Convert the image to grayscale and filter out the noise
            UMat uimage = new UMat();

            CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

            //use image pyr to remove noise
            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);

            //Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();

            #region circle detection

            CircleF[] circles        = null;
            Stopwatch watch          = new Stopwatch();
            double    cannyThreshold = 180.0;
            if (argtMode == DetectMode.IncludeCircle)
            {
                watch = Stopwatch.StartNew();
                double circleAccumulatorThreshold = 120;
                circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);

                watch.Stop();
                msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
            }
            #endregion

            #region Canny and edge detection
            watch.Reset(); watch.Start();
            double cannyThresholdLinking = 120.0;
            UMat   cannyEdges            = new UMat();
            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

            LineSegment2D[] lines = CvInvoke.HoughLinesP(cannyEdges,
                                                         1,              //Distance resolution in pixel-related units
                                                         Math.PI / 45.0, //Angle resolution measured in radians.
                                                         20,             //threshold
                                                         30,             //minimum line length
                                                         10);            //maximum allowed gap between line segments

            watch.Stop();
            msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
            #endregion

            #region Find triangles and rectangles
            watch.Reset(); watch.Start();
            List <Triangle2DF> triangleList = new List <Triangle2DF>();
            List <RotatedRect> boxList      = new List <RotatedRect>();      //a box is a rotated rectangle

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                int count = contours.Size;
                for (int i = 0; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                            if (CvInvoke.ContourArea(approxContour, false) > 250)    //only consider contours with area greater than 250
                            {
                                if (approxContour.Size == 3)                         //The contour has 3 vertices, it is a triangle
                                {
                                    Point[] pts = approxContour.ToArray();
                                    triangleList.Add(new Triangle2DF(pts[0], pts[1], pts[2]));
                                }
                                else if (approxContour.Size == 4)                         //The contour has 4 vertices.
                                {
                                    #region determine if all the angles in the contour are within [80, 100] degree
                                    bool            isRectangle = true;
                                    Point[]         pts         = approxContour.ToArray();
                                    LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                    for (int j = 0; j < edges.Length; j++)
                                    {
                                        double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                        if (angle < 80 || angle > 100)
                                        {
                                            isRectangle = false;
                                            break;
                                        }
                                    }
                                    #endregion

                                    if (isRectangle)
                                    {
                                        boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                    }
                                }
                            }
                        }
                }
            }

            watch.Stop();
            msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
            #endregion

            return(new DetectBasicEleementResult(img, triangleList, boxList, circles, lines, msgBuilder.ToString()));
        }
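
        // --- Illustrative sketch (not part of the original example) ---
        // A minimal, standalone version of the Canny + HoughLinesP step used above,
        // assuming the usual Emgu CV usings (Emgu.CV, Emgu.CV.CvEnum, Emgu.CV.Structure).
        // The thresholds simply mirror the constants in DetectBasicElement; helper name is hypothetical.
        public static LineSegment2D[] DetectLinesSketch(string path)
        {
            // Load the image and convert it to grayscale.
            UMat gray = new UMat();
            using (Image <Bgr, byte> img = new Image <Bgr, byte>(path))
                CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);

            // Edge map with the same thresholds as above (180 / 120).
            UMat cannyEdges = new UMat();
            CvInvoke.Canny(gray, cannyEdges, 180.0, 120.0);

            // Probabilistic Hough transform: rho = 1 px, theta = 4 degrees,
            // accumulator threshold 20, minimum length 30 px, maximum gap 10 px.
            return CvInvoke.HoughLinesP(cannyEdges, 1, Math.PI / 45.0, 20, 30, 10);
        }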
示例#25
0
        private void button2_Click(object sender, EventArgs e)  //process the image
        {
            c = 0; fields = new List <Image <Gray, byte> >(); grid = new int[9, 9];
            OpenFileDialog opf = new OpenFileDialog();

            if (opf.ShowDialog() != DialogResult.OK)
            {
                return;
            }
            Image <Gray, Byte> gray = new Image <Gray, byte>(opf.FileName);

            imageBox1.Image = gray.Clone();
            Image <Gray, Byte> izhod = new Image <Gray, byte>(gray.Width, gray.Height);

            // binarization (adaptive threshold segmentation)
            izhod = gray.ThresholdAdaptive(new Gray(255), Emgu.CV.CvEnum.AdaptiveThresholdType.MeanC, Emgu.CV.CvEnum.ThresholdType.BinaryInv, 11, new Gray(11));
            izhod._SmoothGaussian(1); //smoothing (not strictly necessary)
            // imageBox2.Image = izhod.Clone();
            var nova = izhod.Clone();
            //procedure for finding the largest connected object
            VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint();
            Mat hierarchy             = new Mat();

            CvInvoke.FindContours(izhod, vvp, hierarchy, Emgu.CV.CvEnum.RetrType.Tree, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
            int           largest_contour_index = 0;
            double        largest_area          = 0;
            VectorOfPoint largestContour;

            for (int i = 0; i < vvp.Size; i++)
            {
                double a = CvInvoke.ContourArea(vvp[i], false);
                if (a > largest_area)
                {
                    largest_area          = a;
                    largest_contour_index = i;
                }
            }
            largestContour = vvp[largest_contour_index];
            Point[] lc = largestContour.ToArray();
            //find the corners for the perspective transformation
            Point topleft  = new Point(gray.Width, gray.Height);
            Point topright = new Point(0, gray.Height);
            Point botright = new Point(0, 0);
            Point botleft  = new Point(gray.Width, 0);

            foreach (Point p in lc)
            {
                if ((p.X + p.Y) < (topleft.X + topleft.Y))
                {
                    topleft = p;
                }
                else if ((p.X - p.Y) > (topright.X - topright.Y))
                {
                    topright = p;
                }
                else if ((p.X + p.Y) > (botright.X + botright.Y))
                {
                    botright = p;
                }
                else if ((p.Y - p.X) > (botleft.Y - botleft.X))
                {
                    botleft = p;
                }
            }
            //paint over the grid lines to remove them, so only the digits remain (easier recognition)
            CvInvoke.DrawContours(nova, vvp, largest_contour_index, new MCvScalar(0, 0, 0), 6, Emgu.CV.CvEnum.LineType.EightConnected, hierarchy, 1);
            Image <Gray, Byte> warp = new Image <Gray, byte>(450, 450);

            PointF[] src     = new PointF[] { topleft, topright, botright, botleft };
            PointF[] dst     = new PointF[] { new Point(0, 0), new Point(450, 0), new Point(450, 450), new Point(0, 450) };
            Mat      warpmat = CvInvoke.GetPerspectiveTransform(src, dst);     //compute the transformation matrix

            CvInvoke.WarpPerspective(nova, warp, warpmat, new Size(450, 450)); //apply the transformation
            //imageBox1.Image = nova;
            imageBox2.Image = warp;
            //warp._Erode(1); //erosion or dilation, not needed
            //warp._Dilate(1);

            //digit recognition, two options (selected with the radio buttons)
            if (radioButton1.Checked)
            {
                tess = new Tesseract(@"C:/Emgu/emgucv-windows-universal 3.0.0.2157/bin/", null, OcrEngineMode.Default, "123456789 ");
            }

            fields = new List <Image <Gray, byte> >(); //keep the cells for easier debugging
            for (int i = 0; i < 9; i++)
            {
                for (int j = 0; j < 9; j++)
                {
                    Image <Gray, Byte> temp = (warp.GetSubRect(new Rectangle(j * 50 + 3, i * 50 + 3, 44, 44))).Clone(); //leave a small margin around the cell edge
                    temp._SmoothGaussian(1);
                    Gray sum = temp.GetSum();                                                                           //if there are not enough white pixels (object parts), it is not a digit
                    if (sum.Intensity < 30000)
                    {
                        continue;
                    }

                    //again look for the largest element in the cell, assuming it is the digit
                    VectorOfVectorOfPoint vvptemp = new VectorOfVectorOfPoint();
                    Mat hierarchytemp             = new Mat();
                    CvInvoke.FindContours(temp, vvptemp, hierarchytemp, Emgu.CV.CvEnum.RetrType.Tree, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
                    int           ind  = 0;
                    double        area = 100;
                    VectorOfPoint contour;
                    for (int k = 0; k < vvptemp.Size; k++)
                    {
                        double ar = CvInvoke.ContourArea(vvptemp[k], false);
                        if (ar > area)
                        {
                            area = ar;
                            ind  = k;
                        }
                    }
                    if (area == 100)
                    {
                        continue;           //if no contour has an area above 100 (the cell is 44x44), assume it is not a digit
                    }
                    contour = vvptemp[ind]; //contour of the digit

                    var tempimg = new Image <Gray, Byte>(44, 44, new Gray(0));
                    CvInvoke.DrawContours(tempimg, vvptemp, ind, new MCvScalar(255, 0, 0), -1, Emgu.CV.CvEnum.LineType.EightConnected, hierarchytemp);
                    //draw the interior of the largest contour in white on a new image
                    fields.Add(tempimg); //add it for inspection
                    if (radioButton2.Checked)
                    {
                        Rectangle br       = CvInvoke.BoundingRectangle(contour);
                        int       indeks   = 0;
                        double    vrednost = double.MaxValue;
                        for (int q = 0; q < 9; q++)
                        {   //compute the similarity to each template
                            var kraj      = tempimg.GetSubRect(new Rectangle(br.X, br.Y, vzorci[q].Width, vzorci[q].Height));
                            var pod       = vzorci[q].AbsDiff(kraj);
                            var podobnost = pod.GetSum();
                            if (podobnost.Intensity < vrednost)
                            {
                                indeks   = q + 1; //+1 because q is zero based
                                vrednost = podobnost.Intensity;
                            }
                        }
                        grid[i, j] = indeks;//the most similar template is the recognized digit
                    }
                    else
                    {
                        tess.Recognize(tempimg); //recognize the image with the Tesseract OCR built into OpenCV
                        var x = tess.GetCharacters();
                        if (x.Length == 1)
                        {
                            grid[i, j] = Convert.ToInt32(x[0].Text);
                        }
                    }
                }
            }
            NarisiStevilke(); //draw the recognized digits onto the grid
        }
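
        // --- Illustrative sketch (not part of the original example) ---
        // The perspective-rectification step above in isolation: given the four board corners
        // (in the example they come from scanning the largest contour), map them onto a
        // 450x450 square. Helper name and signature are hypothetical.
        private static Image <Gray, byte> WarpBoardSketch(Image <Gray, byte> source,
                                                          PointF topLeft, PointF topRight,
                                                          PointF botRight, PointF botLeft)
        {
            PointF[] src = { topLeft, topRight, botRight, botLeft };
            PointF[] dst = { new PointF(0, 0), new PointF(450, 0), new PointF(450, 450), new PointF(0, 450) };

            Image <Gray, byte> warped = new Image <Gray, byte>(450, 450);
            using (Mat warpMat = CvInvoke.GetPerspectiveTransform(src, dst))   // transformation matrix
                CvInvoke.WarpPerspective(source, warped, warpMat, new Size(450, 450));
            return warped;
        }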
        private List <Result> DetectBanknotesTrain(Mat image, float minimumMatchAllowed = 0.07f, float minimuTargetAreaPercentage = 0.05f, float maxDistanceRatio = 0.75f, float reprojectionThresholPercentage = 0.01f,
                                                   double confidence = 0.99, int maxIters = 5000, int minimumNumerInliers = 8)
        {
            object locker = new object();

            List <Result> detectorResults = new List <Result>();

            MKeyPoint[] mKeyPoints;
            SIFT        sift = new SIFT();

            mKeyPoints = sift.Detect(image);
            VectorOfKeyPoint keypointsEvalImage = new VectorOfKeyPoint();

            keypointsEvalImage.Push(mKeyPoints);

            if (keypointsEvalImage.Size < 4)
            {
                return(detectorResults);
            }

            Mat descriptorsEvalImage = new Mat();

            sift.Compute(image, keypointsEvalImage, descriptorsEvalImage);

            Features2DToolbox.DrawKeypoints(image, keypointsEvalImage, image, new Bgr(0, 0, 255), Features2DToolbox.KeypointDrawType.Default);

            float  bestMatch          = 0;
            Result bestDetectorResult = new Result();

            int   trainDetectorsSize    = DetectedBanknotes.Count;
            bool  validDetection        = true;
            float reprojectionThreshold = image.Cols * reprojectionThresholPercentage;

            do
            {
                bestMatch = 0;

                Parallel.For(0, trainDetectorsSize, i =>
                {
                    DetectedBanknotes[(int)i].UpdateCurrentLODIndex(ref image, 0.6999999881F);
                    Result detectorResult = DetectedBanknotes[(int)i].AnalyzeImageEval(ref keypointsEvalImage, ref descriptorsEvalImage, maxDistanceRatio, reprojectionThreshold, confidence, maxIters, minimumNumerInliers);
                    if (detectorResult.GetBestROIMatch() > minimumMatchAllowed)
                    {
                        float contourArea           = (float)CvInvoke.ContourArea(detectorResult.GetTrainContour());
                        float imageArea             = (float)(image.Cols * image.Rows);
                        float contourAreaPercentage = contourArea / imageArea;

                        if (contourAreaPercentage > minimuTargetAreaPercentage)
                        {
                            double contourAspectRatio = _util.ComputeContourAspectRatio(detectorResult.GetTrainContour());
                            if (contourAspectRatio > _contourAspectRatioRange.X && contourAspectRatio < _contourAspectRatioRange.Y)
                            {
                                double contourCircularity = _util.ComputeContourCircularity(detectorResult.GetTrainContour());
                                if (contourCircularity > _contourCircularityRange.X && contourCircularity < _contourCircularityRange.Y)
                                {
                                    if (CvInvoke.IsContourConvex(detectorResult.GetTrainContour()))
                                    {
                                        lock (locker)
                                        {
                                            if (detectorResult.GetBestROIMatch() > bestMatch)
                                            {
                                                bestMatch          = detectorResult.GetBestROIMatch();
                                                bestDetectorResult = detectorResult;
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                });

                validDetection = bestMatch > minimumMatchAllowed && bestDetectorResult.GetInliers().Size > minimumNumerInliers;

                if (bestDetectorResult != null && validDetection)
                {
                    detectorResults.Add(bestDetectorResult);
                    _util.RemoveInliersFromKeypointsAndDescriptors(bestDetectorResult.GetInliers(), ref keypointsEvalImage, ref descriptorsEvalImage);
                }
            } while (validDetection);

            return(detectorResults);
        }
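
        // --- Illustrative sketch (not part of the original example) ---
        // The SIFT detect/compute stage from the top of DetectBanknotesTrain in isolation.
        // Assumes the same SIFT type as above (Emgu.CV.XFeatures2D in Emgu CV 3.x,
        // Emgu.CV.Features2D in 4.4+). Helper name is hypothetical.
        private static Mat ComputeSiftDescriptorsSketch(Mat image, out VectorOfKeyPoint keypoints)
        {
            SIFT sift = new SIFT();

            keypoints = new VectorOfKeyPoint();
            keypoints.Push(sift.Detect(image));               // keypoints of the evaluation image

            Mat descriptors = new Mat();
            if (keypoints.Size > 0)
                sift.Compute(image, keypoints, descriptors);  // one 128-dimensional descriptor per keypoint
            return descriptors;
        }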
示例#27
0
        private void Processing(Image <Bgr, byte> ImgSource, TextColor Color = TextColor.White)
        {
            Rectangle ROICode = new Rectangle();

            mImgDetected = ImgSource.Copy();
            // create an ROI image
            Rectangle ROI = new Rectangle(ImgSource.Width / 2, ImgSource.Height / 10, ImgSource.Width, ImgSource.Height / 4);

            mImgDetected.ROI = ROI;
            // filter noise
            //detect code
            using (Image <Gray, byte> imgGray = mImgDetected.Convert <Gray, byte>())
            {
                using (Image <Gray, byte> imgFilter = new Image <Gray, byte>(imgGray.Size))
                {
                    CvInvoke.BilateralFilter(imgGray, imgFilter, 9, 49, 49);
                    using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1)))
                    {
                        if (Color == TextColor.White)
                        {
                            CvInvoke.Erode(imgFilter, imgFilter, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
                        }
                        else
                        {
                            CvInvoke.Dilate(imgFilter, imgFilter, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
                        }
                    }
                    using (Image <Gray, double> ImgSobel = new Image <Gray, double>(imgFilter.Size))
                    {
                        CvInvoke.Sobel(imgFilter, ImgSobel, Emgu.CV.CvEnum.DepthType.Cv64F, 1, 0, kSize: 1);
                        CvInvoke.ConvertScaleAbs(ImgSobel, imgFilter, 2, 0);
                        CvInvoke.Threshold(imgFilter, imgFilter, 20, 255, Emgu.CV.CvEnum.ThresholdType.Binary);

                        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                        {
                            CvInvoke.FindContours(imgFilter, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                            for (int i = 0; i < contours.Size; i++)
                            {
                                double    s     = CvInvoke.ContourArea(contours[i]);
                                Rectangle bound = CvInvoke.BoundingRectangle(contours[i]);
                                if (bound.Height > 65 || s < 10)
                                {
                                    CvInvoke.DrawContours(imgFilter, contours, i, new MCvScalar(0), -1);
                                }
                            }
                        }
                        using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(107, 1), new Point(-1, -1)))
                        {
                            CvInvoke.MorphologyEx(imgFilter, imgFilter, Emgu.CV.CvEnum.MorphOp.Close, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
                        }
                        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                        {
                            CvInvoke.FindContours(imgFilter, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                            double large_area  = 0;
                            int    index_large = 0;
                            for (int i = 0; i < contours.Size; i++)
                            {
                                double s = CvInvoke.ContourArea(contours[i]);
                                if (large_area < s)
                                {
                                    large_area  = s;
                                    index_large = i;
                                }
                            }
                            Rectangle boxFirstLine  = CvInvoke.BoundingRectangle(contours[index_large]);
                            Rectangle boxSecondLine = new Rectangle();
                            for (int i = 0; i < contours.Size; i++)
                            {
                                Rectangle b = CvInvoke.BoundingRectangle(contours[i]);
                                if (b.Y - boxFirstLine.Y < 120 && b.Y - boxFirstLine.Y > 0 && b.Width > 30)
                                {
                                    boxSecondLine = CvInvoke.BoundingRectangle(contours[i]);
                                    break;
                                }
                            }
                            ROICode        = new Rectangle(boxFirstLine.X - 20, boxFirstLine.Y - 20, boxFirstLine.Width + 40, boxSecondLine.Y + boxSecondLine.Height + 60 - boxFirstLine.Y); //span from 20 px above the first line to 40 px below the second
                            ROICode.X      = ROICode.X < 0 ? 0: ROICode.X;
                            ROICode.Y      = ROICode.Y < 0 ? 0 : ROICode.Y;
                            ROICode.Width  = ROICode.X + ROICode.Width > mImgDetected.Width ? mImgDetected.Width - ROICode.X : ROICode.Width;
                            ROICode.Height = ROICode.Y + ROICode.Height > mImgDetected.Height ? mImgDetected.Height - ROICode.Y : ROICode.Height;
                            mImgCroped     = mImgDetected.Copy();
                            mImgCroped.ROI = ROICode;
                            CvInvoke.Rectangle(mImgDetected, ROICode, new MCvScalar(255, 0, 0), 3);
                            mImgDetected.ROI = new Rectangle();
                            imb3.Image       = mImgCroped.Bitmap;
                        }
                    }
                }
            }
            // segment char text
            mImgSegment     = new Image <Gray, byte>(mImgCroped.Size);
            mImgCharSegment = mImgCroped.Copy();
            CvInvoke.CvtColor(mImgCroped, mImgSegment, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
            using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1)))
            {
                CvInvoke.MorphologyEx(mImgSegment, mImgSegment, Emgu.CV.CvEnum.MorphOp.Open, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
            }
            Image <Gray, byte> img_decode = mImgSegment.Copy();

            CvInvoke.BitwiseNot(img_decode, img_decode);
            CvInvoke.Imwrite("test.png", img_decode);
            CvInvoke.Threshold(mImgSegment, mImgSegment, 127, 255, Emgu.CV.CvEnum.ThresholdType.Binary);
            using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1)))
            {
                CvInvoke.MorphologyEx(mImgSegment, mImgSegment, Emgu.CV.CvEnum.MorphOp.Open, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
            }
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(mImgSegment, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                for (int i = 0; i < contours.Size; i++)
                {
                    Rectangle bound = CvInvoke.BoundingRectangle(contours[i]);
                    if (bound.Height > 60 || bound.Height < 30 || bound.Width > 35)
                    {
                        CvInvoke.DrawContours(mImgSegment, contours, i, new MCvScalar(0), -1);
                    }
                }
            }
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(mImgSegment, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                for (int i = 0; i < contours.Size; i++)
                {
                    Rectangle bound = CvInvoke.BoundingRectangle(contours[i]);
                    CvInvoke.Rectangle(mImgCharSegment, bound, new MCvScalar(0, 255, 0), 2);
                }
            }
            CvInvoke.Threshold(mImgSegment, mImgSegment, 127, 255, Emgu.CV.CvEnum.ThresholdType.BinaryInv);
            imb4.Image = mImgSegment.Bitmap;
            imb5.Image = mImgCharSegment.Bitmap;
            string code = Read(img_decode);

            Console.WriteLine(code);
            imb2.Image = mImgDetected.Bitmap;
        }
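
        // --- Illustrative sketch (not part of the original example) ---
        // The wide horizontal closing used above to merge individual characters into one blob
        // per text line, shown on its own. The 107x1 kernel mirrors the value used above; a
        // closing (dilation then erosion) bridges horizontal gaps while leaving vertical structure alone.
        private static void CloseIntoTextLinesSketch(Image <Gray, byte> binary)
        {
            using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(107, 1), new Point(-1, -1)))
            {
                CvInvoke.MorphologyEx(binary, binary, Emgu.CV.CvEnum.MorphOp.Close, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
            }
        }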
示例#28
0
    private void DetectBall(Mat image, Image <Hsv, byte> img, Hsv seuilBas, Hsv seuilHaut, String suffix)
    {
        //Binary mask between the two HSV thresholds (named "Left" but reused for both hands via suffix)
        Mat imageBinLeft = img.InRange(seuilBas, seuilHaut).Mat;

        int                   operationSize      = 1;
        Mat                   structuringElement = CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(2 * operationSize + 1, 2 * operationSize + 1), new Point(operationSize, operationSize));
        MCvScalar             constante          = new MCvScalar(10);
        MCvScalar             colorConst         = new MCvScalar(100, 255, 100);
        VectorOfVectorOfPoint contourObject      = new VectorOfVectorOfPoint();
        int                   indexBestContour   = -1;

        CvInvoke.Erode(imageBinLeft, imageBinLeft, structuringElement, new Point(operationSize, operationSize), 3, BorderType.Default, constante);
        CvInvoke.Dilate(imageBinLeft, imageBinLeft, structuringElement, new Point(operationSize, operationSize), 3, BorderType.Default, constante);
        CvInvoke.FindContours(imageBinLeft, contourObject, null, RetrType.Ccomp, ChainApproxMethod.ChainApproxNone);

        for (int i = 0; i < contourObject.Size; i++)
        {
            if (i == 0)
            {
                longMaxContour   = CvInvoke.ContourArea(contourObject[i]);
                indexBestContour = 0;
            }
            else
            {
                if (longMaxContour < CvInvoke.ContourArea(contourObject[i]))
                {
                    longMaxContour   = CvInvoke.ContourArea(contourObject[i]);
                    indexBestContour = i;
                }
            }
        }
        if (indexBestContour > -1)
        {
            double area = CvInvoke.ContourArea(contourObject[indexBestContour]);
            if (suffix == "Left")
            {
                leftZ = area / (image.Width * image.Height) * zNormalFactor;
            }
            else if (suffix == "Right")
            {
                rightZ = area / (image.Width * image.Height) * zNormalFactor;
            }
            else
            {
                throw new NotImplementedException();
            }
        }
        else
        {
            if (suffix == "Left")
            {
                leftZ = -1;
            }
            else if (suffix == "Right")
            {
                rightZ = -1;
            }
            else
            {
                throw new NotImplementedException();
            }
        }


        //CvInvoke.Imshow("Mon Image" + suffix, imageBinLeft);


        if (contourObject.Size > 0)
        {
            CvInvoke.DrawContours(img, contourObject, indexBestContour, colorConst, 2);
        }

        if (suffix == "Left")
        {
            RILeftHand.texture = ImageToTexture(imageBinLeft.Clone(), textureLeftHand);
        }
        else
        {
            RIRightHand.texture = ImageToTexture(imageBinLeft.Clone(), textureRightHand);
        }
    }
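
    // --- Illustrative sketch (not part of the original example) ---
    // The colour-segmentation step of DetectBall on its own: threshold an HSV image between
    // two bounds and clean the mask with an erode/dilate pair (an opening), mirroring the
    // structuring element and iteration count used above. Helper name is hypothetical.
    private Image <Gray, byte> SegmentByHsvSketch(Image <Hsv, byte> hsv, Hsv low, Hsv high)
    {
        Image <Gray, byte> mask = hsv.InRange(low, high);

        using (Mat k = CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(3, 3), new Point(1, 1)))
        {
            // Opening: erosion removes speckles, dilation restores the size of the remaining blob.
            CvInvoke.Erode(mask, mask, k, new Point(1, 1), 3, BorderType.Default, new MCvScalar(10));
            CvInvoke.Dilate(mask, mask, k, new Point(1, 1), 3, BorderType.Default, new MCvScalar(10));
        }
        return mask;
    }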
示例#29
0
        // Maximum Value, Minimum Value and their locations
        // Mean Color or Mean Intensity

        public void calcularRegionProps(Image <Gray, byte> inputRegionIMG, double AreaMin)
        {
            // Declare the vector of vectors of points
            Emgu.CV.Util.VectorOfVectorOfPoint vetordeVetdePontos = new Emgu.CV.Util.VectorOfVectorOfPoint();
            // Declare the hierarchy matrix
            Mat hierarquia = new Mat();

            // Apply the FindContours function
            CvInvoke.FindContours(
                inputRegionIMG                                       // input image
                , vetordeVetdePontos                                 // receives the contour points (vector of vectors)
                , hierarquia                                         // receives the contour hierarchy
                , Emgu.CV.CvEnum.RetrType.Tree                       // retrieval mode: full contour tree
                , Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone   // contour approximation method
                , new Point(0, 0)                                    // point offset; can be omitted or left at (0, 0)
                );

            Image <Bgr, Byte> input = inputRegionIMG.Convert <Bgr, byte>();

            //Up to this point the contour has been found. There should be only one, so it should be contour 0,
            //but we still run a check to decide which contour to use

            // Buffer points
            PointF buffer_Minx = new PointF(inputRegionIMG.Width, inputRegionIMG.Height);
            PointF buffer_MaxX = new PointF(0, 0);
            PointF buffer_MinY = new PointF(inputRegionIMG.Width, inputRegionIMG.Height);
            PointF buffer_MaxY = new PointF(0, 0);


            for (int i = 0; i < vetordeVetdePontos.Size; i++)
            {
                Area = Math.Abs(CvInvoke.ContourArea(vetordeVetdePontos[i], true));      // compute the contour area

                if (Area >= AreaMin)
                {
                    for (int iter = 0; iter < vetordeVetdePontos[i].Size; iter++)
                    {
                        //----------------- Extreme point computation -----------------
                        // Left-most point
                        if (vetordeVetdePontos[i][iter].X < buffer_Minx.X)
                        {
                            buffer_Minx = vetordeVetdePontos[i][iter];
                        }

                        // Right-most point
                        if (vetordeVetdePontos[i][iter].X > buffer_MaxX.X)
                        {
                            buffer_MaxX = vetordeVetdePontos[i][iter];
                        }

                        // Top-most point (smallest Y)
                        if (vetordeVetdePontos[i][iter].Y < buffer_MinY.Y)
                        {
                            buffer_MinY = vetordeVetdePontos[i][iter];
                        }

                        // Bottom-most point (largest Y)
                        if (vetordeVetdePontos[i][iter].Y > buffer_MaxY.Y)
                        {
                            buffer_MaxY = vetordeVetdePontos[i][iter];
                        }
                        //----------------- End of extreme point computation -----------------
                    }

                    // ------------- Centroid ---------------------
                    Moments momento = CvInvoke.Moments(vetordeVetdePontos[i]);
                    int     X       = (int)(momento.M10 / momento.M00);
                    int     Y       = (int)(momento.M01 / momento.M00);
                    Centroid = new PointF(X, Y);
                    // ------------------------------------------------------

                    // ------------ Aspect ratio ------------------
                    AspectRatio = inputRegionIMG.Width / inputRegionIMG.Height;
                    //-------------------------------------------------------

                    //------------- Bounding box ------------------
                    BoundingBox = CvInvoke.BoundingRectangle(vetordeVetdePontos[i]);
                    //-------------------------------------------------------

                    // ------------   Extent   -------------------
                    float rect_area = BoundingBox.Width * BoundingBox.Height;
                    Extent = (float)Area / rect_area;
                    // ------------------------------------------------------

                    // --------------- Convex hull --------------------------
                    CvInvoke.ConvexHull(vetordeVetdePontos[i], ConvexHull, false);
                    //-------------------------------------------------------

                    // --------------- Convex hull area ---------------------
                    ConvexHull_area = CvInvoke.ContourArea(ConvexHull);
                    //-------------------------------------------------------

                    //-----------------  Solidity ---------------------------
                    Solidity = Area / ConvexHull_area;
                    // ------------------------------------------------------

                    //-------------- Equivalent diameter -------------------
                    EquivalentDiameter = Math.Sqrt(4 * Area / Math.PI);
                    // ------------------------------------------------------

                    //--------------- Minimum enclosing circle --------------------
                    CirculoEnvolvente = CvInvoke.MinEnclosingCircle(vetordeVetdePontos[i]);
                    //-------------------------------------------------------

                    //--------------- Contour perimeter --------------------
                    perimetro = CvInvoke.ArcLength(vetordeVetdePontos[i], true);
                    // -----------------------------------------------------

                    // -------------- Circularity (shape factor) ----------
                    Circularity = (4 * Math.PI * Area) / (perimetro * perimetro);
                    //------------------------------------------------------

                    // --------------- Check whether the contour is convex ---------------
                    isConvex = CvInvoke.IsContourConvex(vetordeVetdePontos[i]);
                    //------------------------------------------------------

                    // ------------- Contour approximation ---------------
                    CvInvoke.ApproxPolyDP(
                        vetordeVetdePontos[i],              // the contour being processed
                        ContourApproximation,               // vector that will hold the approximation
                        0.1 * perimetro,                    // epsilon: 10% of the perimeter
                        true                                // treat the contour as closed
                        );
                    // -----------------------------------------------------

                    // ------------- Store the contour --------------------
                    Contorno = vetordeVetdePontos[i];
                    // ------------------------------------------------------

                    // ------------  Rotated rectangle  ---------------------
                    RotatedRect retanguloRodado = CvInvoke.MinAreaRect(vetordeVetdePontos[i]);
                    PointF[]    vetorPontos     = CvInvoke.BoxPoints(retanguloRodado);
                    BoundingBoxRectRodado = new Point[vetorPontos.Length];
                    for (int iterador = 0; iterador < vetorPontos.Length; iterador++)
                    {
                        BoundingBoxRectRodado[iterador].X = (int)vetorPontos[iterador].X;
                        BoundingBoxRectRodado[iterador].Y = (int)vetorPontos[iterador].Y;
                    }
                    // ------------ Angle of the rotated rectangle ----------------------
                    AnguloRectExterior = retanguloRodado.Angle;
                    // -----------------------------------------------------

                    // ------------ Fitted ellipse --------------------------
                    EllipseValores = CvInvoke.FitEllipseAMS(vetordeVetdePontos[i]);
                    // -----------------------------------------------------

                    // Fitting a Line ---------------
                    //---------------------------

                    // exit the for loop
                    i = vetordeVetdePontos.Size;
                }
            }

            Extreme.Mais_a_esquerda = buffer_Minx;
            Extreme.Mais_a_Direita  = buffer_MaxX;
            Extreme.Mais_em_baixo   = buffer_MaxY;
            Extreme.Mais_em_cima    = buffer_MinY;
        }
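
        // --- Illustrative sketch (not part of the original example) ---
        // The centroid computation used above, isolated: cx = M10 / M00, cy = M01 / M00,
        // where M00 corresponds to the contour area and must be non-zero. Helper name is hypothetical.
        private static PointF ContourCentroidSketch(Emgu.CV.Util.VectorOfPoint contour)
        {
            Moments m = CvInvoke.Moments(contour);
            if (m.M00 == 0)
                return PointF.Empty;                                      // degenerate contour, no centroid
            return new PointF((float)(m.M10 / m.M00), (float)(m.M01 / m.M00));
        }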
        private void Capture_ImageGrabbed(object sender, EventArgs e)
        {
            lock (Capture)
            {
                if (Busy)
                {
                    return;
                }
                Busy = true;
            }

            try
            {
                Capture.Retrieve(CameraFeed);
                var imageFrame = CameraFeed.ToImage <Bgr, byte>();

                #region smooth
                var imgSmoothed = imageFrame.PyrDown().PyrUp();
                imgSmoothed._SmoothGaussian(Smooth);
                imageBox2.Image = imgSmoothed;
                #endregion

                #region Color filter
                var imgColorFiltered = imgSmoothed.InRange(new Bgr(Bmin, Gmin, Rmin), new Bgr(Bmax, Gmax, Rmax));
                imgColorFiltered = imgColorFiltered.PyrDown().PyrUp();
                imgColorFiltered._SmoothGaussian(Smooth); // 3
                imageBox3.Image = imgColorFiltered;
                #endregion


                #region erosion, delete noise
                //var imageErode = imgColorFiltered.Erode(2);
                //imageBox4.Image = imageErode;
                #endregion

                #region dilation
                //var imageDilate = imageErode.Dilate(5);
                //imageBox5.Image = imageDilate;
                #endregion

                #region opening
                // open > erosion then dilation
                Mat kernel  = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1));
                var opening = imgColorFiltered.MorphologyEx(MorphOp.Open, kernel, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(1.0));
                imageBox4.Image = opening;
                #endregion

                #region closing
                // close > dilation then erosion
                //Mat kernel = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1));
                //var opening = imgColorFiltered.MorphologyEx(MorphOp.Close, kernel, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(1.0));
                //imageBox4.Image = opening;
                #endregion

                #region gradient
                // gradient > difference between dilation and erosion
                //Mat kernel = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1));
                //var opening = imgColorFiltered.MorphologyEx(MorphOp.Gradient, kernel, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(1.0));
                //imageBox4.Image = opening;
                #endregion



                #region Canny and edge detection
                //UMat cannyEdges = new UMat();
                //CvInvoke.Canny(imageDilate, cannyEdges, CannyThreshold1, CannyThreshold2, CannyApertureSize, CannyL2Gradient);
                //imageBox6.Image = cannyEdges;
                #endregion


                #region objects
                VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
                CvInvoke.FindContours(opening /*cannyEdges*/, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                int count = contours.Size;
                for (int i = 0; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                            double area = CvInvoke.ContourArea(approxContour, false);
                            if (area > 300)
                            {
                                CvInvoke.DrawContours(imageFrame, contour, -1, new MCvScalar(255, 0, 0), 2);
                                imageFrame.Draw($"{area}", new Point(50, 50), Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.8, new Bgr(Color.Red));
                            }
                        }
                }
                #endregion
                imageBox1.Image = imageFrame;



                //var imgSmoothed = imageFrame.PyrDown().PyrUp();
                //imgSmoothed._SmoothGaussian(3);
                //imageBox2.Image = imgSmoothed;

                //var imgColorFiltered = imgSmoothed.InRange(new Bgr(Bmin, Gmin, Rmin), new Bgr(Bmax, Gmax, Rmax));
                //imgColorFiltered = imgColorFiltered.PyrDown().PyrUp();
                //imgColorFiltered._SmoothGaussian(3);
                //imageBox3.Image = imgColorFiltered;

                //Gray grayCannyThreshold = new Gray(GrayCannyTh);
                //Gray grayCircleThreshold = new Gray(GrayCircleTh);
                //Gray grayLinking = new Gray(80);

                //var imgCanny = imgColorFiltered.Canny(grayCannyThreshold.Intensity, grayLinking.Intensity);
                //imgColorFiltered.Canny(1.0, 2.0, 3, false);

                //var imgCircles = imageFrame.CopyBlank();
                //var imgLines = imageFrame.CopyBlank();
                //var imgPoly = imageFrame.CopyBlank();

                //double dblAccumRes = 1.0;
                //double dblMinDist = 1.0;
                //int intMinRadius = 50;
                //int intMaxRadius = 150;

                ////var circles = imgColorFiltered.HoughCircles(grayCannyThreshold, grayCircleThreshold, dblAccumRes, dblMinDist, intMinRadius, intMaxRadius)[0];
                ////foreach (var circ in circles)
                ////{
                ////    imgCircles.Draw(circ, new Bgr(Color.Red), 2);
                ////    imageFrame.Draw(circ, new Bgr(Color.Red), 2);
                ////}
                ////Contour<Point> contours = imgCanny.FindContours();
                ////List<RotatedRect> lstRectangles = new List<RotatedRect>();



                //Double dblRhoRes = 1.0;
                //Double dblThetaRes = 4.0 * (Math.PI / 180.0);
                //int intThreshold = 20;
                //Double dblMinLineWidth = 30.0;
                //Double dblMinGapBetweenLines = 10.0;

                //imgColorFiltered.Ho
                //LineSegment2D[] lines = imgCanny.Clone().HoughLinesBinary(dblRhoRes, dblThetaRes, intThreshold, dblMinLineWidth, dblMinGapBetweenLines)[0];

                //foreach (LineSegment2D line in lines)
                //{
                //    imgLines.Draw(line, new Bgr(Color.DarkGreen), 2);
                //    imageFrame.Draw(line, new Bgr(Color.DarkGreen), 2);
                //}
                //imageBox4.Image = imgLines;


                //VectorOfVectorOfPointF contours = new VectorOfVectorOfPointF();
                //Mat hierarchy = null;
                //CvInvoke.FindContours(imgCanny, contours, hierarchy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainCode);

                ////Contour<Point> contours = imgCanny.FindContours();
                //List<MCvBox2D> lstRectangles = new List<MCvBox2D>();
                //List<Contour<Point>> lstPoluhons = new List<Contour<Point>>();

                //while (contours != null)
                //{
                //    Contour<Point> contour = contours.ApproxPoly(10.0);

                //    if (contour.Area > 250.0)
                //    {
                //        if (contour.Total == 4)
                //        {
                //            Point[] ptPoints = contour.ToArray();
                //            Boolean blnIsRectangle = true;

                //            LineSegment2D[] ls2dEdges = PointCollection.PolyLine(ptPoints, true);

                //            for (int i = 0; i < ls2dEdges.Length - 1; i++)
                //            {
                //                Double dblAngle = Math.Abs(ls2dEdges[(i + 1) % ls2dEdges.Length].GetExteriorAngleDegree(ls2dEdges[i]));
                //                if (dblAngle < 80.0 || dblAngle > 100.0)
                //                {
                //                    blnIsRectangle = false;
                //                }
                //            }

                //            if (blnIsRectangle)
                //                lstRectangles.Add(contour.GetMinAreaRect());


                //        }
                //    }



                //    contours = contours.HNext;
                //}



                //foreach (MCvBox2D rect in lstRectangles)
                //{
                //    imgTrisRectsPolys.Draw(rect, new Bgr(Color.Blue), 2);
                //    if (chbDrawTrianglesAndPolygansOnOriginalImage.Checked == true)
                //    {
                //        imgOriginal.Draw(rect, new Bgr(Color.Blue), 2);
                //    }
                //}



                //CvInvoke.CvtColor(CameraFeed, HSV, Emgu.CV.CvEnum.ColorConversion.Bgr2Hsv);


                //CvInvoke.InRange(HSV, new MCvScalar(Bmin, Gmin, Rmin), new MCvScalar(Bmax, Gmax, Rmax), Threshold);

                //CvInvoke.Mor



                //histogramBoxCapture. = CameraFeed;

                //var imageFrame = Frame.ToImage<Bgr, byte>();
            }
            catch (Exception ex)
            {
            }

            Busy = false;
        }
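
        // --- Illustrative sketch (not part of the original example) ---
        // The contour stage above on its own: find external contours on a binary mask,
        // approximate each one, and keep those whose approximated area exceeds a threshold
        // (300, as above). Assumes the usual Emgu CV usings plus System.Collections.Generic;
        // helper name is hypothetical.
        private static List <VectorOfPoint> LargeContoursSketch(Image <Gray, byte> mask, double minArea = 300)
        {
            List <VectorOfPoint> kept = new List <VectorOfPoint>();

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(mask, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
                for (int i = 0; i < contours.Size; i++)
                {
                    VectorOfPoint approx = new VectorOfPoint();
                    CvInvoke.ApproxPolyDP(contours[i], approx, CvInvoke.ArcLength(contours[i], true) * 0.05, true);
                    if (CvInvoke.ContourArea(approx, false) > minArea)
                    {
                        kept.Add(approx);       // caller disposes the returned vectors
                    }
                    else
                    {
                        approx.Dispose();
                    }
                }
            }
            return kept;
        }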