Example #1
0
    /// <summary>
    /// Webcam frame callback: retrieves the current frame, detects frontal faces,
    /// crops the view to the first detected face, then applies an adaptive
    /// threshold and draws the contours whose area lies in the configured range.
    /// </summary>
    /// <param name="sender">Event source (unused).</param>
    /// <param name="e">Event data (unused).</param>
    public void HandleWebcamQueryFrame(object sender, EventArgs e)
    {
        if (webcam.IsOpened)
        {
            webcam.Retrieve(image);
        }
        if (image.IsEmpty)
        {
            return;
        }

        // CvtColor allocates its destination itself, so a fresh Mat is enough
        // here — the previous Clone() copied pixels only to overwrite them.
        imageGray = new Mat();
        CvInvoke.CvtColor(image, imageGray, ColorConversion.Bgr2Gray);
        if (imageGray.IsEmpty)
        {
            return;
        }

        frontFaces = frontFaceCascadeClassifier.DetectMultiScale(image: imageGray, scaleFactor: 1.1, minNeighbors: 5, minSize: new Size(MIN_FACE_SIZE, MIN_FACE_SIZE), maxSize: new Size(MAX_FACE_SIZE, MAX_FACE_SIZE));
        Debug.Log(frontFaces.Length.ToString());

        for (int i = 0; i < frontFaces.Length; i++)
        {
            // BUGFIX: thickness was 0, which cv::rectangle rejects (it must be
            // positive, or -1 for a filled rectangle); use a 2 px outline.
            CvInvoke.Rectangle(image, frontFaces[i], new MCvScalar(0, 180, 0), 2);
            Debug.Log("i: " + i.ToString());
        }

        // Focus the view on the first detected face (ROI view onto the frame).
        if (frontFaces.Length > 0)
        {
            image = new Mat(image, frontFaces[0]);
        }
        DisplayFrame(image);

        // Adaptive thresholding + contour extraction on the grayscale frame.
        // The hierarchy Mat is only needed during FindContours, so dispose it.
        using (Mat hierarchy = new Mat())
        {
            CvInvoke.AdaptiveThreshold(imageGray, imageGray, maxValue, AdaptiveThresholdType.MeanC, ThresholdType.Binary, blockSize, diviser);
            CvInvoke.FindContours(imageGray, allContours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);
        }

        // Keep only contours whose area lies in (contourSizeMin, contourSizeMax).
        desiredContours.Clear();
        for (int i = 0; i < allContours.Size; i++)
        {
            // Compute the area once instead of twice per contour.
            double area = CvInvoke.ContourArea(allContours[i]);
            if (area > contourSizeMin && area < contourSizeMax)
            {
                desiredContours.Push(allContours[i]);
            }
        }

        CvInvoke.DrawContours(image, desiredContours, -1, new MCvScalar(200, 100, 200), 2);

        CvInvoke.Imshow("Webcam view Normal", image);
        CvInvoke.Imshow("Webcam view Gray", imageGray);
    }
Example #2
0
    /// <summary>
    /// Processing pipeline: median-filters the binary input, applies a
    /// morphological closing, keeps the contours whose area is in the
    /// configured range, then draws them and the centroid of the biggest
    /// one onto <paramref name="output"/>.
    /// </summary>
    /// <param name="m">Binary input image.</param>
    /// <param name="structure">Structuring element used by the closing.</param>
    /// <param name="output">Image the contours and centroid are drawn on.</param>
    /// <returns>The (x, y) centroid of the biggest retained contour; (0, 0) when none.</returns>
    private Tuple <int, int> Traitement(Mat m, Mat structure, Mat output)
    {
        // Median filter to remove salt-and-pepper noise.
        Mat binaryMatFiltered = MedianFilter(m);

        // Morphological closing (erosion + dilation).
        Mat fermetureMat = Fermeture(binaryMatFiltered, structure);

        VectorOfVectorOfPoint contours        = new VectorOfVectorOfPoint();
        VectorOfVectorOfPoint desiredContours = new VectorOfVectorOfPoint();
        Mat hierarchy = new Mat();

        CvInvoke.FindContours(fermetureMat, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);

        // Keep only contours whose area lies in (contourSizeMin, contourSizeMax).
        for (int i = 0; i < contours.Size; i++)
        {
            // Compute the area once instead of twice per contour.
            double area = CvInvoke.ContourArea(contours[i]);
            if (area > contourSizeMin && area < contourSizeMax)
            {
                desiredContours.Push(contours[i]);
            }
        }

        // Find the biggest retained contour (unused index variable removed).
        VectorOfPoint biggest_contour      = new VectorOfPoint();
        double        biggest_contour_area = 0;

        for (int i = 0; i < desiredContours.Size; i++)
        {
            double area = CvInvoke.ContourArea(desiredContours[i]);
            if (area > biggest_contour_area)
            {
                biggest_contour      = desiredContours[i];
                biggest_contour_area = area;
            }
        }

        // Centroid from spatial moments. BUGFIX: guard against M00 == 0
        // (no retained contour), which previously divided by zero (NaN).
        var moments = CvInvoke.Moments(biggest_contour);
        int cx = 0, cy = 0;
        if (moments.M00 != 0)
        {
            cx = (int)(moments.M10 / moments.M00);
            cy = (int)(moments.M01 / moments.M00);
        }
        Point centroid = new Point(cx, cy);

        CvInvoke.DrawContours(output, desiredContours, -1, new MCvScalar(150), 3);
        CvInvoke.Circle(output, centroid, 5, new MCvScalar(150), 3);

        return(Tuple.Create(cx, cy));
    }
Example #3
0
        /// <summary>
        /// Frame callback: background-subtracts the frame on the GPU, extracts
        /// moving blobs, merges nearby fragments via dilation, then tracks the
        /// blobs by their centroids and draws the result into the image boxes.
        /// </summary>
        private void ProcessFrame(object sender, EventArgs e)
        {
            if (_capture != null && _capture.Ptr != IntPtr.Zero)
            {
                // GPU pipeline: MOG2 background subtraction + binary threshold.
                _capture.Retrieve(frame, 0);
                gpuFrame.Upload(frame);
                cudaBgMOG2.Apply(gpuFrame, gpuSub);
                CudaInvoke.Threshold(gpuSub, gpuSub, 12, 255, Emgu.CV.CvEnum.ThresholdType.Binary);
                gpuSub.Download(outSub);

                CvInvoke.FindContours(outSub, contours, hiererachy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);

                // Keep only contours large enough to be real motion (> 50 px^2).
                for (int i = 0; i < contours.Size; i++)
                {
                    if (CvInvoke.ContourArea(contours[i]) > 50)
                    {
                        contoursGood.Push(contours[i]);
                    }
                }

                // Paint the retained contours filled onto a mask and dilate so
                // fragments of a single object merge into one blob.
                grayImage = new Image <Gray, byte>(frame.Width, frame.Height, new Gray(0));
                grayImage.SetZero();
                CvInvoke.DrawContours(grayImage, contoursGood, -1, new MCvScalar(255, 255, 255), -1);
                CvInvoke.Dilate(grayImage, grayImage, element, new Point(-1, -1), 6, Emgu.CV.CvEnum.BorderType.Constant, new MCvScalar(255, 255, 255));
                contoursGood.Clear();

                // Second contour pass on the merged blob mask.
                CvInvoke.FindContours(grayImage, contours, hiererachy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);

                List <Point> points = new List <Point>();

                for (int i = 0; i < contours.Size; i++)
                {
                    MCvMoments moments = CvInvoke.Moments(contours[i], false);
                    // BUGFIX: skip degenerate contours (M00 == 0) — dividing by
                    // zero previously injected a spurious point at the origin
                    // into the blob tracker.
                    if (moments.M00 == 0)
                    {
                        continue;
                    }
                    Point WeightedCentroid = new Point((int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00));
                    points.Add(WeightedCentroid);
                }

                // Assign the centroids to tracked blobs and render them.
                blobList.AssignToBlobs(points);
                blobList.Draw(frame);
                blobList.Draw(mask);
                blobList.Update();

                CvInvoke.DrawContours(frame, contours, -1, new MCvScalar(0, 0, 255));

                imageBox1.Image = frame;
                imageBox2.Image = mask;

                grayImage.Dispose();

                indexFrame++;
            }
        }
Example #4
0
        //Set the maximum contour
        public void largestContour()
        {
            int maxVal = 0;

            for (int i = 0; i < contours.Size; i++)
            {
                if (contours[i].Size > maxVal)
                {
                    maxVal = contours[i].Size;
                    if (maxContour.Size != 0)
                    {
                        maxContour.Clear();
                    }
                    maxContour.Push(contours[i]);
                }
            }
        }
Example #5
0
        // this method find the  contour in given area region
        private void FindTargetByArea(VectorOfVectorOfPoint invvp, double areamin, double areamax, ref VectorOfVectorOfPoint outvvp)
        {
            int area = 0;

            outvvp.Clear();
            for (int i = 0; i < invvp.Size; i++)
            {
                using (VectorOfPoint contour = invvp[i])
                    using (VectorOfPoint approxContour = new VectorOfPoint())
                    {
                        CvInvoke.ApproxPolyDP(contour, approxContour, 5, true);
                        area = (int)Math.Abs(CvInvoke.ContourArea(approxContour));
                        //area in given region
                        if (area > areamin && area < areamax)
                        {
                            outvvp.Push(approxContour);
                        }
                    }
            }
        }
Example #6
0
        /// <summary>
        /// Reads a lateral-flow (COVID-19) test photo from IMGDict["input"],
        /// isolates the result strips via edge detection and contour-hierarchy
        /// analysis, counts the bands in each strip, and labels the image
        /// "Negative" (2 bands) or "Positive" (otherwise).
        /// </summary>
        private void COVID19Test_Click(object sender, EventArgs e)
        {
            try
            {
                if (!IMGDict.ContainsKey("input"))
                {
                    // BUGFIX: corrected typo in the user-facing message ("Selct").
                    throw new Exception("Select an image first.");
                }

                // Smooth, find edges, then close gaps so bands form solid regions.
                var img        = IMGDict["input"].SmoothGaussian(3);
                var edges      = img.Convert <Gray, byte>().Canny(150, 50);
                Mat morphology = new Mat();
                CvInvoke.MorphologyEx(edges, morphology, MorphOp.Close, Mat.Ones(5, 5, DepthType.Cv8U, 1),
                                      new Point(-1, -1), 3, BorderType.Default, new MCvScalar(0));

                VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
                Mat h = new Mat();
                CvInvoke.FindContours(morphology, contours, h, RetrType.Tree, ChainApproxMethod.ChainApproxSimple);

                // Walk the hierarchy array ([next, prev, firstChild, parent] per
                // contour): keep innermost contours (no child, has a parent)
                // whose bounding box is not too wide (aspect ratio <= 2).
                var preprocessed = edges.CopyBlank();
                var data         = h.GetData();
                for (int r = 0; r < data.GetLength(0); r++)
                {
                    for (int c = 0; c < data.GetLength(1); c++)
                    {
                        if ((((int)data.GetValue(r, c, 2))) == -1 &&
                            (((int)data.GetValue(r, c, 3)) > -1))
                        {
                            var bbox = CvInvoke.BoundingRectangle(contours[c]);
                            var AR   = bbox.Width / (float)bbox.Height;
                            if (AR <= 2.0)
                            {
                                CvInvoke.DrawContours(preprocessed, contours, c, new MCvScalar(255), -1);
                            }
                        }
                    }
                }

                // Vertical dilation joins the bands of one strip into one shape.
                var output1 = edges.CopyBlank();
                CvInvoke.Dilate(preprocessed, output1, Mat.Ones(10, 1, DepthType.Cv8U, 1), new Point(-1, -1),
                                1, BorderType.Default, new MCvScalar(0));

                contours.Clear();
                CvInvoke.FindContours(output1, contours, h, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                // A strip must be clearly tall and narrow (height > 3 * width).
                var finaloutput = edges.CopyBlank();
                for (int i = 0; i < contours.Size; i++)
                {
                    var bbox = CvInvoke.BoundingRectangle(contours[i]);
                    if (bbox.Height > (bbox.Width * 3))
                    {
                        CvInvoke.DrawContours(finaloutput, contours, i, new MCvScalar(255), -1);
                        // Count the bands inside the strip's bounding box; the
                        // ROI is reset right after so later iterations see the
                        // full image again.
                        preprocessed.ROI = bbox;
                        int count = CountContours(preprocessed);
                        preprocessed.ROI = Rectangle.Empty;
                        string    msg = "";
                        MCvScalar color;
                        if (count == 2)
                        {
                            // Control band + test band both present => negative.
                            msg   = "Negative";
                            color = new MCvScalar(0, 255, 0);
                        }
                        else
                        {
                            msg   = "Positive";
                            color = new MCvScalar(0, 0, 255);
                        }
                        int margin = 50;
                        CvInvoke.PutText(img, msg, new(bbox.X - margin, bbox.Y - margin), FontFace.HersheyPlain, 2.5, color, 3);
                    }
                }

                pictureBox1.Image = img.ToBitmap();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #7
0
File: Test.cs  Project: Piteul/EmguCV_TP1
    // Update is called once per frame.
    // Grabs a webcam frame, builds gray/HSV/blurred variants, isolates one HSV
    // color range, finds the biggest contour of that color, computes its
    // centroid, applies a morphological opening, and shows the results.
    void Update()
    {
        //A Mat image - basic container
        Mat image;

        //Query the frame the webcam
        image = webcam.QueryFrame();

        //Flip the image
        Mat flippedImage = image.Clone();

        CvInvoke.Flip(image, flippedImage, FlipType.Horizontal);


        Mat imgGray = image.Clone();
        Mat imgHSV  = image.Clone();

        CvInvoke.CvtColor(image, imgGray, ColorConversion.Bgr2Gray);
        CvInvoke.CvtColor(image, imgHSV, ColorConversion.Bgr2Hsv);

        Mat imgBlur = image.Clone();

        // Fixed selector: only the Median branch ever runs; the other cases
        // are kept as a menu of blur options to experiment with.
        switch (BlurType.Median)
        {
        case BlurType.Blur:
            CvInvoke.Blur(image, imgBlur, new Size(2, 2), new Point(-1, 1));
            break;

        case BlurType.Median:
            CvInvoke.MedianBlur(image, imgBlur, 3);
            break;

        case BlurType.Gaussian:
            CvInvoke.GaussianBlur(image, imgBlur, new Size(2, 2), 2);
            break;
        }

        // Keep only the pixels inside the configured HSV range.
        Image <Hsv, byte> imageHSV = imgHSV.ToImage <Hsv, byte>();

        Hsv teinteBas  = new Hsv(hMin, sMin, vMin);
        Hsv teinteHaut = new Hsv(hMax, sMax, vMax);
        Image <Gray, byte> imgFilter = imageHSV.InRange(teinteBas, teinteHaut);

        //Contour
        Mat hierarchy = new Mat();

        contours.Clear();
        biggestContourArea = 0;
        CvInvoke.FindContours(imgFilter, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);

        // Track the biggest contour found on THIS frame.
        bool contourFound = false;

        for (int i = 0; i < contours.Size; i++)
        {
            if (CvInvoke.ContourArea(contours[i]) > biggestContourArea)
            {
                biggestContour      = contours[i];
                biggestContourIndex = i;
                biggestContourArea  = CvInvoke.ContourArea(contours[i]);
                contourFound        = true;
            }
        }

        Debug.Log(biggestContourArea.ToString());

        // BUGFIX: only draw/measure when a contour was found this frame.
        // biggestContourIndex is a field, so on an empty frame it kept a stale
        // index (out of range for DrawContours) and Moments on the stale/empty
        // contour divided by M00 == 0.
        if (contourFound)
        {
            CvInvoke.DrawContours(image, contours, biggestContourIndex, new MCvScalar(200, 100, 200), 3);

            //Centroid
            var   moments  = CvInvoke.Moments(biggestContour);
            int   cx       = (int)(moments.M10 / moments.M00);
            int   cy       = (int)(moments.M01 / moments.M00);
            Point centroid = new Point(cx, cy);
        }

        // Morphological opening (erosion then dilation) on the HSV mask.
        Mat structuringElement = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(2 * operationSize + 1, 2 * operationSize + 1), new Point(operationSize, operationSize));
        Mat imgMorpho          = image.Clone();

        CvInvoke.Erode(imgFilter, imgMorpho, structuringElement, new Point(-1, -1), nbOfIter, BorderType.Constant, new MCvScalar(0));
        CvInvoke.Dilate(imgMorpho, imgMorpho, structuringElement, new Point(-1, -1), nbOfIter, BorderType.Constant, new MCvScalar(0));

        //Invoke the c++ interface function "imshow"
        //Display image in a separated window named "Webcam view"
        CvInvoke.Imshow("Webcam view classic", image);
        CvInvoke.Imshow("Webcam view HSVFilter", imgFilter);
        CvInvoke.Imshow("Webcam view Morpho", imgMorpho);

        CvInvoke.WaitKey(24);
    }
Example #8
0
        /// <summary>
        /// Vision-tracking entry point: connects to NetworkTables, starts a
        /// battery/camera watchdog thread, then loops forever capturing frames,
        /// HSV-filtering them, and searching the convex hulls of the contours
        /// for a 4-corner retro-reflective target; the LARGEST passing target
        /// is reported back over NetworkTables.
        /// </summary>
        static void Main(string[] args)
        {
            NetworkTable.SetClientMode();
            NetworkTable.SetTeam(4488);
            NetworkTable.SetIPAddress("10.44.88.2");
#if KANGAROO
            NetworkTable.SetNetworkIdentity("Kangaroo");
#else
            NetworkTable.SetNetworkIdentity("CameraTracking");
#endif
            //Switch between Kangaroo and Desktop.
            //On kangaroo, use different table and don't display image
            visionTable = NetworkTable.GetTable("SmartDashboard");

            Mat HsvIn = new Mat(), HsvOut = new Mat(), output = new Mat(), Temp = new Mat();
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            // Fallback HSV thresholds used when NetworkTables has no override.
            double[] defaultLow  = new double[] { 50, 44, 193 };
            double[] defaultHigh = new double[] { 90, 255, 255 };

            VectorOfDouble arrayLow  = new VectorOfDouble(3);
            VectorOfDouble arrayHigh = new VectorOfDouble(3);

            Point TopMidPoint    = new Point((int)(ImageWidth / 2), 0);
            Point BottomMidPoint = new Point((int)(ImageWidth / 2), (int)ImageHeight);

            Point LeftMidPoint  = new Point(0, (int)(ImageHeight / 2));
            Point RightMidPoint = new Point((int)ImageWidth, (int)(ImageHeight / 2));

            Stopwatch sw = new Stopwatch();

            CameraWatcher cameraChecker = new CameraWatcher();

            int count = 0;

            // Watchdog thread: publishes battery level and camera health every
            // 5 s; relaunches the camera-setup helper after a reconnect.
            Thread timer = new Thread(() =>
            {
                while (true)
                {
                    // update kangaroo battery info
                    visionTable.PutNumber("KangarooBattery",
                                          System.Windows.Forms.SystemInformation.PowerStatus.BatteryLifePercent);

                    // check camera status
                    int cameraState = cameraChecker.CheckState;
                    // camera states:
                    // 0 = Camera is found and working
                    // 1 = Camera is not found, waiting for reconnect to reinitialize
                    // 2 = Camera was found again, re-init was kicked off
                    visionTable.PutNumber("CameraState", cameraState);
                    if (cameraState == 0)
                    {
                        // Camera is connected and fine
                    }
                    else if (cameraState == 1)
                    {
                        // Camera is disconnected or having problems
                    }
                    else if (cameraState == 2)
                    {
                        // Camera reconnected
                        Process.Start("C:/Users/Shockwave/Desktop/NewKangaroo/cameraRestart.exe");     // Launch external exe to kill process, set up camera, and restart
                    }

                    Thread.Sleep(5000);
                }
            });
            timer.Start();
            GC.KeepAlive(timer);
            int imageCount = 0;

            ImageBuffer im  = new ImageBuffer();
            Capture     cap = new Capture(0); //Change me to 1 to use external camera
            cap.FlipVertical = true;

            cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth, 1280);
            cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight, 720);

            ImageSaver saver = new ImageSaver();
            int  rdi        = 1;
            int  kernalSize = 6 * rdi + 1;
            Size ksize      = new Size(kernalSize, kernalSize);

            while (true)
            {
                count++;
                sw.Restart();
                cap.Grab();
                im.GyroAngle = visionTable.GetNumber("Gyro", 0.0);
                cap.Retrieve(im.Image);

                ImageBuffer image = im.Clone();

#if KANGAROO
                visionTable.PutNumber("KangarooHeartBeat", count);
#endif
                if (image == null || image.IsEmpty)
                {
                    image?.Dispose();
                    Thread.Yield();
                    continue;
                }

                // HSV thresholds: prefer the values published on NetworkTables,
                // fall back to the compiled-in defaults when absent/malformed.
                double[] ntLow  = visionTable.GetNumberArray("HSVLow", defaultLow);
                double[] ntHigh = visionTable.GetNumberArray("HSVHigh", defaultHigh);

                if (ntLow.Length != 3)
                {
                    ntLow = defaultLow;
                }
                if (ntHigh.Length != 3)
                {
                    ntHigh = defaultHigh;
                }

                arrayLow.Clear();
                arrayLow.Push(ntLow);
                arrayHigh.Clear();
                arrayHigh.Push(ntHigh);

                // Blur, then swap the blurred frame in and free the raw one.
                Mat BlurTemp = new Mat();
                CvInvoke.GaussianBlur(image.Image, BlurTemp, ksize, rdi);
                Mat oldImage = image.Image;
                image.Image = BlurTemp;
                oldImage.Dispose();

                //HSV Filter
                CvInvoke.CvtColor(image.Image, HsvIn, Emgu.CV.CvEnum.ColorConversion.Bgr2Hsv);
                CvInvoke.InRange(HsvIn, arrayLow, arrayHigh, HsvOut);

                HsvOut.ConvertTo(Temp, DepthType.Cv8U);
                //Contours
                CvInvoke.FindContours(Temp, contours, null, RetrType.List, ChainApproxMethod.ChainApproxTc89Kcos);

                VectorOfVectorOfPoint convexHulls = new VectorOfVectorOfPoint(contours.Size);

                for (int i = 0; i < contours.Size; i++)
                {
                    CvInvoke.ConvexHull(contours[i], convexHulls[i]);
                }

                Rectangle? largestRectangle   = null;
                double     currentLargestArea = 0.0;

                //Filter contours
                for (int i = 0; i < convexHulls.Size; i++)
                {
                    VectorOfPoint contour = convexHulls[i];
                    VectorOfPoint polygon = new VectorOfPoint(convexHulls.Size);
                    CvInvoke.ApproxPolyDP(contour, polygon, 10, true);

                    // Filter if shape has more than 4 corners after contour is applied
                    if (polygon.Size != 4)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    // Filter if not convex
                    if (!CvInvoke.IsContourConvex(polygon))
                    {
                        polygon.Dispose();
                        continue;
                    }

                    ///////////////////////////////////////////////////////////////////////
                    // Filter if there isn't a nearly horizontal line
                    ///////////////////////////////////////////////////////////////////////
                    int numHorizontal = 0;
                    for (int j = 0; j < 4; j++)
                    {
                        double dx    = polygon[j].X - polygon[(j + 1) % 4].X;
                        double dy    = polygon[j].Y - polygon[(j + 1) % 4].Y;
                        double slope = double.MaxValue;

                        if (dx != 0)
                        {
                            slope = Math.Abs(dy / dx);
                        }

                        double nearlyHorizontalSlope = Math.Tan(ToRadians(20));

                        if (slope < nearlyHorizontalSlope)
                        {
                            numHorizontal++;
                        }
                    }

                    if (numHorizontal < 1)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    ///////////////////////////////////////////////////////////////////////
                    // Filter if polygon is above a set limit. This should remove overhead lights and windows
                    ///////////////////////////////////////////////////////////////////////
                    Rectangle bounds = CvInvoke.BoundingRectangle(polygon);
                    CvInvoke.PutText(image.Image, "Vertical (>=300): " + (bounds.Location.Y).ToString(), TextPoint, FontFace.HersheyPlain, 2, Green);
                    int topY = 300;
                    if (bounds.Location.Y < topY)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    CvInvoke.PutText(image.Image, "Image Height (45-115) and Width (65-225): " + bounds.Height.ToString() + " , " + bounds.Width, TextPoint2, FontFace.HersheyPlain, 2, Green);

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by minimum and maximum height
                    ///////////////////////////////////////////////////////////////////////
                    if (bounds.Height < 45 || bounds.Height > 115)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by minimum and maximum width
                    ///////////////////////////////////////////////////////////////////////
                    if (bounds.Width < 65 || bounds.Width > 225)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by height to width ratio
                    ///////////////////////////////////////////////////////////////////////
                    double ratio = (double)bounds.Height / bounds.Width;
                    CvInvoke.PutText(image.Image, "Ratio: " + ratio.ToString(), TextPoint3, FontFace.HersheyPlain, 2, Green);
                    if (ratio > 1.0 || ratio < .3)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by area to vertical position ratio
                    ///////////////////////////////////////////////////////////////////////
                    double area          = CvInvoke.ContourArea(contour);
                    double areaVertRatio = area / (1280 - bounds.Location.Y);
                    CvInvoke.PutText(image.Image, "Area/Vert Ratio (8-19): " + areaVertRatio.ToString(), TextPoint4, FontFace.HersheyPlain, 2, Green);

                    if (areaVertRatio < 8 || areaVertRatio > 19)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    CvInvoke.Rectangle(image.Image, bounds, Blue, 2);

                    if (area > currentLargestArea)
                    {
                        // BUGFIX: currentLargestArea was never updated, so every
                        // passing contour overwrote largestRectangle and the LAST
                        // target won instead of the largest one.
                        currentLargestArea = area;
                        largestRectangle   = bounds;
                    }

                    polygon.Dispose();
                }
                visionTable.PutBoolean("TargetFound", largestRectangle != null);

                if (largestRectangle != null)
                {
                    ProcessData(largestRectangle.Value, image);
                    CvInvoke.Rectangle(image.Image, largestRectangle.Value, Red, 5);
                }

                imageCount++;

                // Uncomment below to see the HSV window
                //CvInvoke.Imshow("HSV", HsvOut);
                // Uncomment below to see the main image window
                CvInvoke.Imshow("MainWindow", image.Image);
                image.Dispose();

                //Cleanup
                for (int i = 0; i < contours.Size; i++)
                {
                    contours[i].Dispose();
                }
                contours.Clear();

                for (int i = 0; i < convexHulls.Size; i++)
                {
                    convexHulls[i].Dispose();
                }
                convexHulls.Dispose();

                CvInvoke.WaitKey(1);
            }
        }