Example #1
        private Task createMask(Image <Gray, Byte> src, int areaThreshold = 350)
        {
            #region fillGrains
            return(Task.Run(delegate() {
                VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
                Mat hierarchy = new Mat();
                CvInvoke.FindContours(src.Clone(), contours, hierarchy, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                int contoursCount = contours.Size;
                for (int i = 0; i < contoursCount; i++)
                {
                    using (VectorOfPoint contour = contours[i]){
                        // Fill blobs larger than the threshold with white; erase smaller ones.
                        double area = CvInvoke.ContourArea(contour, false);
                        if (area > areaThreshold)
                        {
                            CvInvoke.DrawContours(this.mask, contours, i, new MCvScalar(255), -1);
                        }
                        else
                        {
                            CvInvoke.DrawContours(this.mask, contours, i, new MCvScalar(0), -1);
                        }
                    }
                }
                // A light erosion smooths the resulting mask.
                Mat kernel = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Ellipse, new Size(3, 3), new Point(-1, -1));
                this.mask._MorphologyEx(Emgu.CV.CvEnum.MorphOp.Erode, kernel, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar(1.0));
                kernel.Dispose();
                hierarchy.Dispose();
                contours.Dispose();
            }));

            #endregion
        }
Example #2
        /// <summary>
        /// This function takes a segmented grayscale image and returns the likeliest candidates for hands.
        /// They are chosen as the two largest contours with an area of at least 10'000 pixels.
        /// </summary>
        /// <param name="inputImage">Already segmented grayscale image.</param>
        /// <param name="pixelThreshold">Number of pixels required to be counted as a hand.</param>
        /// <param name="numberOfContours">The n largest contours which will be picked from the list.</param>
        /// <returns>Vector of contours</returns>
        public static VectorOfVectorOfPoint LargestContours(Image <Gray, byte> inputImage,
                                                            int pixelThreshold = PixelThreshold, int numberOfContours = NumberOfContours)
        {
            VectorOfVectorOfPoint contours       = new VectorOfVectorOfPoint();
            VectorOfVectorOfPoint sortedContours = new VectorOfVectorOfPoint();
            Mat hierarchyMat = new Mat();

            CvInvoke.FindContours(inputImage, contours, hierarchyMat, RetrType.Tree, ChainApproxMethod.ChainApproxNone);

            if (contours.Size > 0)
            {
                Dictionary <VectorOfPoint, double> contourDict = new Dictionary <VectorOfPoint, double>();
                for (int i = 0; i < contours.Size; i++)
                {
                    // Index the outer vector once per contour so only one native wrapper is created.
                    VectorOfPoint contour     = contours[i];
                    double        contourArea = CvInvoke.ContourArea(contour);
                    contourDict.Add(contour, contourArea);
                }

                var orderedDict = contourDict.OrderByDescending(pair => pair.Value).TakeWhile(pair => pair.Value > pixelThreshold);
                if (orderedDict.Count() > numberOfContours)
                {
                    orderedDict = orderedDict.Take(numberOfContours);
                }
                foreach (var contour in orderedDict)
                {
                    sortedContours.Push(contour.Key);
                }
            }

            hierarchyMat.Dispose();
            contours.Dispose();
            return(sortedContours);
        }
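
A minimal usage sketch for LargestContours (hypothetical caller; the segmentation step and the file name are placeholders, not part of the original example):

        Image <Gray, byte> segmented = new Image <Gray, byte>("hands_mask.png");
        using (VectorOfVectorOfPoint hands = LargestContours(segmented, pixelThreshold: 10000, numberOfContours: 2))
        {
            Console.WriteLine("Hand candidates found: " + hands.Size);
        }
        segmented.Dispose();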
Example #3
        /// <summary>
        /// This will find the hands and measure their distance from a given reference point.
        /// </summary>
        /// <param name="inputImage">A standard BGR image.</param>
        /// <param name="point">The distance to the hand is measured from this point, in (x, y) coordinates from the top-left corner.</param>
        /// <returns>The distance between the reference point and the closest hand (in pixels).</returns>
        public static float MeasureHgd(Image <Bgr, byte> inputImage, PointF point)
        {
            VectorOfVectorOfPoint largestContours = FindHands(inputImage);
            float distance = MeasureDistance(largestContours, point);

            largestContours.Dispose();
            return(distance);
        }
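
MeasureDistance is not shown in this example. A minimal sketch of one way such a helper could be written with CvInvoke.PointPolygonTest (an assumption about the real implementation, which may differ):

        // Hypothetical helper: returns the distance from "point" to the nearest contour edge.
        private static float MeasureDistance(VectorOfVectorOfPoint contours, PointF point)
        {
            double closest = float.MaxValue;
            for (int i = 0; i < contours.Size; i++)
            {
                using (VectorOfPoint contour = contours[i])
                {
                    // With measureDist = true, PointPolygonTest returns the signed distance to the contour.
                    double d = Math.Abs(CvInvoke.PointPolygonTest(contour, point, true));
                    if (d < closest)
                    {
                        closest = d;
                    }
                }
            }
            return (float)closest;
        }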
Example #4
        /// <summary>
        /// This function will segment, erode and filter a BGR image to find the hands
        /// and then draw the hand contours, filled in white, on a black background.
        /// This is useful for testing or demonstrating the hand detection algorithm.
        /// </summary>
        /// <param name="inputImage">A standard BGR image.</param>
        /// <returns>A grayscale image with the hands drawn in white.</returns>
        public static Image <Gray, byte> AnalyseImage(Image <Bgr, byte> inputImage)
        {
            Image <Gray, byte>    outputImage  = new Image <Gray, byte>(inputImage.Size);
            VectorOfVectorOfPoint handContours = FindHands(inputImage);

            CvInvoke.DrawContours(outputImage, handContours, -1, new MCvScalar(255), -1);

            handContours.Dispose();
            return(outputImage);
        }
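
A minimal usage sketch for AnalyseImage (the file path and window name are placeholders):

        Image <Bgr, byte> frame = new Image <Bgr, byte>("frame.png");
        using (Image <Gray, byte> hands = AnalyseImage(frame))
        {
            CvInvoke.Imshow("Detected hands", hands);   // hands appear as filled white regions
            CvInvoke.WaitKey(0);
        }
        frame.Dispose();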
Example #5
        public void Dispose()
        {
            if (_inputImage != null)
            {
                _inputImage.Dispose();
            }
            if (_defectsContoursMatrix != null)
            {
                _defectsContoursMatrix.Dispose();
            }
            if (_maskOfDefects != null)
            {
                _maskOfDefects.Dispose();
            }
            _largeDefectsContoursMatrix = null;
            _smallDefectsContoursMatrix = null;

            GC.Collect();
            GC.WaitForPendingFinalizers();
        }
Example #6
        /// <summary>
        /// Finds the largest contour in the image and returns the corner points of its minimum-area bounding rectangle.
        /// </summary>
        /// <param name="scr">Source BGR image.</param>
        /// <returns>The four corners of the minimum-area rectangle.</returns>
        private Point[] Find_Rectangle(Image <Bgr, byte> scr)
        {
            // Declare variable
            Point[] rect           = new Point[4];
            double  area           = 0;
            int     index_contours = 0;


            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();
            Image <Gray, byte> GrayImages      = scr.Convert <Gray, byte>();
            Image <Gray, byte> ThresholdImages = new Image <Gray, byte>(GrayImages.Size);

            // Find contours
            CvInvoke.Threshold(GrayImages, ThresholdImages, threshold_value, 255, ThresholdType.Binary);
            CvInvoke.FindContours(ThresholdImages, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            for (int i = 0; i < contours.Size; i++)
            {
                double s = CvInvoke.ContourArea(contours[i]);
                if (area < s)
                {
                    area           = s;
                    index_contours = i;
                }
            }
            // Find the minimum-area (rotated) rectangle around the largest contour.
            // Note: this assumes at least one contour was found; otherwise contours[index_contours] throws.
            RotatedRect RectangleMin = CvInvoke.MinAreaRect(contours[index_contours]);

            PointF[] RectangleMin_Tip = CvInvoke.BoxPoints(RectangleMin);
            for (int i = 0; i < 4; i++)
            {
                rect[i] = Point.Round(RectangleMin_Tip[i]);
            }
            // Clear memory
            GrayImages.Dispose();
            ThresholdImages.Dispose();
            contours.Dispose();
            hierarchy.Dispose();
            return(rect);
        }
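
A minimal sketch of drawing the returned corners back onto the source image (hypothetical caller; the file name is a placeholder and threshold_value is assumed to be set on the surrounding class):

        Image <Bgr, byte> frame = new Image <Bgr, byte>("board.png");
        Point[] corners = Find_Rectangle(frame);
        CvInvoke.Polylines(frame, corners, true, new MCvScalar(0, 0, 255), 2);   // minimum-area box in red
        CvInvoke.Imshow("Minimum-area rectangle", frame);
        CvInvoke.WaitKey(0);
        frame.Dispose();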
Example #7
        public static VectorOfVectorOfPoint DetectEdges(UIImage myImage, double th1, double th2, int aperture, bool value)
        {
            //Load the image from the UIImage (the optional resize is commented out below)
            Image <Bgr, Byte> img =
                new Image <Bgr, byte>(myImage.CGImage);
            //.Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

            //Convert the image to grayscale and filter out the noise
            UMat uimage = new UMat();

            CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

            //use image pyr to remove noise
            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);

            //Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();

            #region circle detection
            double cannyThreshold = th1;
            //double circleAccumulatorThreshold = 120;
            //CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);

            #endregion

            #region Canny and edge detection
            double cannyThresholdLinking = th2;
            UMat   cannyEdges            = new UMat();
            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking, aperture, true);

            VectorOfVectorOfPoint contourEdges = new VectorOfVectorOfPoint();
            UMat hierarchy = new UMat();
            CvInvoke.FindContours(cannyEdges, contourEdges, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);

            VectorOfVectorOfPoint newContourEdges = new VectorOfVectorOfPoint();
            for (int i = 0; i < contourEdges.Size; i++)
            {
                if (contourEdges [i].Size > 3000)
                {
                    newContourEdges.Push(contourEdges [i]);
                }
            }

            contourEdges.Dispose();

            // Merge every remaining contour into a single point set for the convex hull
            // (the original pushed newContourEdges[0] twice and would throw if the list was empty).
            VectorOfVectorOfPoint temp = new VectorOfVectorOfPoint(1);
            for (int i = 0; i < newContourEdges.Size; i++)
            {
                temp[0].Push(newContourEdges [i].ToArray());
            }

            VectorOfVectorOfPoint hull = new VectorOfVectorOfPoint(1);
            CvInvoke.ConvexHull(temp[0], hull[0], true);

            /*LineSegment2D[] lines = CvInvoke.HoughLinesP(
             *  cannyEdges,
             *  1, //Distance resolution in pixel-related units
             *  Math.PI/45.0, //Angle resolution measured in radians.
             *  20, //threshold
             *  30, //min Line width
             *  5); //gap between lines
             *
             * //VectorOfPoint test1 = new VectorOfPoint();
             * //VectorOfVectorOfPoint temp = new VectorOfVectorOfPoint();
             * //temp.Push(contourEdges[0]);
             * for (int i = 0; i < contourEdges.Size; i++) {
             *  //temp[0].Push(contourEdges[i].ToArray());
             *
             *  CvInvoke.DrawContours(img, contourEdges, i, new MCvScalar(255,255,0), 4);
             * }*/

            //VectorOfVectorOfPoint hull = new VectorOfVectorOfPoint(1);
            //CvInvoke.ConvexHull(temp[0], hull[0], true);

            //VectorOfVectorOfPoint result = new VectorOfVectorOfPoint();


            #endregion

            #region Find triangles and rectangles
            //List<Triangle2DF> triangleList = new List<Triangle2DF>();
            //List<RotatedRect> boxList = new List<RotatedRect>(); //a box is a rotated rectangle

            /*using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
             * {
             *  CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple );
             *  int count = contours.Size;
             *  for (int i = 0; i < count; i++)
             *  {
             *      using (VectorOfPoint contour = contours[i])
             *      using (VectorOfPoint approxContour = new VectorOfPoint())
             *      {
             *          CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
             *          if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
             *          {
             *              if (approxContour.Size == 3) //The contour has 3 vertices, it is a triangle
             *              {
             *                  Point[] pts = approxContour.ToArray();
             *                  triangleList.Add(new Triangle2DF(
             *                      pts[0],
             *                      pts[1],
             *                      pts[2]
             *                  ));
             *              } else if (approxContour.Size == 4) //The contour has 4 vertices.
             *              {
             #region determine if all the angles in the contour are within [80, 100] degree
             *                  bool isRectangle = true;
             *                  Point[] pts = approxContour.ToArray();
             *                  LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
             *
             *                  for (int j = 0; j < edges.Length; j++)
             *                  {
             *                      double angle = Math.Abs(
             *                          edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
             *                      if (angle < 80 || angle > 100)
             *                      {
             *                          isRectangle = false;
             *                          break;
             *                      }
             *                  }
             #endregion
             *
             *                  if (isRectangle) boxList.Add(CvInvoke.MinAreaRect(approxContour));
             *              }
             *          }
             *      }
             *  }
             * }*/
            #endregion

            //imageView.Image = img;

            #region draw triangles and rectangles
            //Image<Bgr, Byte> triangleRectangleImage = img;
            //foreach (Triangle2DF triangle in triangleList)
            //    triangleRectangleImage.Draw(triangle, new Bgr(Color.DarkBlue), 2);
            //foreach (RotatedRect box in boxList)
            //    triangleRectangleImage.Draw(box, new Bgr(Color.DarkOrange), 2);
            //imageView.Image = triangleRectangleImage;
            #endregion

            #region draw circles
            //Image<Bgr, Byte> circleImage = img.CopyBlank();
            //foreach (CircleF circle in circles)
            //    triangleRectangleImage.Draw(circle, new Bgr(Color.Brown), 2);
            //imageView.Image = circleImage;
            #endregion

            #region draw lines
            //Image<Bgr, Byte> lineImage = img;
            //foreach (LineSegment2D line in lines)
            //    img.Draw(line, new Bgr(Color.Yellow), 2);
            //imageView.Image = lineImage;
            #endregion

            return(value ? hull : newContourEdges); //lineImage.ToUIImage();
        }
Example #8
        public static void GetBarcodeFromImageEmgu(string fname, out string format, out string code)
        {
            Image <Bgr, Byte> orgimage = new Image <Bgr, byte>(fname);

            double scaleFactor = 1;

            if (orgimage.Height > 2048)
            {
                scaleFactor = 2048 / (double)orgimage.Height;
            }

            Image <Bgr, Byte> image = new Image <Bgr, byte>((int)(orgimage.Width * scaleFactor), (int)(orgimage.Height * scaleFactor));

            //image = cv2.resize(image, (0, 0), fx = scaleFactor, fy = scaleFactor, interpolation = cv2.INTER_AREA)
            CvInvoke.Resize(orgimage, image, new Size(0, 0), scaleFactor, scaleFactor, Inter.Area);

            orgimage.Dispose();


            UMat gray = new UMat();

            CvInvoke.CvtColor(image, gray, ColorConversion.Bgr2Gray);


            /*
             * gradX = cv2.Sobel(gray, ddepth = cv2.cv.CV_32F, dx = 1, dy = 0, ksize = -1)
             * gradY = cv2.Sobel(gray, ddepth = cv2.cv.CV_32F, dx = 0, dy = 1, ksize = -1)
             */
            UMat gradX = new UMat();
            UMat gradY = new UMat();

            // Note: the Python reference above uses CV_32F; this port keeps Cv8U, which clips negative gradients.
            CvInvoke.Sobel(gray, gradX, DepthType.Cv8U, 1, 0, -1);
            CvInvoke.Sobel(gray, gradY, DepthType.Cv8U, 0, 1, -1);

            gray.Dispose();

            //pictureBox1.Image = gradY.Bitmap;

            /*
             # subtract the y-gradient from the x-gradient
             # gradient = cv2.subtract(gradX, gradY)
             # gradient = cv2.convertScaleAbs(gradient)
             */
            UMat gradient = new UMat();

            CvInvoke.Subtract(gradX, gradY, gradient);
            CvInvoke.ConvertScaleAbs(gradient, gradient, 1, 0);

            gradX.Dispose();
            gradY.Dispose();

            //pictureBox1.Image = gradient.Bitmap;

            /*
             # blur and threshold the image
             # blurred = cv2.blur(gradient, (9, 9))
             # (_, thresh) = cv2.threshold(blurred, 225, 255, cv2.THRESH_BINARY)
             */
            UMat blurred = new UMat();
            UMat thresh  = new UMat();

            CvInvoke.Blur(gradient, blurred, new Size(9, 9), new Point(-1, -1));
            CvInvoke.Threshold(blurred, thresh, 88, 255, ThresholdType.Binary);

            //pictureBox1.Image= thresh.Bitmap;
            //return;

            /*
             # construct a closing kernel and apply it to the thresholded image
             # kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (21, 7))
             # closed = cv2.morphologyEx(thresh, cv2.MORPH_CLOSE, kernel)
             */
            UMat closed = new UMat();
            var  kernel = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(21, 7), new Point(-1, -1));

            CvInvoke.MorphologyEx(thresh, closed, MorphOp.Close, kernel, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);

            blurred.Dispose();
            thresh.Dispose();



            //pictureBox1.Image= closed.Bitmap;
            //return;

            /*
             # perform a series of erosions and dilations
             # closed = cv2.erode(closed, None, iterations = 4)
             # closed = cv2.dilate(closed, None, iterations = 4)
             */
            UMat eroded  = new UMat();
            UMat dilated = new UMat();

            CvInvoke.Erode(closed, eroded, null, new Point(-1, -1), 4, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
            CvInvoke.Dilate(eroded, dilated, null, new Point(-1, -1), 4, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);

            //pictureBox1.Image= dilated.Bitmap;
            //return;


            /*
             * (cnts, _) = cv2.findContours(closed.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
             * c = sorted(cnts, key = cv2.contourArea, reverse = True)[0]
             */

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(dilated, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            eroded.Dispose();
            dilated.Dispose();

            double largest_area          = 0;
            int    largest_contour_index = -1;

            for (int i = 0; i < contours.Size; i++)
            {
                var       rect   = CvInvoke.MinAreaRect(contours[i]);
                PointF[]  points = rect.GetVertices();
                Rectangle BBox   = GetBoundingBox(points);

                //Get largest bounding boxes that has width>height
                if (BBox.Width > BBox.Height)
                {
                    double a = CvInvoke.ContourArea(contours[i], false);
                    if (a > largest_area)
                    {
                        largest_area          = a;
                        largest_contour_index = i;
                    }
                }
            }

            if (largest_contour_index < 0)
            {
                // No wide candidate region was found: return empty results.
                format = "";
                code   = "";
                contours.Dispose();
                image.Dispose();
                return;
            }

            var ROIrect = CvInvoke.MinAreaRect(contours[largest_contour_index]);

            PointF[]  ROIpoints = ROIrect.GetVertices();
            Rectangle ROIBBox   = GetBoundingBox(ROIpoints);

            var extraWidth  = (int)(ROIBBox.Width * 0.2);
            var extraHeight = (int)(ROIBBox.Height * 0.2);

            ROIBBox.X -= extraWidth;
            ROIBBox.Y -= extraHeight;

            ROIBBox.Width  += extraWidth * 2;
            ROIBBox.Height += extraHeight * 2;

            Bitmap   ROIbmp = new Bitmap(ROIBBox.Width, ROIBBox.Height);
            Graphics g      = Graphics.FromImage(ROIbmp);

            g.DrawImage(image.ToBitmap(), 0, 0, ROIBBox, GraphicsUnit.Pixel);

            IBarcodeReader reader = new BarcodeReader();
            var            result = reader.Decode(ROIbmp);

            // do something with the result
            if (result != null)
            {
                format = result.BarcodeFormat.ToString();
                code   = result.Text;
            }
            else
            {
                format = "";
                code   = "";
            }

            g.Dispose();
            ROIbmp.Dispose();
            contours.Dispose();
            image.Dispose();
        }
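
A minimal usage sketch for GetBarcodeFromImageEmgu (the file path is a placeholder):

        string format, code;
        GetBarcodeFromImageEmgu("barcode_photo.jpg", out format, out code);
        Console.WriteLine(code.Length > 0 ? "Decoded " + format + ": " + code : "No barcode found");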
Example #9
        public override FilterResultDTO filterImage(Image <Bgr, byte> input)
        {
            var prefiltered = preFilter(input);

            if (prefiltered.Density < 0.1)
            {
                prefiltered.base64image = Base64Converter.BitmapToBase64(input.ToBitmap());
                return(prefiltered);
            }

            var segmented = segStreetBySatHist(input);

            segmented._Not();
            var mask = fitTrapeziumToMask(segmented);

            if (mask.CountNonzero()[0] < 30000)
            {
                prefiltered.base64image = Base64Converter.BitmapToBase64(input.ToBitmap());
                return(prefiltered);
            }



            var xm = input.Convert <Gray, byte>();

            mask._Not();
            xm._Max(mask);
            mask._Not();


            int maxr = 0;
            int idx  = 0;
            Image <Gray, byte> cracks    = new Image <Gray, byte>(input.Size);
            Image <Gray, byte> aux       = new Image <Gray, byte>(input.Size);
            Image <Gray, byte> maxCracks = new Image <Gray, byte>(input.Size);
            Random             r         = new Random();

            for (int t = 0; t <= 255; t++)
            {
                var xmCC      = xm.ThresholdBinaryInv(new Gray(t), new Gray(255));
                int numberCC  = 0;
                var contours  = new VectorOfVectorOfPoint();
                Mat hierarchy = new Mat();
                CvInvoke.FindContours(xmCC, contours, hierarchy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
                for (int i = 0; i < contours.Size; i++)
                {
                    if (contours[i].Size < 5)
                    {
                        continue;
                    }
                    var rect = CvInvoke.MinAreaRect(contours[i]);
                    //var ellipse = CvInvoke.FitEllipse(contours[i]);
                    var ellipse      = new Ellipse(rect);
                    var w            = ellipse.RotatedRect.Size.Width;
                    var h            = ellipse.RotatedRect.Size.Height;
                    var major        = (w > h) ? w : h;
                    var minor        = (w < h) ? w : h;
                    var eccentricity = Math.Sqrt(1 - ((minor * minor) / (major * major)));
                    if (eccentricity >= 0.90 && major >= 25 && major < 60)
                    {
                        using (VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint(contours[i]))
                        {
                            //Check skeleton
                            CvInvoke.FillPoly(aux, vvp, new MCvScalar(255));
                            var auxSkel = aux.Copy();

                            var skel  = Skeletonization(auxSkel);
                            var ratio = aux.CountNonzero()[0] / skel.CountNonzero()[0];
                            if (ratio < 4)
                            {
                                numberCC++;
                                cracks._Or(aux);
                            }
                            auxSkel.Dispose();
                            skel.Dispose();
                        }
                    }
                    if (numberCC > maxr)
                    {
                        cracks.CopyTo(maxCracks);
                        cracks.SetZero();
                        maxr = numberCC;
                        idx  = t;
                    }
                }
                contours.Dispose();
                hierarchy.Dispose();
                xmCC.Dispose();
            }

            var ret    = ImageHelper.MaskOverlay(input, maxCracks);
            var nTotal = mask.CountNonzero()[0];
            // Count the crack pixels that survived filtering (the original assigned nTotal here,
            // which makes the reported density meaningless).
            var nCrack = maxCracks.CountNonzero()[0];

            mask.Dispose();
            segmented.Dispose();
            xm.Dispose();
            aux.Dispose();
            cracks.Dispose();
            maxCracks.Dispose();
            return(new FilterResultDTO()
            {
                base64image = Base64Converter.BitmapToBase64(ret.Bitmap),
                Density = nCrack / (float)nTotal,
                ProcessedArea = nTotal,
                Type = FilterResultDTO.CaracteristicType.Cracks,
            });
        }
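
The Skeletonization helper called above is not part of this example. A minimal sketch of a morphological skeleton it might correspond to (the name and signature are taken from the call site; the real implementation may differ):

        private static Image <Gray, byte> Skeletonization(Image <Gray, byte> src)
        {
            Image <Gray, byte> skel   = new Image <Gray, byte>(src.Size);
            Image <Gray, byte> img    = src.Copy();
            Image <Gray, byte> eroded = new Image <Gray, byte>(src.Size);
            Image <Gray, byte> opened = new Image <Gray, byte>(src.Size);
            Image <Gray, byte> temp   = new Image <Gray, byte>(src.Size);
            Mat kernel = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Cross, new Size(3, 3), new Point(-1, -1));

            // Repeatedly erode the image; at each step keep the pixels that an opening would remove.
            while (CvInvoke.CountNonZero(img) > 0)
            {
                CvInvoke.Erode(img, eroded, kernel, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Constant, new MCvScalar(0));
                CvInvoke.Dilate(eroded, opened, kernel, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Constant, new MCvScalar(0));
                CvInvoke.Subtract(img, opened, temp);   // boundary pixels lost by the opening
                CvInvoke.BitwiseOr(skel, temp, skel);   // accumulate them into the skeleton
                eroded.CopyTo(img);
            }

            img.Dispose();
            eroded.Dispose();
            opened.Dispose();
            temp.Dispose();
            kernel.Dispose();
            return skel;
        }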
Example #10
        static void Main(string[] args)
        {
            NetworkTable.SetClientMode();
            NetworkTable.SetTeam(4488);
            NetworkTable.SetIPAddress("10.44.88.2");
#if KANGAROO
            NetworkTable.SetNetworkIdentity("Kangaroo");
#else
            NetworkTable.SetNetworkIdentity("CameraTracking");
#endif
            //Switch between Kangaroo and Desktop.
            //On kangaroo, use different table and don't display image
            visionTable = NetworkTable.GetTable("SmartDashboard");

            //ImageGrabber imageGrabber = new ImageGrabber(visionTable);

            Mat HsvIn = new Mat(), HsvOut = new Mat(), output = new Mat(), Temp = new Mat();
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            //VectorOfVectorOfPoint filteredContours = new VectorOfVectorOfPoint();

            //MCvScalar low = new MCvScalar(63, 44, 193);
            //MCvScalar high = new MCvScalar(97, 255, 255);

            double[] defaultLow            = new double[] { 50, 44, 193 };
            double[] defaultHigh           = new double[] { 90, 255, 255 };

            VectorOfDouble arrayLow  = new VectorOfDouble(3);
            VectorOfDouble arrayHigh = new VectorOfDouble(3);

            Point TopMidPoint    = new Point((int)(ImageWidth / 2), 0);
            Point BottomMidPoint = new Point((int)(ImageWidth / 2), (int)ImageHeight);

            Point LeftMidPoint  = new Point(0, (int)(ImageHeight / 2));
            Point RightMidPoint = new Point((int)ImageWidth, (int)(ImageHeight / 2));

            Stopwatch sw = new Stopwatch();

            CameraWatcher cameraChecker = new CameraWatcher();
            //UsbManager2 cameraChecker = new UsbManager2();
            //cameraChecker.startWatcher();

            int count = 0;

            //visionTable.PutNumberArray("HSVLow", defaultLow);
            //visionTable.PutNumberArray("HSVHigh", defaultHigh);

            //visionTable.PutNumber("ShooterOffsetDegreesX", ShooterOffsetDegreesX);
            //visionTable.PutNumber("ShooterOffsetDegreesY", ShooterOffsetDegreesY);

            Thread timer = new Thread(() =>
            {
                while (true)
                {
                    // update kangaroo battery info
                    visionTable.PutNumber("KangarooBattery",
                                          System.Windows.Forms.SystemInformation.PowerStatus.BatteryLifePercent);

                    // check camera status
                    int cameraState = cameraChecker.CheckState;
                    // camera states:
                    // 0 = Camera is found and working
                    // 1 = Camera is not found, waiting for reconnect to reinitialize
                    // 2 = Camera was found again, re-init was kicked off
                    visionTable.PutNumber("CameraState", cameraState);
                    if (cameraState == 0)
                    {
                        // Camera is connected and fine
                        //Console.WriteLine("Camera alive");
                    }
                    else if (cameraState == 1)
                    {
                        // Camera is disconnected or having problems
                        //Console.WriteLine("Camera dead, waiting for reconnect");
                    }
                    else if (cameraState == 2)
                    {
                        // Camera reconnected
                        //Console.WriteLine("Camera found again, reinitializing");
                        Process.Start("C:/Users/Shockwave/Desktop/NewKangaroo/cameraRestart.exe");     // Launch external exe to kill process, set up camera, and restart
                    }

                    Thread.Sleep(5000);
                }
            });
            timer.Start();
            GC.KeepAlive(timer);
            int imageCount = 0;

            ImageBuffer im  = new ImageBuffer();
            Capture     cap = new Capture(0); //Change me to 1 to use external camera
            cap.FlipVertical = true;

            cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth, 1280);
            cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight, 720);

            ImageSaver saver = new ImageSaver();
            //int saveCount = 0;
            int  rdi        = 1;
            int  kernalSize = 6 * rdi + 1;
            Size ksize      = new Size(kernalSize, kernalSize);

            while (true)
            {
                count++;
                sw.Restart();
                //ImageBuffer image = imageGrabber.Image();
                cap.Grab();
                im.GyroAngle = visionTable.GetNumber("Gyro", 0.0);
                cap.Retrieve(im.Image);

                ImageBuffer image = im.Clone();

#if KANGAROO
                visionTable.PutNumber("KangarooHeartBeat", count);
#endif
                if (image == null || image.IsEmpty)
                {
                    image?.Dispose();
                    Thread.Yield();
                    continue;
                }

                /*
                 * // Image saver for debugging
                 * if (visionTable.GetBoolean("LightsOn", false))
                 * {
                 *  saveCount++;
                 *  if (saveCount >= 6)
                 *  {
                 *      saver.AddToQueue(image.Image);
                 *      saveCount = 0;
                 *  }
                 * }*/

                double[] ntLow  = visionTable.GetNumberArray("HSVLow", defaultLow);
                double[] ntHigh = visionTable.GetNumberArray("HSVHigh", defaultHigh);

                if (ntLow.Length != 3)
                {
                    ntLow = defaultLow;
                }
                if (ntHigh.Length != 3)
                {
                    ntHigh = defaultHigh;
                }

                arrayLow.Clear();
                arrayLow.Push(ntLow);
                arrayHigh.Clear();
                arrayHigh.Push(ntHigh);

                Mat BlurTemp = new Mat();
                CvInvoke.GaussianBlur(image.Image, BlurTemp, ksize, rdi);
                Mat oldImage = image.Image;
                image.Image = BlurTemp;
                oldImage.Dispose();

                //HSV Filter
                CvInvoke.CvtColor(image.Image, HsvIn, Emgu.CV.CvEnum.ColorConversion.Bgr2Hsv);
                CvInvoke.InRange(HsvIn, arrayLow, arrayHigh, HsvOut);

                HsvOut.ConvertTo(Temp, DepthType.Cv8U);
                //Contours
                CvInvoke.FindContours(Temp, contours, null, RetrType.List, ChainApproxMethod.ChainApproxTc89Kcos);
                //CvInvoke.DrawContours(output, contours, -1, new MCvScalar(0, 0, 0));

                VectorOfVectorOfPoint convexHulls = new VectorOfVectorOfPoint(contours.Size);

                for (int i = 0; i < contours.Size; i++)
                {
                    CvInvoke.ConvexHull(contours[i], convexHulls[i]);
                }

                Rectangle?largestRectangle   = null;
                double    currentLargestArea = 0.0;

                //Filter contours
                for (int i = 0; i < convexHulls.Size; i++)
                {
                    VectorOfPoint contour = convexHulls[i];
                    VectorOfPoint polygon = new VectorOfPoint(convexHulls.Size);
                    CvInvoke.ApproxPolyDP(contour, polygon, 10, true);

                    //VectorOfVectorOfPoint cont = new VectorOfVectorOfPoint(1);
                    //cont.Push(polygon);

                    //CvInvoke.DrawContours(image.Image, cont,-1, Green, 2);

                    // Filter if shape has more than 4 corners after contour is applied
                    if (polygon.Size != 4)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    // Filter if not convex
                    if (!CvInvoke.IsContourConvex(polygon))
                    {
                        polygon.Dispose();
                        continue;
                    }

                    ///////////////////////////////////////////////////////////////////////
                    // Filter if there isn't a nearly horizontal line
                    ///////////////////////////////////////////////////////////////////////
                    //int numVertical = 0;
                    int numHorizontal = 0;
                    for (int j = 0; j < 4; j++)
                    {
                        double dx    = polygon[j].X - polygon[(j + 1) % 4].X;
                        double dy    = polygon[j].Y - polygon[(j + 1) % 4].Y;
                        double slope = double.MaxValue;

                        if (dx != 0)
                        {
                            slope = Math.Abs(dy / dx);
                        }

                        double nearlyHorizontalSlope = Math.Tan(ToRadians(20));
                        //double rad = ToRadians(60);
                        //double nearlyVerticalSlope = Math.Tan(rad);

                        //if (slope > nearlyVerticalSlope) numVertical++;
                        if (slope < nearlyHorizontalSlope)
                        {
                            numHorizontal++;
                        }
                    }

                    if (numHorizontal < 1)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////
                    //CvInvoke.PutText(image.Image, "Number of horizontal (>=1): " + (numHorizontal).ToString(), TextPoint4, FontFace.HersheyPlain, 2, Green);

                    ///////////////////////////////////////////////////////////////////////
                    // Filter if polygon is above a set limit. This should remove overhead lights and windows
                    ///////////////////////////////////////////////////////////////////////
                    Rectangle bounds = CvInvoke.BoundingRectangle(polygon);
                    CvInvoke.PutText(image.Image, "Vertical (>=300): " + (bounds.Location.Y).ToString(), TextPoint, FontFace.HersheyPlain, 2, Green);
                    int topY = 300;
                    if (bounds.Location.Y < topY)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    CvInvoke.PutText(image.Image, "Image Height (45-115) and Width (65-225): " + bounds.Height.ToString() + " , " + bounds.Width, TextPoint2, FontFace.HersheyPlain, 2, Green);

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by minimum and maximum height
                    ///////////////////////////////////////////////////////////////////////
                    if (bounds.Height < 45 || bounds.Height > 115)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by minimum and maximum width
                    ///////////////////////////////////////////////////////////////////////
                    if (bounds.Width < 65 || bounds.Width > 225)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by height to width ratio
                    ///////////////////////////////////////////////////////////////////////
                    double ratio = (double)bounds.Height / bounds.Width;
                    CvInvoke.PutText(image.Image, "Ratio: " + ratio.ToString(), TextPoint3, FontFace.HersheyPlain, 2, Green);
                    if (ratio > 1.0 || ratio < .3)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by area to vertical position ratio
                    ///////////////////////////////////////////////////////////////////////
                    double area          = CvInvoke.ContourArea(contour);
                    double areaVertRatio = area / (1280 - bounds.Location.Y);
                    CvInvoke.PutText(image.Image, "Area/Vert Ratio (8-19): " + areaVertRatio.ToString(), TextPoint4, FontFace.HersheyPlain, 2, Green);

                    if (areaVertRatio < 8 || areaVertRatio > 19)
                    {
                        polygon.Dispose();
                        continue;
                    }
                    ///////////////////////////////////////////////////////////////////////

                    //CvInvoke.PutText(image.Image, "Area: " + area.ToString(), TextPoint2, FontFace.HersheyPlain, 2, Green);

                    CvInvoke.Rectangle(image.Image, bounds, Blue, 2);

                    if (area > currentLargestArea)
                    {
                        // Track the largest passing target (the original never updated currentLargestArea).
                        currentLargestArea = area;
                        largestRectangle   = bounds;
                    }

                    //filteredContours.Push(contour);

                    polygon.Dispose();
                }
                visionTable.PutBoolean("TargetFound", largestRectangle != null);
                //CvInvoke.PutText(image.Image, "Target found: " + (largestRectangle != null).ToString(), TextPoint5, FontFace.HersheyPlain, 2, Green);


                if (largestRectangle != null)
                {
                    ProcessData(largestRectangle.Value, image);
                    CvInvoke.Rectangle(image.Image, largestRectangle.Value, Red, 5);
                }

                //ToDo, Draw Crosshairs
                //CvInvoke.Line(image.Image, TopMidPoint, BottomMidPoint, Blue, 3);
                //CvInvoke.Line(image.Image, LeftMidPoint, RightMidPoint, Blue, 3);

                //int fps = (int)(1.0 / sw.Elapsed.TotalSeconds);
                //CvInvoke.PutText(image.Image, fps.ToString(), TextPoint, FontFace.HersheyPlain, 2, Green);

                imageCount++;

                // Uncomment below to see the HSV window
                //CvInvoke.Imshow("HSV", HsvOut);
                // Uncomment below to see the main image window
                CvInvoke.Imshow("MainWindow", image.Image);
                image.Dispose();



                //report to NetworkTables

                //Cleanup

                for (int i = 0; i < contours.Size; i++)
                {
                    contours[i].Dispose();
                }
                contours.Clear();

                for (int i = 0; i < convexHulls.Size; i++)
                {
                    convexHulls[i].Dispose();
                }
                convexHulls.Dispose();

                /*
                 * for (int i = 0; i < filteredContours.Size; i++)
                 * {
                 *  filteredContours[i].Dispose();
                 * }
                 * filteredContours.Clear();
                 */

                CvInvoke.WaitKey(1);
            }
        }