コード例 #1
0
ファイル: Main.cs プロジェクト: V0939965/Assembly-Machine
 /// <summary>
 /// Loads the reference ("origin") label image, detects its contour inside the configured
 /// ROI, stores its padded bounding box in ORGRec and its two tip points in ORG, and writes
 /// a debug image with the box drawn. Any failure is reported via a message box.
 /// </summary>
 private void GetOriginImage()
 {
     try
     {
         using (Mat img = CvInvoke.Imread(@"img\origin.bmp", Emgu.CV.CvEnum.ImreadModes.Grayscale))
         {
             // NOTE(review): the intermediate image from ToImage() is assumed to be
             // consumed/released by RoiImage — confirm, otherwise it leaks.
             Image <Gray, byte> _img = img.ToImage <Gray, byte>();
             _img = ComputerVison.RoiImage(_img, Config.Parameter.ROI);
             // FIX: the original allocated a throw-away VectorOfPoint that was immediately
             // overwritten (and leaked). The 'using' also guarantees disposal if a later
             // call throws. FindContours may return null; using(null) is legal and
             // MinAreaRect(null) then throws into the catch below, as before.
             using (VectorOfPoint cnt = ComputerVison.FindContours(_img, Config.Parameter.THRESHOLD_VALUE))
             {
                 // Axis-aligned bounding box of the label, padded by 10 px on each side
                 // and shifted back into full-image coordinates.
                 RotatedRect a = CvInvoke.MinAreaRect(cnt);
                 ORGRec         = a.MinAreaRect();
                 ORGRec.X      += Config.Parameter.ROI.X - 10;
                 ORGRec.Y      += Config.Parameter.ROI.Y - 10;
                 ORGRec.Height += 20;
                 ORGRec.Width  += 20;
                 // Save a debug visualization of the detected region.
                 using (Image <Bgr, byte> iBgr2 = _img.Convert <Bgr, byte>())
                 {
                     CvInvoke.Rectangle(iBgr2, ORGRec, new MCvScalar(0, 255, 0), 3);
                     CvInvoke.Imwrite("img\\originRoi.bmp", iBgr2);
                 }
                 // Remember the reference contour's two tip points for later alignment.
                 Point[] p = ComputerVison.Search2Tip(cnt);
                 ORG.SetPointA(p[0]);
                 ORG.SetPointB(p[1]);
             }
             _img.Dispose();
         }
     }
     catch (Exception er)
     {
         MessageBox.Show(er.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Warning);
     }
 }
コード例 #2
0
        /// <summary>
        /// Searches the binary image for the contour that best matches the given template.
        /// </summary>
        /// <param name="ImgBinary">Thresholded image to search.</param>
        /// <param name="Template">Template contour to match against.</param>
        /// <returns>
        /// The best matching contour (caller owns/disposes it; null when nothing beats the
        /// threshold) and its dissimilarity score: 0 is a perfect match, 1 is the worst.
        /// </returns>
        public static Tuple <VectorOfPoint, double> MarkDetection(Image <Gray, byte> ImgBinary, VectorOfPoint Template)
        {
            VectorOfPoint bestMatch = null;
            double bestScore = 1;
            double templateArea = CvInvoke.ContourArea(Template);

            using (VectorOfVectorOfPoint found = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(ImgBinary, found, null, Emgu.CV.CvEnum.RetrType.Ccomp, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                int total = found.Size;
                for (int idx = 0; idx < total; idx++)
                {
                    // Shape dissimilarity (Hu-moment based) between template and candidate.
                    double shapeScore = CvInvoke.MatchShapes(Template, found[idx], Emgu.CV.CvEnum.ContoursMatchType.I3);
                    // Area dissimilarity: 0 when the areas are equal, approaching 1 as they diverge.
                    double candidateArea = CvInvoke.ContourArea(found[idx]);
                    double areaScore = 1 - (Math.Min(templateArea, candidateArea) / Math.Max(templateArea, candidateArea));
                    // A candidate is only as good as its worse criterion.
                    double combined = shapeScore > areaScore ? shapeScore : areaScore;
                    if (combined < bestScore)
                    {
                        // Release the previous winner before keeping a copy of the new one.
                        bestMatch?.Dispose();
                        bestScore = combined;
                        bestMatch = new VectorOfPoint(found[idx].ToArray());
                    }
                }
            }
            return new Tuple <VectorOfPoint, double>(bestMatch, bestScore);
        }
コード例 #3
0
ファイル: Edge.cs プロジェクト: M1S2/JigsawPuzzleSolver
        //##############################################################################################################################################################################################

        /// <summary>
        /// Constructs an edge of a puzzle piece from its contour and precomputes the
        /// normalized contour (and its 180°-flipped variant) used for edge comparisons.
        /// </summary>
        /// <param name="pieceID">Identifier of the piece this edge belongs to.</param>
        /// <param name="edgeNumber">Index of this edge on the piece.</param>
        /// <param name="pieceImgColor">Color bitmap of the piece (only kept for debug output).</param>
        /// <param name="edgeContour">Contour points of the edge; this instance keeps a reference to it.</param>
        /// <param name="logHandle">Progress/log sink.</param>
        /// <param name="cancelToken">Token used to cancel long-running classification.</param>
        public Edge(string pieceID, int edgeNumber, LocalDriveBitmap pieceImgColor, VectorOfPoint edgeContour, IProgress <LogEvent> logHandle, CancellationToken cancelToken)
        {
            _logHandle   = logHandle;
            _cancelToken = cancelToken;
            PieceID      = pieceID;
            EdgeNumber   = edgeNumber;
            contour      = edgeContour;
            if (PluginFactory.GetGeneralSettingsPlugin().SolverShowDebugResults)
            {
                PieceImgColor = pieceImgColor;
                ContourImg    = new LocalDriveBitmap(System.IO.Path.GetDirectoryName(PieceImgColor.LocalFilePath) + @"\Edges\" + PieceID + "_Edge#" + edgeNumber.ToString() + ".png", null);
            }

            NormalizedContour = normalize(contour);    //Normalized contours are used for comparisons

            // FIX: dispose the reversed copy even if normalize throws (the original leaked it
            // on the exception path). Same as the normalized contour, but flipped 180 degrees.
            using (VectorOfPoint contourCopy = new VectorOfPoint(contour.ToArray().Reverse().ToArray()))
            {
                ReverseNormalizedContour = normalize(contourCopy);
            }

            classify();
        }
コード例 #4
0
 /// <summary>
 /// Releases every owned native resource exactly once and clears the field
 /// afterwards so a repeated call is a harmless no-op.
 /// </summary>
 protected override void DisposeObject()
 {
     _modelKeypoints?.Dispose();
     _modelKeypoints = null;

     _modelDescriptors?.Dispose();
     _modelDescriptors = null;

     _modelDescriptorMatcher?.Dispose();
     _modelDescriptorMatcher = null;

     _octagon?.Dispose();
     _octagon = null;
 }
コード例 #5
0
ファイル: Main.cs プロジェクト: V0939965/Assembly-Machine
        /// <summary>
        /// Captures an image of the label, measures its offset and rotation relative to the
        /// stored origin, sends the correction to the PLC (camera mode only), and updates the
        /// preview. If no label is found the user may retry (recursively) or give up.
        /// </summary>
        private void Handling()
        {
            Response result = new Response();
            // Timestamp used to name this run's backup images.
            string   name   = DateTime.Now.Year.ToString() + "_" + DateTime.Now.Month.ToString() + "_" + DateTime.Now.Day.ToString() + "_" + DateTime.Now.Hour.ToString() + "_" + DateTime.Now.Minute.ToString() + "_" + DateTime.Now.Second.ToString();

            // Strobe the light only in camera mode, give it time to settle, grab a frame, turn it off.
            Light_Mode(modeCamera.Checked);
            Thread.Sleep(500);
            Image <Bgr, byte> iBgr = GetImage();

            Light_Mode(false);
            Image <Gray, byte> iGray = iBgr.Convert <Gray, byte>();

            // NOTE(review): RoiImage returns a new image; the pre-crop one is assumed to be
            // released inside RoiImage — confirm, otherwise it leaks here.
            iGray = ComputerVison.RoiImage(iGray, Config.Parameter.ROI);

            Point[] p = new Point[2];
            // FIX: the original allocated a throw-away VectorOfPoint here that was
            // immediately overwritten (and leaked) by this call.
            VectorOfPoint cnt = ComputerVison.FindContours(iGray, Config.Parameter.THRESHOLD_VALUE);
            if (cnt != null)
            {
                // Find the two label tips, compute offset+angle against the origin, then
                // rotate the tips back and recompute to verify/refine the result.
                p = ComputerVison.Search2Tip(cnt);
                ComputerVison.Calculator(ref result, ORG.PointA, ORG.PointB, p[0], p[1], Config.Parameter.LabelSize.Width, false);
                ComputerVison.RouPoint(ORG.PointO, ref p[0], result.ANGLE);
                ComputerVison.RouPoint(ORG.PointO, ref p[1], result.ANGLE);
                ComputerVison.Calculator(ref result, ORG.PointA, ORG.PointB, p[0], p[1], Config.Parameter.LabelSize.Width, true);
                // Convert measured offsets into PLC pulses; +150 / +20 are rig-specific axis offsets.
                short y = (short)Math.Round(result.X * Config.Parameter.PULSE_Y + 150);
                short x = (short)Math.Round(result.Y * Config.Parameter.PULSE_X + 20);
                short z = (short)Math.Round(-result.ANGLE * Config.Parameter.PULSE_Z / 360);
                if (modeCamera.Checked)
                {
                    PLCCommunicate(x, y, z);
                }
                // FIX: release the ROI grayscale before overwriting the reference (was leaked).
                iGray.Dispose();
                iGray = iBgr.Convert <Gray, byte>();
                ComputerVison.RotationImage(ref iGray, Config.Parameter.ROTATION_CENTER, (float)result.ANGLE);
                // FIX: release the first contour before re-detecting on the rotated image (was leaked).
                cnt.Dispose();
                cnt = ComputerVison.FindContours(iGray, Config.Parameter.THRESHOLD_VALUE);
                using (Image <Bgr, byte> iBgr2 = iGray.Convert <Bgr, byte>())
                {
                    if (cnt != null)
                    {
                        RotatedRect r = CvInvoke.MinAreaRect(cnt);
                        CvInvoke.Rectangle(iBgr2, r.MinAreaRect(), new MCvScalar(0, 255, 0), 3);
                    }
                    if (modeCamera.Checked)
                    {
                        CvInvoke.Imwrite(@"backup\" + name + ".bmp", iBgr);
                        CvInvoke.Imwrite(@"backup_H\" + name + "trans.bmp", iBgr2);
                    }
                    p_imShow.Invoke(new MethodInvoker(delegate()
                    {
                        p_imShow.Image = iBgr2.ToBitmap();
                    }));
                }
                // FIX: the second FindContours may return null; guard the dispose
                // (the original called cnt.Dispose() unconditionally and could NRE).
                cnt?.Dispose();
            }
            else
            {
                // No label found: show the origin box on the preview and ask the user to retry.
                using (Image <Bgr, byte> iBgr2 = iGray.Convert <Bgr, byte>())
                {
                    CvInvoke.Rectangle(iBgr2, ORGRec, new MCvScalar(0, 255, 0), 3);
                    p_imShow.Invoke(new MethodInvoker(delegate()
                    {
                        p_imShow.Image = iBgr2.ToBitmap();
                    }));
                }
                DialogResult kq = MessageBox.Show("Not found label from images! You want to try again!", "Warning", MessageBoxButtons.YesNo, MessageBoxIcon.Warning);
                if (kq == DialogResult.Yes)
                {
                    iGray.Dispose();
                    iBgr.Dispose();
                    Handling();
                    // FIX: the original fell through after the recursive retry returned and
                    // disposed iGray/iBgr a second time.
                    return;
                }
                else
                {
                    result.ANGLE = 0;
                    result.X     = 0;
                    result.Y     = 0;
                }
            }
            iGray.Dispose();
            iBgr.Dispose();
        }
コード例 #6
0
        /// <summary>
        /// Moves the camera to each PLC-defined fiducial-mark position, captures an image,
        /// thresholds it, and matches the mark against its Gerber pad contour. On success the
        /// mark centroids are stored in mMark and, after the second mark, the center offset is
        /// written to mMarkAdjust; on any failure mMarkAdjust.Status is set to Fail and the
        /// loop stops.
        /// </summary>
        private void CaptureMark()
        {
            //mPlcComm.Logout();
            // Strobe mode is the inverse of the configured continuous light mode.
            // NOTE(review): lightStrobe is computed but never read in this method — confirm
            // whether it is dead code or consumed elsewhere via side effects.
            bool lightStrobe = !Convert.ToBoolean(mParam.LIGHT_MODE);

            // Template pad contours for the two fiducial marks, taken from the Gerber model.
            System.Drawing.Point[] markPointXYPLC = mModel.GetPLCMarkPosition();
            PadItem[] PadMark = new PadItem[2];
            for (int i = 0; i < 2; i++)
            {
                PadMark[i] = mModel.Gerber.PadItems[mModel.Gerber.MarkPoint.PadMark[i]];
            }
            mMark = new System.Drawing.Point[2];
            // Minimum matching score (percent) a detection must exceed to be accepted.
            double matchingScore = mModel.Gerber.MarkPoint.Score;

            for (int i = 0; i < markPointXYPLC.Length; i++)
            {
                System.Drawing.Point mark = markPointXYPLC[i];
                int x = mark.X;
                int y = mark.Y;
                mLog.Info(string.Format("{0}, Position Name : {1},  X = {2}, Y = {3}", "Moving TOP Axis", "Mark " + (i + 1).ToString(), x, y));
                // Move the axis to the mark position and capture the field of view.
                using (Image <Bgr, byte> image = VI.CaptureImage.CaptureFOV(mPlcComm, mCamera, mark))
                {
                    if (image != null)
                    {
                        // Restrict processing to the configured mark ROI, then binarize.
                        System.Drawing.Rectangle ROI = mModel.GetRectROIMark();
                        image.ROI = ROI;
                        using (Image <Gray, byte> imgGray = new Image <Gray, byte>(image.Size))
                        {
                            CvInvoke.CvtColor(image, imgGray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                            CvInvoke.Threshold(imgGray, imgGray, mModel.Gerber.MarkPoint.ThresholdValue, 255, Emgu.CV.CvEnum.ThresholdType.Binary);
                            VectorOfPoint cnt      = new VectorOfPoint(PadMark[i].Contour);
                            var           markInfo = Mark.MarkDetection(imgGray, cnt);
                            cnt.Dispose();
                            cnt = null;
                            // MarkDetection returns a dissimilarity (0 = perfect); convert it
                            // to a percentage similarity for comparison with the threshold.
                            double realScore = markInfo.Item2;
                            realScore = Math.Round((1 - realScore) * 100.0, 2);
                            if (realScore > matchingScore)
                            {
                                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                                {
                                    if (markInfo.Item1 != null)
                                    {
                                        contours.Push(markInfo.Item1);
                                        // Centroid of the matched contour via image moments.
                                        Moments mm = CvInvoke.Moments(markInfo.Item1);
                                        if (mm.M00 != 0)
                                        {
                                            mMark[i] = new System.Drawing.Point(Convert.ToInt32(mm.M10 / mm.M00), Convert.ToInt32(mm.M01 / mm.M00));
                                        }
                                    }
                                }
                                if (i == 1)
                                {
                                    // Both marks captured: record the offset of the FIRST mark
                                    // from the image center as the adjustment.
                                    // NOTE(review): this uses mMark[0] inside the i == 1 branch —
                                    // confirm mMark[1] was not intended here.
                                    mMarkAdjust.Status = Utils.ActionStatus.Successfully;
                                    System.Drawing.Point ct = new System.Drawing.Point(image.Width / 2, image.Height / 2);
                                    mMarkAdjust.X = ct.X - mMark[0].X;
                                    mMarkAdjust.Y = ct.Y - mMark[0].Y;
                                }
                            }
                            else
                            {
                                // Match below threshold: report failure and stop processing marks.
                                mLog.Info(string.Format("Score matching is lower score standard... {0} < {1}", realScore, matchingScore));
                                mMarkAdjust.Status = Utils.ActionStatus.Fail;
                                break;
                            }
                        }
                    }
                    else
                    {
                        // Capture failed (no image returned): report failure and stop.
                        mLog.Info(string.Format("Cant Capture image in Mark : {0}", i + 1));
                        mMarkAdjust.Status = Utils.ActionStatus.Fail;
                        break;
                    }
                }
            }
        }
コード例 #7
0
        /// <summary>
        /// Vision-tracking entry point: connects to NetworkTables, starts a background
        /// heartbeat/camera-watchdog thread, then runs an endless capture loop that
        /// HSV-filters each frame, finds 4-sided convex targets matching size/position
        /// heuristics, reports the largest one, and publishes results to NetworkTables.
        /// </summary>
        static void Main(string[] args)
        {
            NetworkTable.SetClientMode();
            NetworkTable.SetTeam(4488);
            NetworkTable.SetIPAddress("10.44.88.2");
#if KANGAROO
            NetworkTable.SetNetworkIdentity("Kangaroo");
#else
            NetworkTable.SetNetworkIdentity("CameraTracking");
#endif
            //Switch between Kangaroo and Desktop.
            //On kangaroo, use different table and don't display image
            visionTable = NetworkTable.GetTable("SmartDashboard");

            Mat HsvIn = new Mat(), HsvOut = new Mat(), output = new Mat(), Temp = new Mat();
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            // Default HSV thresholds used when NetworkTables does not supply valid ones.
            double[] defaultLow  = new double[] { 50, 44, 193 };
            double[] defaultHigh = new double[] { 90, 255, 255 };

            VectorOfDouble arrayLow  = new VectorOfDouble(3);
            VectorOfDouble arrayHigh = new VectorOfDouble(3);

            // Crosshair endpoints (drawing is currently disabled; see ToDo below).
            Point TopMidPoint    = new Point((int)(ImageWidth / 2), 0);
            Point BottomMidPoint = new Point((int)(ImageWidth / 2), (int)ImageHeight);

            Point LeftMidPoint  = new Point(0, (int)(ImageHeight / 2));
            Point RightMidPoint = new Point((int)ImageWidth, (int)(ImageHeight / 2));

            Stopwatch sw = new Stopwatch();

            CameraWatcher cameraChecker = new CameraWatcher();

            int count = 0;

            // Background thread: publishes battery level and camera state every 5 s,
            // and relaunches the camera-restart helper when the camera reconnects.
            Thread timer = new Thread(() =>
            {
                while (true)
                {
                    // update kangaroo battery info
                    visionTable.PutNumber("KangarooBattery",
                                          System.Windows.Forms.SystemInformation.PowerStatus.BatteryLifePercent);

                    // camera states:
                    // 0 = Camera is found and working
                    // 1 = Camera is not found, waiting for reconnect to reinitialize
                    // 2 = Camera was found again, re-init was kicked off
                    int cameraState = cameraChecker.CheckState;
                    visionTable.PutNumber("CameraState", cameraState);
                    if (cameraState == 2)
                    {
                        // Camera reconnected: launch external exe to kill the process,
                        // set up the camera, and restart.
                        Process.Start("C:/Users/Shockwave/Desktop/NewKangaroo/cameraRestart.exe");
                    }

                    Thread.Sleep(5000);
                }
            });
            timer.Start();
            GC.KeepAlive(timer);
            int imageCount = 0;

            ImageBuffer im  = new ImageBuffer();
            Capture     cap = new Capture(0); //Change me to 1 to use external camera
            cap.FlipVertical = true;

            cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth, 1280);
            cap.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight, 720);

            ImageSaver saver = new ImageSaver();
            // Gaussian blur kernel sized from the blur radius.
            int  rdi        = 1;
            int  kernalSize = 6 * rdi + 1;
            Size ksize      = new Size(kernalSize, kernalSize);

            while (true)
            {
                count++;
                sw.Restart();
                cap.Grab();
                im.GyroAngle = visionTable.GetNumber("Gyro", 0.0);
                cap.Retrieve(im.Image);

                ImageBuffer image = im.Clone();

#if KANGAROO
                visionTable.PutNumber("KangarooHeartBeat", count);
#endif
                if (image == null || image.IsEmpty)
                {
                    image?.Dispose();
                    Thread.Yield();
                    continue;
                }

                // Pull HSV thresholds from NetworkTables, falling back to defaults when
                // the published arrays have the wrong length.
                double[] ntLow  = visionTable.GetNumberArray("HSVLow", defaultLow);
                double[] ntHigh = visionTable.GetNumberArray("HSVHigh", defaultHigh);

                if (ntLow.Length != 3)
                {
                    ntLow = defaultLow;
                }
                if (ntHigh.Length != 3)
                {
                    ntHigh = defaultHigh;
                }

                arrayLow.Clear();
                arrayLow.Push(ntLow);
                arrayHigh.Clear();
                arrayHigh.Push(ntHigh);

                // Blur, replacing the frame's Mat (the old one is disposed).
                Mat BlurTemp = new Mat();
                CvInvoke.GaussianBlur(image.Image, BlurTemp, ksize, rdi);
                Mat oldImage = image.Image;
                image.Image = BlurTemp;
                oldImage.Dispose();

                //HSV Filter
                CvInvoke.CvtColor(image.Image, HsvIn, Emgu.CV.CvEnum.ColorConversion.Bgr2Hsv);
                CvInvoke.InRange(HsvIn, arrayLow, arrayHigh, HsvOut);

                HsvOut.ConvertTo(Temp, DepthType.Cv8U);
                //Contours
                CvInvoke.FindContours(Temp, contours, null, RetrType.List, ChainApproxMethod.ChainApproxTc89Kcos);

                VectorOfVectorOfPoint convexHulls = new VectorOfVectorOfPoint(contours.Size);

                for (int i = 0; i < contours.Size; i++)
                {
                    CvInvoke.ConvexHull(contours[i], convexHulls[i]);
                }

                Rectangle? largestRectangle   = null;
                double     currentLargestArea = 0.0;

                //Filter contours
                for (int i = 0; i < convexHulls.Size; i++)
                {
                    VectorOfPoint contour = convexHulls[i];
                    VectorOfPoint polygon = new VectorOfPoint(convexHulls.Size);
                    CvInvoke.ApproxPolyDP(contour, polygon, 10, true);

                    // Filter if shape has more than 4 corners after contour is applied
                    if (polygon.Size != 4)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    // Filter if not convex
                    if (!CvInvoke.IsContourConvex(polygon))
                    {
                        polygon.Dispose();
                        continue;
                    }

                    ///////////////////////////////////////////////////////////////////////
                    // Filter if there isn't a nearly horizontal line
                    ///////////////////////////////////////////////////////////////////////
                    int numHorizontal = 0;
                    for (int j = 0; j < 4; j++)
                    {
                        double dx    = polygon[j].X - polygon[(j + 1) % 4].X;
                        double dy    = polygon[j].Y - polygon[(j + 1) % 4].Y;
                        double slope = double.MaxValue;

                        if (dx != 0)
                        {
                            slope = Math.Abs(dy / dx);
                        }

                        double nearlyHorizontalSlope = Math.Tan(ToRadians(20));
                        if (slope < nearlyHorizontalSlope)
                        {
                            numHorizontal++;
                        }
                    }

                    if (numHorizontal < 1)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    ///////////////////////////////////////////////////////////////////////
                    // Filter if polygon is above a set limit. This should remove overhead lights and windows
                    ///////////////////////////////////////////////////////////////////////
                    Rectangle bounds = CvInvoke.BoundingRectangle(polygon);
                    CvInvoke.PutText(image.Image, "Vertical (>=300): " + (bounds.Location.Y).ToString(), TextPoint, FontFace.HersheyPlain, 2, Green);
                    int topY = 300;
                    if (bounds.Location.Y < topY)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    CvInvoke.PutText(image.Image, "Image Height (45-115) and Width (65-225): " + bounds.Height.ToString() + " , " + bounds.Width, TextPoint2, FontFace.HersheyPlain, 2, Green);

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by minimum and maximum height
                    ///////////////////////////////////////////////////////////////////////
                    if (bounds.Height < 45 || bounds.Height > 115)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by minimum and maximum width
                    ///////////////////////////////////////////////////////////////////////
                    if (bounds.Width < 65 || bounds.Width > 225)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by height to width ratio
                    ///////////////////////////////////////////////////////////////////////
                    double ratio = (double)bounds.Height / bounds.Width;
                    CvInvoke.PutText(image.Image, "Ratio: " + ratio.ToString(), TextPoint3, FontFace.HersheyPlain, 2, Green);
                    if (ratio > 1.0 || ratio < .3)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    ///////////////////////////////////////////////////////////////////////
                    // Filter by area to vertical position ratio
                    ///////////////////////////////////////////////////////////////////////
                    double area          = CvInvoke.ContourArea(contour);
                    double areaVertRatio = area / (1280 - bounds.Location.Y);
                    CvInvoke.PutText(image.Image, "Area/Vert Ratio (8-19): " + areaVertRatio.ToString(), TextPoint4, FontFace.HersheyPlain, 2, Green);

                    if (areaVertRatio < 8 || areaVertRatio > 19)
                    {
                        polygon.Dispose();
                        continue;
                    }

                    CvInvoke.Rectangle(image.Image, bounds, Blue, 2);

                    if (area > currentLargestArea)
                    {
                        // FIX: record the winning area. The original never updated
                        // currentLargestArea, so largestRectangle ended up being the LAST
                        // candidate that passed the filters rather than the largest one.
                        currentLargestArea = area;
                        largestRectangle   = bounds;
                    }

                    polygon.Dispose();
                }
                visionTable.PutBoolean("TargetFound", largestRectangle != null);

                if (largestRectangle != null)
                {
                    ProcessData(largestRectangle.Value, image);
                    CvInvoke.Rectangle(image.Image, largestRectangle.Value, Red, 5);
                }

                //ToDo, Draw Crosshairs
                //CvInvoke.Line(image.Image, TopMidPoint, BottomMidPoint, Blue, 3);
                //CvInvoke.Line(image.Image, LeftMidPoint, RightMidPoint, Blue, 3);

                imageCount++;

                // Uncomment below to see the HSV window
                //CvInvoke.Imshow("HSV", HsvOut);
                CvInvoke.Imshow("MainWindow", image.Image);
                image.Dispose();

                // Cleanup: release this frame's contour/hull vectors before the next grab.
                for (int i = 0; i < contours.Size; i++)
                {
                    contours[i].Dispose();
                }
                contours.Clear();

                for (int i = 0; i < convexHulls.Size; i++)
                {
                    convexHulls[i].Dispose();
                }
                convexHulls.Dispose();

                CvInvoke.WaitKey(1);
            }
        }