Example #1
        public static System.Drawing.Bitmap Process(Bitmap imgRecv, bool showBestOnly, bool showCrosshairs, out bool locked)
        {
            Image<Bgr, Byte> img = new Image<Bgr, byte>(imgRecv).Resize(352,288,Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
            //Convert the image to grayscale and filter out the noise
            Image<Gray, Byte> gray = img.Convert<Gray, Byte>().PyrDown().PyrUp();

            Gray cannyThreshold = new Gray(250);
            Gray cannyThresholdLinking = new Gray(250);
            Gray circleAccumulatorThreshold = new Gray(120);

            CircleF[] circles = gray.HoughCircles(
                cannyThreshold,
                circleAccumulatorThreshold,
                5.0, //Resolution of the accumulator used to detect centers of the circles
                10.0, //min distance
                5, //min radius
                0 //max radius
                )[0]; //Get the circles from the first channel

            Image<Gray, Byte> cannyEdges = gray.Canny(100, 80);
            LineSegment2D[] lines = cannyEdges.HoughLinesBinary(
                1, //Distance resolution in pixel-related units
                Math.PI / 45.0, //Angle resolution measured in radians.
                20, //threshold
                30, //min Line width
                10 //gap between lines
                )[0]; //Get the lines from the first channel

            #region Find triangles and rectangles
            List<Triangle2DF> triangleList = new List<Triangle2DF>();
            List<MCvBox2D> boxList = new List<MCvBox2D>();

            using (MemStorage storage = new MemStorage()) //allocate storage for contour approximation
                for (Contour<Point> contours = cannyEdges.FindContours(); contours != null; contours = contours.HNext)
                {
                    Contour<Point> currentContour = contours.ApproxPoly(contours.Perimeter * 0.05, storage);

                    if (contours.Area > 350) //only consider contours with area greater than 350
                    {
                        if (currentContour.Total == 3) //The contour has 3 vertices, it is a triangle
                        {
                            Point[] pts = currentContour.ToArray();
                            triangleList.Add(new Triangle2DF(
                               pts[0],
                               pts[1],
                               pts[2]
                               ));
                        }
                        else if (currentContour.Total == 4) //The contour has 4 vertices.
                        {
                            #region determine whether the contour is approximately rectangular (sum of absolute exterior angles within [300, 420] degrees)
                            bool isRectangle = true;
                            Point[] pts = currentContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                            double rangle = 0;
                            for (int i = 0; i < edges.Length; i++)
                            {
                                rangle += Math.Abs(
                                   edges[(i + 1) % edges.Length].GetExteriorAngleDegree(edges[i]));

                            }
                            if (rangle < 300 || rangle > 420)
                                isRectangle = false;
                            #endregion

                            if (isRectangle) boxList.Add(currentContour.GetMinAreaRect());
                        }
                    }
                }
            #endregion

            boxList = boxList.OrderByDescending(m => m.size.Height * m.size.Width).ToList();

            if (showBestOnly && boxList.Count > 0)
            {
                //foreach (MCvBox2D box in boxList)

                var box = boxList.First();

                Cross2DF c = new Cross2DF(box.center, (float)box.size.Height * .2F, (float)box.size.Width * .2F);
                img.Draw(box, new Bgr(Color.Blue), 4);
                img.Draw(c, new Bgr(Color.Red), 2);

                //img.Draw(new MCvBox2D(new PointF((float)img.Width / 2, (float)img.Height / 2), new SizeF((float)img.Width / 4, (float)img.Height / 4), 0F),new Bgr(Color.White),1);
            }
            else
            {
                foreach (MCvBox2D box in boxList)
                {
                    Cross2DF c = new Cross2DF(box.center, (float)box.size.Height * .2F, (float)box.size.Width * .2F);
                    img.Draw(box, new Bgr(Color.Blue), 4);
                    img.Draw(c, new Bgr(Color.Red), 2);
                }

            }
            #region draw crosshairs and lock indicator

            locked = false;
            Bitmap im = img.ToBitmap();

            if (showCrosshairs)
            {
                using (Graphics g = Graphics.FromImage(im))
                {
                    g.FillRectangle(new SolidBrush(Color.FromArgb(50, 255, 255, 255)), new Rectangle(new Point(im.Width / 2 - im.Width / 8, im.Height / 2 - im.Height / 8), new Size(img.Width / 4, img.Height / 4)));
                    g.DrawRectangle(new Pen(new SolidBrush(Color.FromArgb(140, 255, 255, 255)), 2), new Rectangle(new Point(im.Width / 2 - im.Width / 8, im.Height / 2 - im.Height / 8), new Size(img.Width / 4, img.Height / 4)));
                    g.DrawLine(new Pen(new SolidBrush(Color.FromArgb(140, 255, 255, 255)), 1), new Point(im.Width / 2 - 10, im.Height / 2), new Point(im.Width / 2 + 10, im.Height / 2));
                    g.DrawLine(new Pen(new SolidBrush(Color.FromArgb(140, 255, 255, 255)), 1), new Point(im.Width / 2, im.Height / 2 - 10), new Point(im.Width / 2, im.Height / 2 + 10));
                }
            }
            foreach (MCvBox2D box in boxList)
            {
                if (Math.Abs(box.center.X - im.Width / 2) < 20 && Math.Abs(box.center.Y - im.Height / 2) < 20)
                {
                    using (Graphics g = Graphics.FromImage(im))
                    {
                        g.FillRectangle(new SolidBrush(Color.FromArgb(100, 255, 0, 0)), new Rectangle(0, 0, im.Width, im.Height));
                        g.DrawString("Locked on target", new Font(FontFamily.GenericMonospace, 24f, FontStyle.Bold), new SolidBrush(Color.White), new PointF(13F, 5F));
                        locked = true;
                        break;
                    }
                }
            }
            #endregion

            return im;
        }
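A minimal usage sketch (not part of the original example): assuming an Emgu CV 2.x Capture as the frame source and a PictureBox named pictureBox1 on the form, Process() could be driven like this.

            // Hypothetical wiring, for illustration only: grab one frame, run the
            // detector, and display the annotated bitmap.
            using (Capture capture = new Capture())                 // default camera (assumed)
            {
                Bitmap frame = capture.QueryFrame().ToBitmap();
                bool locked;
                Bitmap annotated = Process(frame, true, true, out locked);
                pictureBox1.Image = annotated;                      // assumed PictureBox on the form
                if (locked)
                    Console.WriteLine("Target centered");
            }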
Example #2
        static void Main(string[] args)
        {
            String apiKey = "847e6315f892e21449da5f4077c5104f";
            String apiSecret = "BmskojfFyrZVQhkLfNSnRzX-lK8musO6";
            FaceService faceService = new FaceService(apiKey, apiSecret);

            string filePath = "D:\\Codes\\datasets\\face_morph\\bbt.jpg";
            DetectResult detectResult = faceService.Detection_DetectImg(filePath);

            Image<Bgr, Byte> srcImg = new Image<Bgr, Byte>(filePath);

            for(int cnt=0; cnt < detectResult.face.Count; cnt++)
            {
                string pointFileName = String.Format("D:\\Codes\\datasets\\face_morph\\result_bbt_face_{0}.txt", cnt);
                FileStream fileStream = new FileStream(pointFileName, FileMode.Create);
                StreamWriter streamWriter = new StreamWriter(fileStream);

                Rectangle faceRect = new Rectangle(
                    (int)(detectResult.face[cnt].position.center.x * srcImg.Width / 100 - detectResult.face[cnt].position.width * srcImg.Width * 0.5 / 100),
                    (int)(detectResult.face[cnt].position.center.y * srcImg.Height / 100 - detectResult.face[cnt].position.height * srcImg.Height * 0.5 / 100),
                    (int)(detectResult.face[cnt].position.width * srcImg.Width / 100),
                    (int)(detectResult.face[cnt].position.height * srcImg.Height / 100));

                Image<Bgr, byte> faceImg = srcImg.GetSubRect(faceRect);

                string fileName = String.Format("D:\\Codes\\datasets\\face_morph\\result_bbt_face_{0}.jpg", cnt);
                faceImg.Save(fileName);

                IList<FaceppSDK.Point> featurePoints = new List<FaceppSDK.Point>();
                //featurePoints.Add(detectResult.face[cnt].position.center);
                FaceppSDK.Point tempPoint1 = new FaceppSDK.Point();
                tempPoint1.x = (detectResult.face[cnt].position.eye_left.x
                    - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
                tempPoint1.y = (detectResult.face[cnt].position.eye_left.y
                    - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
                featurePoints.Add(tempPoint1);

                FaceppSDK.Point tempPoint2 = new FaceppSDK.Point();
                tempPoint2.x = (detectResult.face[cnt].position.eye_right.x
                    - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
                tempPoint2.y = (detectResult.face[cnt].position.eye_right.y
                    - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
                featurePoints.Add(tempPoint2);

                FaceppSDK.Point tempPoint3 = new FaceppSDK.Point();
                tempPoint3.x = (detectResult.face[cnt].position.mouth_left.x
                    - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
                tempPoint3.y = (detectResult.face[cnt].position.mouth_left.y
                    - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
                featurePoints.Add(tempPoint3);

                FaceppSDK.Point tempPoint4 = new FaceppSDK.Point();
                tempPoint4.x = (detectResult.face[cnt].position.mouth_right.x
                    - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
                tempPoint4.y = (detectResult.face[cnt].position.mouth_right.y
                    - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
                featurePoints.Add(tempPoint4);

                FaceppSDK.Point tempPoint5 = new FaceppSDK.Point();
                tempPoint5.x = (detectResult.face[cnt].position.nose.x
                    - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
                tempPoint5.y = (detectResult.face[cnt].position.nose.y
                    - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
                featurePoints.Add(tempPoint5);

                foreach (FaceppSDK.Point featurePoint in featurePoints)
                {
                    streamWriter.WriteLine(featurePoint.x.ToString());
                    streamWriter.WriteLine(featurePoint.y.ToString());

                    System.Drawing.PointF point = new System.Drawing.PointF((float)featurePoint.x * srcImg.Width / 100,
                        (float)featurePoint.y * srcImg.Height / 100);
                    Cross2DF cross = new Cross2DF(point, (float)3.0, (float)3.0);
                    srcImg.Draw(cross, new Bgr(0, 255, 0), 3);
                }

                streamWriter.Flush();
                streamWriter.Close();
                fileStream.Close();
                //srcImg.Save("D:\\Codes\\datasets\\face_morph\\result_bbt.jpg");
            }
        }
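Face++ reports face positions as percentages of the image width and height, which is why the code above scales by the image size and divides by 100. A small helper sketch (an assumption for illustration, not part of the FaceppSDK) that makes the conversion explicit:

        // Hypothetical helper: convert a Face++ percentage coordinate into pixel coordinates.
        static System.Drawing.PointF PercentToPixel(double xPercent, double yPercent, int imageWidth, int imageHeight)
        {
            return new System.Drawing.PointF(
                (float)(xPercent * imageWidth / 100.0),
                (float)(yPercent * imageHeight / 100.0));
        }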
Example #3
        void ComputeDenseOpticalFlow()
        {
            // Compute dense optical flow using the Horn-Schunck algorithm
            velx = new Image<Gray, float>(faceGrayImage.Size);
            vely = new Image<Gray, float>(faceNextGrayImage.Size);

            OpticalFlow.HS(faceGrayImage, faceNextGrayImage, true, velx, vely, 0.1d, new MCvTermCriteria(100));

            #region Dense Optical Flow Drawing
            Size winSize = new Size(10, 10);
            vectorFieldX = (int)Math.Round((double)faceGrayImage.Width / winSize.Width);
            vectorFieldY = (int)Math.Round((double)faceGrayImage.Height / winSize.Height);
            sumVectorFieldX = 0f;
            sumVectorFieldY = 0f;
            vectorField = new PointF[vectorFieldX][];
            for (int i = 0; i < vectorFieldX; i++)
            {
                vectorField[i] = new PointF[vectorFieldY];
                for (int j = 0; j < vectorFieldY; j++)
                {
                    Gray velx_gray = velx[j * winSize.Height, i * winSize.Width];
                    float velx_float = (float)velx_gray.Intensity;
                    Gray vely_gray = vely[j * winSize.Height, i * winSize.Width];
                    float vely_float = (float)vely_gray.Intensity;
                    sumVectorFieldX += velx_float;
                    sumVectorFieldY += vely_float;
                    vectorField[i][j] = new PointF(velx_float, vely_float);

                    Cross2DF cr = new Cross2DF(
                        new PointF((i*winSize.Width) +trackingArea.X,
                                   (j*winSize.Height)+trackingArea.Y),
                                   1, 1);
                    opticalFlowFrame.Draw(cr, new Bgr(Color.Red), 1);

                    LineSegment2D ci = new LineSegment2D(
                        new Point((i*winSize.Width)+trackingArea.X,
                                  (j * winSize.Height)+trackingArea.Y),
                        new Point((int)((i * winSize.Width)  + trackingArea.X + velx_float),
                                  (int)((j * winSize.Height) + trackingArea.Y + vely_float)));
                    opticalFlowFrame.Draw(ci, new Bgr(Color.Yellow), 1);

                }
            }
            #endregion
        }
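OpticalFlow.HS expects two consecutive grayscale frames of the same size. A short setup sketch (assumptions: an Emgu CV 2.x Capture named capture, a Rectangle field trackingArea, and the image fields from the example above):

            // Hypothetical setup, for illustration only.
            faceGrayImage = capture.QueryGrayFrame().Copy(trackingArea);      // frame at time t
            faceNextGrayImage = capture.QueryGrayFrame().Copy(trackingArea);  // frame at time t+1
            opticalFlowFrame = capture.QueryFrame();                          // color frame to draw the vectors on
            ComputeDenseOpticalFlow();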
Example #4
        // =========================================================
        /// <summary>
        /// Draws cross markers on the image at the given points; each point is interpreted as an offset from the center of the video frame.
        /// </summary>
        public void DrawMarks(ref Bitmap image, List<PointF> points, Color color, int size)
        {
            try {
                using (Image<Bgr, Byte> img = new Image<Bgr, byte>(image)) {
                    foreach (var pt in points) {
                        PointF p = new PointF(pt.X + videoCapture.FrameCenterX, videoCapture.FrameCenterY-pt.Y);
                        var cross = new Cross2DF(p, size, size);
                        img.Draw(cross, new Bgr(color), 2);
                    }
                    image = img.ToBitmap();
                }
            } catch {

            }
        }
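A brief usage sketch (assumptions: a current camera frame as a Bitmap and a PictureBox named pictureBox1). Because DrawMarks treats each point as an offset from the frame center, (0, 0) marks the center itself.

            // Hypothetical call site, for illustration only.
            Bitmap frame = new Bitmap(640, 480);   // stand-in for the current camera frame
            List<PointF> points = new List<PointF> { new PointF(0, 0), new PointF(25, 10) };
            DrawMarks(ref frame, points, Color.Red, 6);
            pictureBox1.Image = frame;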
Example #5
        void DrawDenseOpticalFlow()
        {
            int winSizeX = 2;
            int winSizeY = 2;
            vectorFieldX = (int)Math.Round((double)_currentTrackedGrayImage.Width / winSizeX);
            vectorFieldY = (int)Math.Round((double)_currentTrackedGrayImage.Height / winSizeY);
            sumVectorFieldX = 0f;
            sumVectorFieldY = 0f;
            vectorField = new PointF[vectorFieldX][];
            for (int i = 0; i < vectorFieldX; i++)
            {
                vectorField[i] = new PointF[vectorFieldY];
                for (int j = 0; j < vectorFieldY; j++)
                {
                    Gray velx_gray = velx[j * winSizeY, i * winSizeX];
                    float velx_float = (float)velx_gray.Intensity;
                    Gray vely_gray = vely[j * winSizeY, i * winSizeX];
                    float vely_float = (float)vely_gray.Intensity;
                    sumVectorFieldX += velx_float;
                    sumVectorFieldY += vely_float;
                    vectorField[i][j] = new PointF(velx_float, vely_float);

                    Cross2DF cr = new Cross2DF(
                         new PointF((i * winSizeX) + _trackingArea.X,
                                        (j * winSizeY) + _trackingArea.Y),
                                        1, 1);
                    _opticalFlowFrame.Draw(cr, new Bgr(Color.Red), 1);

                    LineSegment2D ci = new LineSegment2D(
                         new Point((i * winSizeX) + _trackingArea.X,
                                      (j * winSizeY) + _trackingArea.Y),
                         new Point((int)((i * winSizeX) + _trackingArea.X + velx_float),
                                      (int)((j * winSizeY) + _trackingArea.Y + vely_float)));
                    _opticalFlowFrame.Draw(ci, new Bgr(Color.Yellow), 1);
                }
            }
        }
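The loop accumulates sumVectorFieldX and sumVectorFieldY but does not use them in this method. One plausible follow-up (an assumption, not from the original source) is to reduce them to an average motion vector for the tracked region:

            // Hypothetical continuation: average flow over all sampled grid cells.
            int cellCount = vectorFieldX * vectorFieldY;
            PointF averageFlow = new PointF(sumVectorFieldX / cellCount, sumVectorFieldY / cellCount);
            // e.g. averageFlow.X > 0 suggests net rightward motion inside _trackingArea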
        private void imgImageBox_MouseClick(object sender, MouseEventArgs e)
        {
            //Guid currentCoordinates;
            if (lblCurrentPoint.Text == "Draw Point")
            {
                if (ptCounter == 4)
                {
                    imageTransform = imageBackup;
                    imgImageBox.Image = imageBackup;                        //ADDED this!!!!!
                    ptCounter = 0;

                }
                else if (ptCounter < 4)
                {
                    srcpt[ptCounter].X = e.X; //* _orgFrameWidth/_frameWidth;
                    srcpt[ptCounter].Y = e.Y; //* _orgFrameHeight/_frameHeight;

                    //sd["/coordinates/pts"] = new SharedDictionary.Vector();
                    //sd["/coordinates/pts#0"] = new Point(e.X, e.Y);
                    ////this.sd["/coordinate/pts"] = new SharedDictionary.Vector();
                    ////this.sd["/coordinate/pts#0"] = e.X.ToString();
                    //this.sd["/value"] = 5;

                    Cross2DF scrCrossTest = new Cross2DF(srcpt[ptCounter], 5, 5);
                    imageTransform.Draw(scrCrossTest, new Bgr(Color.Red), 2);
                    imgImageBox.Image = imageTransform;//.Resize(_frameWidth, _frameHeight);

                    ptCounter += 1;
                }
            }
            else if (lblCurrentPoint.Text == "Coordinate Transform -->")
            {
                //lblCurrentPoint.Text = "Draw Point";
                //lblCurrentPoint.Text = "Click on left side";
                //currentCoordinates = Guid.NewGuid();
                ////sd["/coordinates/pts"] = new SharedDictionary.Vector();
                ////sd["/coordinates/pts#0"] = new Point ( e.X, e.Y );
                //sd["/coordinates#-0"] = currentCoordinates;

                leftSide[0].X = e.X; //* _orgFrameWidth/_frameWidth;
                leftSide[0].Y = e.Y; //* _orgFrameHeight/_frameHeight;

                CvArray<Byte> arr;
                Matrix<double> m = new Matrix<double>(1, 1, 2);
                Matrix<double> n = new Matrix<double>(1, 1, 2);

                double[] data = { leftSide[0].X, leftSide[0].Y};
                m.Data[0, 0] = e.X;
                m.Data[0, 1] = e.Y;

                //Matrix<double> leftSidePtr = new Matrix<double>(data).Transpose();
                //Matrix<double> leftSideTransformed = new Matrix<double>(1, 3);
                //Matrix<double> leftSidePtrT = new Matrix<double>();
                //Matrix<double> wMat = new Matrix<double>(warpMat);
                //CvInvoke.cvTranspose(leftSidePtr, leftSidePtrT);
                //CvInvoke.cvTranspose(leftSideTransformed, leftSideTransformed);
                CvInvoke.cvPerspectiveTransform(m, n, warpMat.Ptr);

                leftSideT[0].X = (float)n.Data[0,0];
                leftSideT[0].Y = (float)n.Data[0, 1];

                Cross2DF scrCrossTest2 = new Cross2DF(leftSideT[0], 5, 5);
                imagePerspective.Draw(scrCrossTest2, new Bgr(Color.Green), 2);
                imageBoxPers.Image = imagePerspective;//.Resize(_frameWidth, _frameHeight);

                Cross2DF scrCrossTest1 = new Cross2DF(leftSide[0], 5, 5);
                imageTransform.Draw(scrCrossTest1, new Bgr(Color.Green), 2);
                imgImageBox.Image = imageTransform;//.Resize(_frameWidth, _frameHeight);

                lblTouchPt.Text = "[" + leftSideT[0].X + " , " + leftSideT[0].Y + "]";

            }
        }
        private void imageBoxPers_MouseClick(object sender, MouseEventArgs e)
        {
            if (lblCurrentPoint.Text == "Coordinate Transform <--")
            {
                //lblCurrentPoint.Text = "Draw Point";
                //lblCurrentPoint.Text = "Click on left side";
                rightSide[0].X = e.X; //* _orgFrameWidth/_frameWidth;
                rightSide[0].Y = e.Y; //* _orgFrameHeight/_frameHeight;
                //CvInvoke.cvSetData(leftSide, e.X e.Y 1, //leftSide = [e.X, e.Y, 1];

                CvArray<Byte> arr;
                Matrix<double> m = new Matrix<double>(1, 1, 2);
                Matrix<double> n = new Matrix<double>(1, 1, 2);

                double[] data = { rightSide[0].X, rightSide[0].Y };
                m.Data[0, 0] = e.X;
                m.Data[0, 1] = e.Y;

                CvInvoke.cvPerspectiveTransform(m, n, invWarpMat.Ptr);

                rightSideT[0].X = (float)n.Data[0, 0];
                rightSideT[0].Y = (float)n.Data[0, 1];

                Cross2DF scrCrossTest2 = new Cross2DF(rightSideT[0], 5, 5);
                imageTransform.Draw(scrCrossTest2, new Bgr(Color.Green), 2);
                imgImageBox.Image = imageTransform;//.Resize(_frameWidth, _frameHeight);

                Cross2DF scrCrossTest1 = new Cross2DF(rightSide[0], 5, 5);
                imagePerspective.Draw(scrCrossTest1, new Bgr(Color.Green), 2);
                imageBoxPers.Image = imagePerspective;//.Resize(_frameWidth, _frameHeight);
            }
        }
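Both mouse handlers repeat the same cvPerspectiveTransform pattern: pack the clicked point into a 1x1 two-channel matrix, push it through warpMat or invWarpMat, and read the result back. A small helper sketch (an assumption, not present in the original form) that captures that pattern:

        // Hypothetical helper: map a single point through a 3x3 homography,
        // mirroring the m/n matrices used in the handlers above.
        private PointF TransformPoint(PointF p, Matrix<double> homography)
        {
            Matrix<double> src = new Matrix<double>(1, 1, 2);
            Matrix<double> dst = new Matrix<double>(1, 1, 2);
            src.Data[0, 0] = p.X;
            src.Data[0, 1] = p.Y;
            CvInvoke.cvPerspectiveTransform(src, dst, homography.Ptr);
            return new PointF((float)dst.Data[0, 0], (float)dst.Data[0, 1]);
        }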
        private void btnTransform_Click(object sender, EventArgs e)
        {
            PointF[] srcTri = new PointF[] { srcpt[0], srcpt[1], srcpt[2], srcpt[3] };

            dstpt[(int)Position.BL].X = srcpt[0].X;
            dstpt[(int)Position.BL].Y = srcpt[0].Y;
            dstpt[(int)Position.TL].X = srcpt[0].X;
            dstpt[(int)Position.TL].Y = srcpt[1].Y;
            dstpt[(int)Position.TR].X = srcpt[2].X;
            dstpt[(int)Position.TR].Y = srcpt[1].Y;
            dstpt[(int)Position.BR].X = srcpt[2].X;
            dstpt[(int)Position.BR].Y = srcpt[0].Y;

            lblBL.Text = "[" + (int)dstpt[(int)Position.BL].X + " , " + (int)dstpt[(int)Position.BL].Y + "]";
            lblTL.Text = "[" + (int)dstpt[(int)Position.TL].X + " , " + (int)dstpt[(int)Position.TL].Y + "]";
            lblTR.Text = "[" + (int)dstpt[(int)Position.TR].X + " , " + (int)dstpt[(int)Position.TR].Y + "]";
            lblBR.Text = "[" + (int)dstpt[(int)Position.BR].X + " , " + (int)dstpt[(int)Position.BR].Y + "]";

            Cross2DF dstCross1 = new Cross2DF(dstpt[(int)Position.BL], 5, 5);
            Cross2DF dstCross2 = new Cross2DF(dstpt[(int)Position.TL], 5, 5);
            Cross2DF dstCross3 = new Cross2DF(dstpt[(int)Position.TR], 5, 5);
            Cross2DF dstCross4 = new Cross2DF(dstpt[(int)Position.BR], 5, 5);

            imageTransform.Draw(dstCross1, new Bgr(Color.Orange), 2);
            imageTransform.Draw(dstCross2, new Bgr(Color.Orange), 2);
            imageTransform.Draw(dstCross3, new Bgr(Color.Orange), 2);
            imageTransform.Draw(dstCross4, new Bgr(Color.Orange), 2);

            imgImageBox.Image = imageTransform;//.Resize(_frameWidth, _frameHeight);

            PointF[] dstTri = dstpt;//new PointF[] { dstpt[Position.BL], dstpt[Position.TL], dstpt[Position.TR], dstpt[Position.BL] };

            //CvInvoke.cvGetAffineTransform(srcTri, dstTri, warpMat);
            CvInvoke.cvGetPerspectiveTransform(srcTri, dstTri, warpMat);

            //INVERT_METHOD method = new INVERT_METHOD cv;
            CvInvoke.cvInvert(warpMat, invWarpMat, Emgu.CV.CvEnum.INVERT_METHOD.CV_LU);

            MCvScalar fillvar = new MCvScalar(0);
            imageBoxPers.Image = new Image<Bgr, byte>(_frameWidth, _frameHeight, new Bgr(0, 0, 0));
            //CvInvoke.cvWarpAffine(imgImageBox.Image.Ptr, imageBoxPers.Image.Ptr, warpMat, 0, fillvar);
            CvInvoke.cvWarpPerspective(imgImageBox.Image.Ptr, imageBoxPers.Image.Ptr, warpMat, 0, fillvar);
            //CvInvoke.cvWarpPerspective(imgImageBox.Image.Ptr, imagePerspective.Ptr, warpMat, 0, fillvar);
            imagePerspective = new Image<Bgr,byte>(imageBoxPers.Image.Bitmap);
            //imageBoxPers.Image = imagePerspective;
        }
        private void btnDrawPt_Click(object sender, EventArgs e)
        {
            int PtX;
            int PtY;
            Int32.TryParse(txtPtX.Text, out PtX);
            Int32.TryParse(txtPtY.Text, out PtY);

            //lblTouchPt.Text = (PtY / tableTopHeight) * ((int)dstpt[(int)Position.BL].Y - (int)dstpt[(int)Position.TL].Y) + (int)dstpt[(int)Position.BL].Y;
            //lblTouchPt.Text = (PtX / tableTopHeight) * ((int)dstpt[(int)Position.BL].X - (int)dstpt[(int)Position.TL].X) + (int)dstpt[(int)Position.BL].X;

            float percentFromLeft = (float) PtX / tableTopWidth;
            float percentFromBottom = (float) PtY / tableTopHeight;

            PointF pointToDraw = new PointF();
            pointToDraw.X = (dstpt[(int)Position.BR].X - dstpt[(int)Position.BL].X) * percentFromLeft + dstpt[(int)Position.BL].X;
            pointToDraw.Y = (dstpt[(int)Position.TL].Y - dstpt[(int)Position.BL].Y) * percentFromBottom + dstpt[(int)Position.BL].Y;

            Cross2DF crossToDraw = new Cross2DF(pointToDraw, 5, 5);
            Image<Bgr, Byte> img = new Image<Bgr, byte>(imageBoxPers.Image.Bitmap);
            img.Draw(crossToDraw, new Bgr(Color.Purple), 3);
            imageBoxPers.Image = img;

            Matrix<double> m = new Matrix<double>(1, 1, 2);
            Matrix<double> n = new Matrix<double>(1, 1, 2);

            //double[] data = { pointToDraw.X, pointToDraw.Y };
            m.Data[0, 0] = pointToDraw.X;
            m.Data[0, 1] = pointToDraw.Y;

            CvInvoke.cvPerspectiveTransform(m, n, invWarpMat.Ptr);

            PointF pointToDrawT = new PointF();

            pointToDrawT.X = (float)n.Data[0, 0];
            pointToDrawT.Y = (float)n.Data[0, 1];

            Cross2DF scrCrossTest2 = new Cross2DF(pointToDrawT, 5, 5);
            imageTransform.Draw(scrCrossTest2, new Bgr(Color.Purple), 2);
            imgImageBox.Image = imageTransform;//.Resize(_frameWidth, _frameHeight);
        }
        private void processImageFrisbee(object sender, EventArgs arg)
        {
            int FrisbeeLeft_X = 0;
            int FrisbeeLeft_Y = 0;
            int FrisbeeRight_X = 0;
            int FrisbeeRight_Y = 0;
            int hMin = H_MIN;
            int hMax = H_MAX;
            int sMin = S_MIN;
            int sMax = S_MAX;
            int vMin = V_MIN;
            int vMax = V_MAX;
            Image<Bgr, Byte> image = capture.QueryFrame();
            Image<Hsv, byte> hsvImage;
            Image<Gray, byte> mask;
            Image<Gray, byte> leftFrisbee;
            Image<Gray, byte> rightFrisbee;

            try
            {
                // Convert the image to HSV
                hsvImage = image.Convert<Hsv, byte>();

                #if USE_SLIDERS
                // Threshold the HSV image for targets
                readTrackBarDelegate readBar = new readTrackBarDelegate(readTrackBar);

                // read the track bar positions
                hMin = (int) this.Invoke(readBar, bar_hMin);
                hMax = (int) this.Invoke(readBar, bar_hMax);
                sMin = (int) this.Invoke(readBar, bar_sMin);
                sMax = (int) this.Invoke(readBar, bar_sMax);
                vMin = (int) this.Invoke(readBar, bar_vMin);
                vMax = (int) this.Invoke(readBar, bar_vMax);
                #endif

                // mask the image for the Frisbees
                mask = hsvImage.InRange(new Hsv(hMin, sMin, vMin), new Hsv(hMax, sMax, vMax));

                // Filter image
                mask._Dilate(4);
                mask._Erode(4);

                Matrix<float> imgMatHorizontal = new Matrix<float>(HEIGHT, 1, 1);

                int FrisbeeMidPoint = findMidPointBetweenFrisbees(mask);

                // find the left Frisbee X position
                FrisbeeLeft_X = findMeanIndex(0, FrisbeeMidPoint, m_sum_col);

                // find the right Frisbee X position
                FrisbeeRight_X = findMeanIndex(FrisbeeMidPoint, WIDTH, m_sum_col);

                // get an image of the left Frisbee
                Rectangle leftfrisbeeRect = new Rectangle(0, 0, FrisbeeMidPoint, HEIGHT);
                leftFrisbee = mask.Copy(leftfrisbeeRect);

                // sum in the horizontal direction for the Y position
                leftFrisbee.Reduce<float>(imgMatHorizontal, Emgu.CV.CvEnum.REDUCE_DIMENSION.SINGLE_COL, Emgu.CV.CvEnum.REDUCE_TYPE.CV_REDUCE_SUM);

                // read the horizontal sum
                for (int horIndex = 0; horIndex < HEIGHT; horIndex++)
                    m_sum_row[horIndex] = (long) imgMatHorizontal[horIndex, 0];

                // find the Y position
                FrisbeeLeft_Y = findMeanIndex(0, HEIGHT, m_sum_row);

                // get an image of the right Frisbee
                Rectangle rightfrisbeeRect = new Rectangle(FrisbeeMidPoint, 0, WIDTH - FrisbeeMidPoint, HEIGHT);
                rightFrisbee = mask.Copy(rightfrisbeeRect);

                // sum in the horizontal direction for the Y position
                rightFrisbee.Reduce<float>(imgMatHorizontal, Emgu.CV.CvEnum.REDUCE_DIMENSION.SINGLE_COL, Emgu.CV.CvEnum.REDUCE_TYPE.CV_REDUCE_SUM);

                // read the horizontal sum
                for (int horIndex = 0; horIndex < HEIGHT; horIndex++)
                    m_sum_row[horIndex] = (long) imgMatHorizontal[horIndex, 0];

                // find the Y position
                FrisbeeRight_Y = findMeanIndex(0, HEIGHT, m_sum_row);

                graphFloatArray(zgc_col, "Sum Row", m_sum_row, m_sum_row.Length, true);
                graphFloatArray(zgc_row, "Sum Column", m_sum_col, m_sum_col.Length, true);

                // show the Frisbees in the image (X, Y)
                Bgr bgr = new Bgr(Color.Red);

                PointF pointA = new PointF(FrisbeeLeft_X, FrisbeeLeft_Y);
                Cross2DF cross2DFA = new Cross2DF(pointA, 10, 10);
                image.Draw(cross2DFA, bgr, 2);

                PointF pointB = new PointF(FrisbeeRight_X, FrisbeeRight_Y);
                Cross2DF cross2DFB = new Cross2DF(pointB, 10, 10);
                image.Draw(cross2DFB, bgr, 2);

                // Print out the information about the first rect
                //if (boxList.Count != 0)
                //{
                //   double offset = (boxList[centerRectNum].X + boxList[centerRectNum].Width / 2.0 - imageWidth / 2);
                //   NetworkTable.getTable("SmartDashboard").putNumber("Frisbee Offset", offset);
                //   NetworkTable.getTable("SmartDashboard").putNumber("Frisbee Size", boxList[centerRectNum].Width);
                //}
                //else
                //{
                //   NetworkTable.getTable("SmartDashboard").putNumber("Frisbee Offset", 0);
                //   NetworkTable.getTable("SmartDashboard").putNumber("Frisbee Size", 320);  //assume it is too big to see
                //}

                im_mask.Image = mask.ToBitmap();
                im_image.Image = image.ToBitmap();

                //Thread.Sleep(500);
            }
            catch
            {
            }
        }
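findMeanIndex() and findMidPointBetweenFrisbees() are used above but not shown. A plausible sketch of findMeanIndex (an assumption about its behavior: return the intensity-weighted mean index of a projection-sum array between two bounds):

        // Hypothetical implementation, for illustration only.
        private int findMeanIndex(int start, int end, long[] sums)
        {
            long total = 0;
            long weighted = 0;
            for (int i = start; i < end; i++)
            {
                total += sums[i];
                weighted += (long)i * sums[i];
            }
            // centroid of the masked pixels along this axis; fall back to the middle if empty
            return total > 0 ? (int)(weighted / total) : (start + end) / 2;
        }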