private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = new Mat();

            capture.Retrieve(frame, 0);

            // --- preprocessing: mirror the frame and build a grayscale working copy ---
            Image <Bgr, byte>  finalImg      = frame.ToImage <Bgr, byte>().Flip(FlipType.Horizontal);
            Image <Gray, byte> processingImg = finalImg.Convert <Gray, byte>();

            // BUG FIX: the original applied threshold/blur inside Dispatcher.BeginInvoke,
            // so those filters ran asynchronously and raced against the morphology/Canny
            // steps below (usually executing too late to affect this frame). Read the
            // slider values synchronously and filter on this thread instead.
            double biTonalValue = BiTonalLevel.Dispatcher.Invoke(() => BiTonalLevel.Value);
            double blurValue    = BlurLevel.Dispatcher.Invoke(() => BlurLevel.Value);

            if (biTonalValue > 0)
            {
                processingImg = processingImg.ThresholdBinary(new Gray(biTonalValue), new Gray(255));
            }
            if (blurValue > 1)
            {
                CvInvoke.Blur(processingImg, processingImg, new System.Drawing.Size((int)blurValue, (int)blurValue), new System.Drawing.Point(-1, -1));
            }

            // --- morphological processing ---
            // BUG FIX: Image<,>.MorphologyEx returns the processed image rather than
            // operating in place; the original discarded the result, so the morphology
            // had no effect on the pipeline.
            processingImg = processingImg.MorphologyEx(firstMorphOp, kernel, new System.Drawing.Point(-1, -1), firstMorphSteps, BorderType.Default, new MCvScalar());
            if (doubleMorph)
            {
                processingImg = processingImg.MorphologyEx(secondMorphOp, kernel2, new System.Drawing.Point(-1, -1), secondMorphSteps, BorderType.Default, new MCvScalar());
            }
            ProcessingVideoBox.Dispatcher.BeginInvoke(new Action(() => ProcessingVideoBox.Source = ToBitmapGrey(processingImg)));

            // --- edge detection ---
            Mat edges = new Mat(frame.Size, frame.Depth, 1);

            CvInvoke.Canny(processingImg, edges, lowerTresholdLevel, upperTresholdLevel, cannyKernelSize);

            // --- contour finding: locate the largest-area contour ---
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat    hierarchy             = new Mat();
            int    largest_contour_index = 0;
            double largest_area          = 0;

            CvInvoke.FindContours(edges, contours, hierarchy, contouringMode, contouringMethod);
            for (int i = 0; i < contours.Size; i++)
            {
                double a = CvInvoke.ContourArea(contours[i], false);
                if (a > largest_area)
                {
                    largest_area          = a;
                    largest_contour_index = i;
                }
            }

            // --- convexity-defect detection on the largest contour ---
            VectorOfInt hull    = new VectorOfInt();
            Mat         defects = new Mat();

            if (contours.Size > 0)
            {
                // ROBUSTNESS: the original called DrawContours unconditionally with
                // index 0, which is invalid when no contours were found.
                CvInvoke.DrawContours(finalImg, contours, largest_contour_index, redColor, 3, LineType.EightConnected, hierarchy);

                VectorOfPoint largestContour = new VectorOfPoint(contours[largest_contour_index].ToArray());
                CvInvoke.ConvexHull(largestContour, hull, false, true);
                CvInvoke.ConvexityDefects(largestContour, hull, defects);
                if (!defects.IsEmpty)
                {
                    // Each defect row holds (startIdx, endIdx, farIdx, depth); copy the
                    // Mat into a Matrix and split channels so rows can be indexed.
                    Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
                    defects.CopyTo(m);
                    Matrix <int>[] channels = m.Split();
                    // NOTE(review): the loop starts at row 1, skipping the first defect —
                    // kept as-is, TODO confirm this is intentional.
                    for (int i = 1; i < defects.Rows; ++i)
                    {
                        finalImg.Draw(new System.Drawing.Point[] { largestContour[channels[0][i, 0]], largestContour[channels[1][i, 0]] }, new Bgr(100, 255, 100), 2);
                        CvInvoke.Circle(finalImg, new System.Drawing.Point(largestContour[channels[0][i, 0]].X, largestContour[channels[0][i, 0]].Y), 7, new MCvScalar(255, 0, 0), -1);
                    }
                }
            }
            MainVideoBox.Dispatcher.BeginInvoke(new Action(() => MainVideoBox.Source = ToBitmapFinal(finalImg)));
        }
// Exemplo n.º 2
// 0
        private void btnCapture_Click(object sender, EventArgs e)
        {
            // Pick the camera id from the checked radio button; abort if none is chosen.
            if (!(rdbFront.Checked || rdbRight.Checked))
            {
                MessageBox.Show("先选择使用的图片来源!", "取像提示");
                return;
            }
            else if (rdbRight.Checked)
            {
                cameraID = 0; // right-side, high-position camera
                FlashLogger.Info("当前图像时右侧-高位相机");
            }
            else
            {
                cameraID = 1; // front, low-position camera
                FlashLogger.Info("当前图像时前侧-低位相机");
            }

            // Let the user pick the image file to analyse.
            Image <Bgr, byte> myImg          = null;
            OpenFileDialog    openFileDialog = new OpenFileDialog();

            if (openFileDialog.ShowDialog() == DialogResult.OK)
            {
                myImg = new Image <Bgr, byte>(openFileDialog.FileName);
            }
            if (myImg == null)
            {
                return;
            }

            // Run the measurement (165 is a fixed parameter of GetProductParamters) and
            // grab the binary image that the call produces as a side effect.
            imgInfo      = CenterAndSlope.GetProductParamters(myImg.Bitmap, cameraID, 165);
            bgrBinaryImg = new Image <Bgr, byte>(CenterAndSlope.BinaryImage.Bitmap);

            // Mark the detected image corners.
            foreach (var item in imgInfo.ImageCorner)
            {
                CvInvoke.Circle(myImg, new Point((int)item.X, (int)item.Y), 10, new MCvScalar(0, 255, 33), 10);
            }

            #region visualization
            // Segment-point dictionaries (point -> segment id 0..4). Only the bottom
            // edge is currently populated; up/right/left stay empty (the original had
            // their population commented out) but are kept for the count logging below.
            Dictionary <Point, int> upLine     = new Dictionary <Point, int>();
            Dictionary <Point, int> bottomLine = CenterAndSlope.segmentBottomLines.Keys.ToArray()[0];
            Dictionary <Point, int> rightLine  = new Dictionary <Point, int>();
            Dictionary <Point, int> leftLine   = new Dictionary <Point, int>();

            // Dump the bottom-edge points for offline inspection.
            foreach (var item in bottomLine)
            {
                File.AppendAllText("bottomLine.txt", "X = " + item.Key.X.ToString() + "\tY = " + item.Key.Y.ToString() + "\tID = " + item.Value.ToString() + "\r\n");
            }

            FlashLogger.Info("Up-Num =>" + upLine.Count.ToString());
            FlashLogger.Info("Bottom-Num =>" + bottomLine.Count.ToString());
            FlashLogger.Info("Right-Num =>" + rightLine.Count.ToString());
            FlashLogger.Info("Left-Num =>" + leftLine.Count.ToString());

            // Centroid (mean of X/Y), reused by every ray drawn below.
            Point center = new Point((int)imgInfo.CenterOfImg.X, (int)imgInfo.CenterOfImg.Y);
            CvInvoke.Circle(myImg, center, 10, new MCvScalar(255, 25, 100), 10);

            // ========= BOTTOM =========
            // One BGR color per segment id 0..4: red, yellow, green, blue, magenta.
            // (Replaces five copy-pasted foreach blocks from the original.)
            MCvScalar[] bottomColors =
            {
                new MCvScalar(0, 0, 255),
                new MCvScalar(0, 255, 255),
                new MCvScalar(0, 255, 5),
                new MCvScalar(255, 0, 0),
                new MCvScalar(255, 10, 255)
            };
            for (int id = 0; id < bottomColors.Length; id++)
            {
                MCvScalar color = bottomColors[id];
                foreach (var item in bottomLine.Where(a => a.Value == id))
                {
                    CvInvoke.Circle(myImg, item.Key, 5, color, 3);
                    CvInvoke.Line(myImg, item.Key, center, color, 3);
                    CvInvoke.Line(bgrBinaryImg, item.Key, center, color, 3);
                }
            }

            // ========= LEFT =========
            // Same palette except id 4 is (255,120,255); ids 1 and 2 get dots only
            // (no ray to the centroid), matching the original behavior.
            MCvScalar[] leftColors =
            {
                new MCvScalar(0, 0, 255),
                new MCvScalar(0, 255, 255),
                new MCvScalar(0, 255, 5),
                new MCvScalar(255, 0, 0),
                new MCvScalar(255, 120, 255)
            };
            for (int id = 0; id < leftColors.Length; id++)
            {
                MCvScalar color       = leftColors[id];
                bool      rayToCenter = id == 0 || id == 3 || id == 4;
                foreach (var item in leftLine.Where(a => a.Value == id))
                {
                    CvInvoke.Circle(myImg, item.Key, 5, color, 3);
                    if (rayToCenter)
                    {
                        CvInvoke.Line(myImg, item.Key, center, color, 3);
                    }
                }
            }
            #endregion

            // Report the per-motor shifts.
            for (int i = 0; i < imgInfo.MotorShift.Length; i++)
            {
                textBox1.Text += "#【" + i + "】" + imgInfo.MotorShift[i].ToString() + "\r\n";
            }

            // Fitted axis through the centroid; the sensor is 5472 x 3648 px.
            double tanA = Math.Tan(imgInfo.RotatedAngle / 180f * Math.PI);
            int y0    = (int)(imgInfo.CenterOfImg.Y - tanA * imgInfo.CenterOfImg.X);          // at x = 0
            int y5472 = (int)(imgInfo.CenterOfImg.Y + tanA * (5472 - imgInfo.CenterOfImg.X)); // at x = 5472
            CvInvoke.Line(myImg, new Point(0, y0), new Point(5472, y5472), new MCvScalar(23, 25, 200), 10);
            CvInvoke.Line(bgrBinaryImg, new Point(0, y0), new Point(5472, y5472), new MCvScalar(0, 0, 255), 10);
            int x0    = (int)(imgInfo.CenterOfImg.X + tanA * imgInfo.CenterOfImg.Y);          // at y = 0
            int x3648 = (int)(imgInfo.CenterOfImg.X - tanA * (3648 - imgInfo.CenterOfImg.Y)); // at y = 3648
            CvInvoke.Line(myImg, new Point(x0, 0), new Point(x3648, 3648), new MCvScalar(0, 0, 255), 10);
            CvInvoke.Line(bgrBinaryImg, new Point(x0, 0), new Point(x3648, 3648), new MCvScalar(0, 0, 255), 10);

            // Same two axes derived from the bounding-rectangle center/angle.
            double tanRect = Math.Tan(imgInfo.RectRotatedAngle / 180f * Math.PI);
            int y0rect    = (int)(imgInfo.RectCenterOfImg.Y - tanRect * imgInfo.RectCenterOfImg.X);
            int y5472rect = (int)(imgInfo.RectCenterOfImg.Y + tanRect * (5472 - imgInfo.RectCenterOfImg.X));
            CvInvoke.Line(myImg, new Point(0, y0rect), new Point(5472, y5472rect), new MCvScalar(225, 0, 0), 2);
            // FIX: was "new MCvScalar(225, 00)" — normalized to the explicit three-component
            // blue used on the line above.
            CvInvoke.Line(bgrBinaryImg, new Point(0, y0rect), new Point(5472, y5472rect), new MCvScalar(225, 0, 0), 2);
            int x0rect    = (int)(imgInfo.RectCenterOfImg.X + tanRect * imgInfo.RectCenterOfImg.Y);
            int x3648rect = (int)(imgInfo.RectCenterOfImg.X - tanRect * (3648 - imgInfo.RectCenterOfImg.Y));
            CvInvoke.Line(myImg, new Point(x0rect, 0), new Point(x3648rect, 3648), new MCvScalar(255, 0, 0), 10);
            CvInvoke.Line(bgrBinaryImg, new Point(x0rect, 0), new Point(x3648rect, 3648), new MCvScalar(255, 0, 0), 10);

            FlashLogger.Info("X-Y均值计算质心:" + imgInfo.CenterOfImg.ToString() + "\r\n外接矩形中心:" + imgInfo.RectCenterOfImg.ToString() + "\r\nHu矩计算质心:" + imgInfo.GravityCenterOfImg.ToString() + "\r\n");
            FlashLogger.Info("最小二乘法拟合直线斜率:" + imgInfo.RotatedAngle.ToString() + "\r\n外接矩形斜率:" + imgInfo.RectRotatedAngle.ToString() + "\r\n");
            FlashLogger.Info("24电机位移\r\n" + textBox1.Text);

            pictureBox1.Image = myImg.ToBitmap();
            pictureBox1.Update();
        }
// Exemplo n.º 3
// 0
        /// <summary>
        /// Counts the number of raised fingers on a skin mask and renders detection
        /// debug information (contour, hull, bounding box, defect points, finger count).
        /// Updates <c>NumberOfFingersRaised</c> as a side effect when fingers are found.
        /// </summary>
        /// <param name="skinMask">Single-channel skin mask to count fingers on.</param>
        /// <returns>Mat with detection debug information drawn onto it.</returns>
        public Mat FindFingersCount(Mat skinMask)
        {
            Mat contoursImage = Mat.Ones(skinMask.Height, skinMask.Width, DepthType.Cv8U, 3);

            // A usable mask must be non-empty and single-channel.
            if (skinMask.IsEmpty || skinMask.NumberOfChannels != 1)
            {
                return(contoursImage);
            }

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();

            CvInvoke.FindContours(skinMask, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);

            if (contours.Size <= 0)
            {
                return(contoursImage);
            }

            // The hand is assumed to be the largest-area contour.
            int    biggestContourIndex = -1;
            double biggestArea         = 0;

            for (int i = 0; i < contours.Size; i++)
            {
                double area = CvInvoke.ContourArea(contours[i], false);
                if (area > biggestArea)
                {
                    biggestArea         = area;
                    biggestContourIndex = i;
                }
            }
            if (biggestContourIndex < 0)
            {
                return(contoursImage);
            }

            // Hull as points (for drawing) and as indices (for convexity defects).
            VectorOfPoint hullPoints = new VectorOfPoint();
            VectorOfInt   hullInts   = new VectorOfInt();

            CvInvoke.ConvexHull(contours[biggestContourIndex], hullPoints, true);
            CvInvoke.ConvexHull(contours[biggestContourIndex], hullInts, false);

            Mat defects = new Mat();

            // ConvexityDefects requires more than 3 hull points.
            if (hullInts.Size > 3)
            {
                CvInvoke.ConvexityDefects(contours[biggestContourIndex], hullInts, defects);
            }
            else
            {
                return(contoursImage);
            }

            Rectangle boundingRectangle = CvInvoke.BoundingRectangle(hullPoints);

            Point         centerBoundingRectangle = new Point((boundingRectangle.X + boundingRectangle.Right) / 2, (boundingRectangle.Y + boundingRectangle.Bottom) / 2);
            VectorOfPoint startPoints             = new VectorOfPoint();
            VectorOfPoint farPoints = new VectorOfPoint();

            // Each defect row is (startIdx, endIdx, farIdx, depth) -> rows = Length / 4.
            // CLEANUP: the original called defects.GetData() a second time for the debug
            // pass below; the array is computed once and reused.
            int[,,] defectsData = (int[, , ])defects.GetData();
            int     defectCount = defectsData.Length / 4;
            for (int i = 0; i < defectCount; i++)
            {
                Point startPoint = contours[biggestContourIndex][defectsData[i, 0, 0]];
                // Deduplicate start points within 30 px of one already kept.
                if (!startPoints.ToArray().Any(p => Math.Abs(p.X - startPoint.X) < 30 && Math.Abs(p.Y - startPoint.Y) < 30))
                {
                    startPoints.Push(new VectorOfPoint(new Point[] { startPoint }));
                }
                Point farPoint = contours[biggestContourIndex][defectsData[i, 0, 2]];
                // Keep only far points reasonably close to the palm center.
                if (findPointsDistance(farPoint, centerBoundingRectangle) < boundingRectangle.Height * BOUNDING_RECT_FINGER_SIZE_SCALING)
                {
                    farPoints.Push(new VectorOfPoint(new Point[] { farPoint }));
                }
            }

            // Merge clusters of neighboring candidates into their medians.
            VectorOfPoint filteredStartPoints = CompactOnNeighborhoodMedian(startPoints, boundingRectangle.Height * BOUNDING_RECT_NEIGHBOR_DISTANCE_SCALING);
            VectorOfPoint filteredFarPoints   = CompactOnNeighborhoodMedian(farPoints, boundingRectangle.Height * BOUNDING_RECT_NEIGHBOR_DISTANCE_SCALING);

            VectorOfPoint filteredFingerPoints = new VectorOfPoint();

            if (filteredFarPoints.Size > 1)
            {
                VectorOfPoint fingerPoints = new VectorOfPoint();

                // A start point flanked by two far points at a finger-like angle counts
                // as a fingertip.
                for (int i = 0; i < filteredStartPoints.Size; i++)
                {
                    VectorOfPoint closestPoints = findClosestOnX(filteredFarPoints, filteredStartPoints[i]);

                    if (isFinger(closestPoints[0], filteredStartPoints[i], closestPoints[1], LIMIT_ANGLE_INF, LIMIT_ANGLE_SUP, centerBoundingRectangle, boundingRectangle.Height * BOUNDING_RECT_FINGER_SIZE_SCALING))
                    {
                        fingerPoints.Push(new Point[] { filteredStartPoints[i] });
                    }
                }
                if (fingerPoints.Size > 0)
                {
                    // A hand has at most 5 fingers: truncate extras in one step.
                    // CLEANUP: the original rebuilt the vector once per removed point in
                    // a while-loop and also contained an empty for-loop; both removed.
                    if (fingerPoints.Size > 5)
                    {
                        fingerPoints = new VectorOfPoint(fingerPoints.ToArray().Take(5).ToArray());
                    }
                    filteredFingerPoints       = fingerPoints;
                    this.NumberOfFingersRaised = filteredFingerPoints.Size;
                }
            }

            Bgr colorRed    = new Bgr(Color.Red);
            Bgr colorGreen  = new Bgr(Color.Green);
            Bgr colorBlue   = new Bgr(Color.Blue);
            Bgr colorYellow = new Bgr(Color.Yellow);
            Bgr colorPurple = new Bgr(Color.Purple);
            Bgr colorWhite  = new Bgr(Color.White);

            // Debug: draw every defect triangle (start, far, end) and its corners.
            for (int i = 0; i < defectCount; i++)
            {
                Point start = contours[biggestContourIndex][defectsData[i, 0, 0]];
                Point far   = contours[biggestContourIndex][defectsData[i, 0, 2]];
                Point end   = contours[biggestContourIndex][defectsData[i, 0, 1]];

                CvInvoke.Polylines(contoursImage, new Point[] { start, far, end }, true, colorPurple.MCvScalar, DRAW_THICKNESS / 2);
                CvInvoke.Circle(contoursImage, start, 5, colorWhite.MCvScalar);
                CvInvoke.Circle(contoursImage, far, 5, colorRed.MCvScalar, 10);
                CvInvoke.Circle(contoursImage, end, 5, colorBlue.MCvScalar);
            }

            // Overlay: contour (green), hull (blue), bounding box (red), palm center
            // (yellow), filtered key points, and the finger count as text.
            CvInvoke.DrawContours(contoursImage, contours, 0, colorGreen.MCvScalar, DRAW_THICKNESS, LineType.AntiAlias);
            CvInvoke.Polylines(contoursImage, hullPoints, true, colorBlue.MCvScalar, DRAW_THICKNESS);
            CvInvoke.Rectangle(contoursImage, boundingRectangle, colorRed.MCvScalar, DRAW_THICKNESS);
            CvInvoke.Circle(contoursImage, centerBoundingRectangle, 5, colorYellow.MCvScalar, DRAW_THICKNESS);
            drawVectorPoints(contoursImage, filteredStartPoints, colorRed.MCvScalar, true, 3);
            drawVectorPoints(contoursImage, filteredFarPoints, colorWhite.MCvScalar, true, 3);
            drawVectorPoints(contoursImage, filteredFingerPoints, colorYellow.MCvScalar, false, 3);
            CvInvoke.PutText(contoursImage, filteredFingerPoints.Size.ToString(), centerBoundingRectangle, FontFace.HersheyComplex, 2, colorYellow.MCvScalar);

            return(contoursImage);
        }
// Exemplo n.º 4
// 0
        private void ProcessFrameMP4(object sender, EventArgs e)
        {
            // End points of the car-counting line.
            px = new Point(px1, px2);
            py = new Point(py1, py2);

            if (cap == null)
            {
                return;
            }

            cap.Retrieve(frame, 0);
            currentframe = frame.ToImage <Bgr, byte>();

            // --- background subtraction and mask cleanup ---
            Mat mask = new Mat();
            sub.Apply(currentframe, mask);

            // CLEANUP: the original also created unused kernelEl/Dilate mats and
            // assigned kernelOp/kernelCl twice (a discarded "new Mat()" each).
            Mat kernelOp = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
            Mat kernelCl = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(11, 11), new Point(-1, -1));
            var element  = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));

            CvInvoke.GaussianBlur(mask, mask, new Size(13, 13), 1.5);
            CvInvoke.MorphologyEx(mask, mask, MorphOp.Open, kernelOp, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
            CvInvoke.MorphologyEx(mask, mask, MorphOp.Close, kernelCl, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
            CvInvoke.Dilate(mask, mask, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
            CvInvoke.Threshold(mask, mask, 127, 255, ThresholdType.Binary);

            // --- blob detection and tracking ---
            detect.Detect(mask.ToImage <Gray, byte>(), blobs);
            blobs.FilterByArea(500, 20000);
            tracks.Update(blobs, 20.0, 1, 10);

            Image <Bgr, byte> result = new Image <Bgr, byte>(currentframe.Size);
            using (Image <Gray, Byte> blobMask = detect.DrawBlobsMask(blobs))
            {
                frame.CopyTo(result, blobMask);
            }
            CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 0, 255), 2);

            foreach (KeyValuePair <uint, CvTrack> pair in tracks)
            {
                if (pair.Value.Inactive != 0)
                {
                    continue; // only draw the active tracks
                }

                int cx = Convert.ToInt32(pair.Value.Centroid.X);
                int cy = Convert.ToInt32(pair.Value.Centroid.Y);

                CvBlob b     = blobs[pair.Value.BlobLabel];
                Bgr    color = detect.MeanColor(b, frame.ToImage <Bgr, Byte>());
                result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, FontFace.HersheySimplex, 0.5, color);
                currentframe.Draw(pair.Value.BoundingBox, new Bgr(0, 0, 255), 1);

                Point center = new Point(cx, cy);
                CvInvoke.Circle(currentframe, center, 1, new MCvScalar(255, 0, 0), 2);

                // Count the car once when its centroid enters the counting band.
                if (center.Y <= px.Y + 10 && center.Y > py.Y - 10 && center.X <= py.X && center.X > px.X)
                {
                    if (pair.Key.ToString() != "" && !carid.Contains(pair.Key.ToString()))
                    {
                        carid.Add(pair.Key.ToString());
                        if (carid.Count == 20)
                        {
                            carid.Clear(); // cap the remembered-id list
                        }

                        carcount++;

                        // Skip JSON logging for the 5 counts right after the border count.
                        if (carcount != countBrd + 1 && carcount != countBrd + 2 && carcount != countBrd + 3 && carcount != countBrd + 4 && carcount != countBrd + 5)
                        {
                            Logs log = new Logs()
                            {
                                Date = DateTime.Now.ToString(),
                                Id   = carcount
                            };
                            string strResultJson = JsonConvert.SerializeObject(log);
                            File.AppendAllText(cfg.LogSavePath + @"\log.json", strResultJson + Environment.NewLine);
                        }
                    }

                    // Flash the counting line green while a car is inside the band.
                    CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 255, 0), 2);
                }
            }

            CvInvoke.PutText(currentframe, "Count :" + carcount.ToString(), new Point(10, 25), FontFace.HersheySimplex, 1, new MCvScalar(255, 0, 255), 2, LineType.AntiAlias);

            // Pace playback to the source frame rate.
            double framerate = cap.GetCaptureProperty(CapProp.Fps);
            Thread.Sleep((int)(1000.0 / framerate));

            if (firstCount == false && carcount == countBrd)
            {
                // BUG FIX: the original format "dd-mm-yyyy-hh-mm-ss" put minutes ("mm")
                // in the month slot and used the 12-hour "hh"; "MM" is month and "HH"
                // is the 24-hour clock.
                Image_Name = cfg.PhotoSavePath + @"\" + "Car" + DateTime.Now.ToString("dd-MM-yyyy-HH-mm-ss") + ".jpg";
                currentframe.Save(Image_Name);
                sendMail = new Thread(SendMail);
                sendMail.Start();
                firstCount = true;
            }

            if (isRecording)
            {
                // Timestamp subsequent frames relative to the first recorded frame.
                if (firstFrameTime != null)
                {
                    writer.WriteVideoFrame(currentframe.Bitmap, DateTime.Now - firstFrameTime.Value);
                }
                else
                {
                    writer.WriteVideoFrame(currentframe.Bitmap);
                    firstFrameTime = DateTime.Now;
                }
            }

            pictureBox1.Image = currentframe.Bitmap;
        }
// Exemplo n.º 5
// 0
        /// <summary>
        /// Detects marker contours in <c>BaseImg</c>, draws their centers onto a fresh copy of
        /// the original color image, annotates the X/Y error and total distance between the
        /// first two reference points, and publishes the rendered image and the distance via
        /// <c>evtProcessedImg</c> / <c>evtDistance</c>.
        /// </summary>
        /// <param name="cfg">Supplies the binary threshold, the physical resolution
        /// (real-world units per pixel) and, when <c>UseLine</c> is set, two lines whose
        /// intersection becomes an additional reference point.</param>
        public void StartProcessing(Config cfg)
        {
            try
            {
                // Work on a fresh copy so repeated calls don't accumulate drawings.
                ClrImg = ClrOriginalImg.Copy();
                if (BaseImg == null)
                {
                    return;
                }

                // Binarize the grayscale base image before contour extraction.
                var img = BaseImg.ThresholdBinary(new Gray(cfg.Threshold), new Gray(255));
                //.SmoothMedian( (int)cfg.Resolution * 10 + 1 );
                var contours = new VectorOfVectorOfPoint();

                CvInvoke.FindContours(img, contours, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);

                // Pair each kept contour with the color it should be drawn in.
                var cntrColorSet = CreateContour_ColorSet(Outercolor, Innercolor, cfg, contours);

                List <VectorOfPoint> cntrlist  = cntrColorSet.Item1;
                List <MCvScalar>     colorlist = cntrColorSet.Item2;


                // Pixel-space centers of each contour, plus their real-world coordinates.
                var centers    = FindCenter(cntrlist);
                var centerlist = new List <double[]>();

                // NOTE(review): textRatio is computed but never used below.
                double textRatio = Math.Pow(Math.E, RatioW);

                // NOTE(review): xys is never populated or read.
                List <double[]> xys = new List <double[]>();

                for (int i = 0; i < centers.Count(); i++)
                {
                    // Inner dot plus an outer ring scaled by the display ratio.
                    CvInvoke.Circle(ClrImg, centers[i], 5, colorlist[i]);
                    CvInvoke.Circle(ClrImg, centers[i], (int)(5 * RatioW), colorlist[i], thickness: RatioW > 1 ? (int)RatioW : 1);
                    // Convert pixel coordinates to physical units.
                    var realx = (centers[i].X * cfg.Resolution);
                    var realy = (centers[i].Y * cfg.Resolution);

                    centerlist.Add(new double[] { realx, realy });
                }


                double xerror;
                double yerror;
                if (cfg.UseLine)
                {
                    // Intersect the two configured lines (y = slope*x + bias each) and use
                    // the crossing point as an extra reference center.
                    // NOTE(review): vertical lines (HX2 == HX1) or parallel lines
                    // (slope1 == slope2) yield Infinity/NaN here — TODO confirm the
                    // config guarantees this cannot happen.
                    var slope1 = (cfg.HY2 - cfg.HY1) / (cfg.HX2 - cfg.HX1);
                    var bias1  = cfg.HY1 - slope1 * cfg.HX1;

                    var slope2 = (cfg.WY2 - cfg.WY1) / (cfg.WX2 - cfg.WX1);
                    var bias2  = cfg.WY1 - slope2 * cfg.WX1;

                    var crossx = (bias2 - bias1) / (slope1 - slope2);
                    var crossy = slope1 * ((bias2 - bias1) / (slope1 - slope2)) + bias1;

                    // Scale the intersection into display pixels.
                    var posx = (int)(crossx * RatioW);
                    var posy = (int)(crossy * RatioH);

                    centers.Add(new System.Drawing.Point(posx, posy));

                    // And into physical units for the error computation.
                    var crossx_cvs = crossx * RatioW * cfg.Resolution;
                    var crossy_cvs = crossy * RatioH * cfg.Resolution;

                    centerlist.Add(new double[] { crossx_cvs, crossy_cvs });

                    CvInvoke.Circle(ClrImg, centers.Last(), 5, colorlist.Last());
                    CvInvoke.Circle(ClrImg, centers.Last(), (int)(5 * RatioW), colorlist.Last(), thickness: RatioW > 1 ? (int)RatioW : 1);
                }

                // NOTE(review): assumes centerlist has at least two entries; with
                // UseLine off and fewer than two detected contours this throws and is
                // swallowed by the catch below — TODO confirm that is intended.
                xerror = Math.Abs(centerlist[0][0] - centerlist[1][0]);
                yerror = Math.Abs(centerlist[0][1] - centerlist[1][1]);

                string xyerror = string.Format("X Error : {0}  ,  Y Error : {1}", xerror.ToString("F4"), yerror.ToString("F4"));
                System.Drawing.Point textposXY = new System.Drawing.Point(centers[0].X - (int)(40 * RatioW), centers[0].Y - (int)(10 * RatioH));
                CvInvoke.PutText(ClrImg, xyerror, textposXY, FontFace.HersheySimplex, RatioW / 2.0, new MCvScalar(53, 251, 32), thickness: (int)(2 * RatioW));


                // Euclidean distance between the reference points, plus a connecting line.
                double errorDistance = CalcDistance(centerlist);
                ClrImg = CenterDiffDraw(centers, ClrImg);

                System.Drawing.Point textdifpos = new System.Drawing.Point(centers[0].X + (int)(40 * RatioW), centers[0].Y + (int)(10 * RatioH));
                CvInvoke.PutText(ClrImg, "Error : " + errorDistance.ToString("F4") + " (um)", textdifpos, FontFace.HersheySimplex, RatioW / 2.0, new MCvScalar(153, 51, 153), thickness: (int)(2 * RatioW));

                // Publish the annotated frame and the measured distance.
                var res = ToBitmapSource(ClrImg);
                evtProcessedImg(res);
                evtDistance(errorDistance);
            }
            catch (Exception er)
            {
                // Best-effort: any failure (including the indexing above) is only logged.
                er.ToString().Print();
            }
        }
Exemplo n.º 6
0
 /// <summary>
 /// Draws a simple or filled circle with the given center and radius; the circle is
 /// clipped by the image's ROI rectangle.
 /// </summary>
 /// <param name="img">Image the circle is drawn on</param>
 /// <param name="center">Center of the circle</param>
 /// <param name="radius">Radius of the circle</param>
 /// <param name="color">Color of the circle</param>
 /// <param name="thickness">Outline thickness when positive; a negative value requests a filled circle</param>
 /// <param name="lineType">Line type</param>
 /// <param name="shift">Number of fractional bits in the center coordinates and radius value</param>
 /// <remarks>
 /// http://docs.opencv.org/2.4/modules/core/doc/drawing_functions.html?highlight=cv2.circle#cv2.circle
 /// </remarks>
 public static void Circle(IInputOutputArray img, Point center, int radius, Color color, int thickness = 1, Emgu.CV.CvEnum.LineType lineType = Emgu.CV.CvEnum.LineType.EightConnected, int shift = 0)
 {
     // Convenience overload: translate the System.Drawing.Color into the scalar
     // form CvInvoke expects, then delegate unchanged.
     var scalar = color.ToMCvSCalar();

     CvInvoke.Circle(img, center, radius, scalar, thickness, lineType, shift);
 }
Exemplo n.º 7
0
    // Update is called once per frame.
    //
    // Kinect depth pipeline with three phases driven by the edgesDetected /
    // depthMapped flags:
    //   1. Edge detection (once): threshold the raw depths into a binary image,
    //      clean it up morphologically, and pick the contour whose area matches
    //      the table to derive the minX/maxX/minY/maxY working region.
    //   2. Depth calibration (once): average CONFIG_ITERACIONES frames of the
    //      empty table into averageDepthConfig — the per-pixel reference depth.
    //   3. Ball/bounce detection (every frame after that): pixels noticeably
    //      closer than the reference map become white blobs; the first blob in
    //      the expected area range is taken as the ball, and a bounce is flagged
    //      when its depth stops decreasing.
    void Update()
    {
        IsAvailable = _sensor.IsAvailable;

        if (depthFrameReader != null)
        {
            var frame = depthFrameReader.AcquireLatestFrame();

            if (frame != null)
            {
                frame.CopyFrameDataToArray(rawDepthPixels);

                // Phase 1: bound the limits of the table (runs once).
                if (!edgesDetected)
                {
                    // Paint in-range depths white and everything else black so the
                    // table edges stand out.
                    for (int depth = 0; depth < rawDepthPixels.Length; depth++)
                    {
                        depthPixel = rawDepthPixels[depth];
                        if (depthPixel > MIN_DEPTH && depthPixel < MAX_DEPTH)
                        {
                            colorImage[depth * 3]     = 255;
                            colorImage[depth * 3 + 1] = 255;
                            colorImage[depth * 3 + 2] = 255;
                        }
                        else
                        {
                            colorImage[depth * 3]     = 0;
                            colorImage[depth * 3 + 1] = 0;
                            colorImage[depth * 3 + 2] = 0;
                        }
                    }
                    frameOpenCV.SetTo(colorImage);
                    UMat uimage = new UMat();
                    CvInvoke.CvtColor(frameOpenCV, uimage, ColorConversion.Bgr2Gray);

                    // Smooth out small specks (erode then dilate).
                    Mat erodeElement  = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new System.Drawing.Size(3, 3), new System.Drawing.Point(-1, -1));
                    Mat dilateElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new System.Drawing.Size(10, 10), new System.Drawing.Point(-1, -1));

                    MCvScalar scalarD = new MCvScalar(5, 5);
                    CvInvoke.Erode(uimage, uimage, erodeElement, new System.Drawing.Point(-1, -1), 4, BorderType.Constant, scalarD);
                    CvInvoke.Dilate(uimage, uimage, dilateElement, new System.Drawing.Point(-1, -1), 2, BorderType.Constant, scalarD);


                    // Find contours.
                    edgesTable = new Emgu.CV.Util.VectorOfVectorOfPoint();
                    Mat heir = new Mat();
                    Image <Rgb, byte> imgout = new Image <Rgb, byte>(frameOpenCV.Width, frameOpenCV.Height, new Rgb(200, 200, 200));
                    CvInvoke.FindContours(uimage, edgesTable, heir, Emgu.CV.CvEnum.RetrType.Ccomp, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

                    double maxArea = 0;
                    for (int i = 0; i < edgesTable.Size; i++)
                    {
                        var moment = CvInvoke.Moments(edgesTable[i]);
                        area = moment.M00;
                        // Keep the largest area, which is the ping-pong table...
                        if (area > maxArea)
                        {
                            // ...BUT discard the area covering the whole frame rectangle:
                            // the table is expected to fill 18%-22% of the image.
                            if (area < WIDTH * HEIGHT * 0.22 && area > WIDTH * HEIGHT * 0.18)
                            {
                                maxArea   = area;
                                indexArea = i;
                            }
                        }
                    }

                    for (int i = 0; i < edgesTable[indexArea].Size; i++)
                    {
                        // Find the lowest and highest X, and likewise for Y, padding
                        // the bounds by 'desborde' (overflow margin).
                        if (edgesTable[indexArea][i].X > maxX)
                        {
                            maxX = edgesTable[indexArea][i].X + desborde;
                        }
                        if (edgesTable[indexArea][i].X < minX)
                        {
                            minX = edgesTable[indexArea][i].X - desborde;
                        }
                        if (edgesTable[indexArea][i].Y > maxY)
                        {
                            maxY = edgesTable[indexArea][i].Y + desborde;
                        }
                        if (edgesTable[indexArea][i].Y < minY)
                        {
                            minY = edgesTable[indexArea][i].Y - desborde;
                        }
                    }

                    // Debug rendering: table contour plus the four corner markers.
                    CvInvoke.DrawContours(imgout, edgesTable, indexArea, new MCvScalar(255, 0, 0), 1);
                    CvInvoke.Circle(imgout, new System.Drawing.Point(minX, minY), 2, colorDetected, 2);
                    CvInvoke.Circle(imgout, new System.Drawing.Point(minX, maxY), 2, colorDetected, 2);
                    CvInvoke.Circle(imgout, new System.Drawing.Point(maxX, minY), 2, colorDetected, 2);
                    CvInvoke.Circle(imgout, new System.Drawing.Point(maxX, maxY), 2, colorDetected, 2);
                    edgesDetected = true;
                }


                // Phase 2: map the depth of the (empty) table surface.
                if (!depthMapped && edgesDetected)
                {
                    // Collect one depth snapshot per frame until enough passes exist.
                    if (listConfig.Count < CONFIG_ITERACIONES)
                    {
                        var configDepth = new int[WIDTH * HEIGHT];
                        for (int row = minY; row < maxY; row++)
                        {
                            for (int col = minX; col < maxX; col++)
                            {
                                // Convert row/column into its flat-array equivalent.
                                depthPixel = rawDepthPixels[(row * WIDTH) + (col)];
                                if (depthPixel > MIN_DEPTH && depthPixel < MAX_DEPTH)
                                {
                                    configDepth[(row * WIDTH) + (col)] = depthPixel;
                                }
                                else
                                {
                                    // Out-of-range pixels get a sentinel (MAX_DEPTH - 200)
                                    // instead of -1 so a very low value doesn't ruin the
                                    // average. (The original comment said 700 — the
                                    // constant may have drifted; TODO confirm.)
                                    configDepth[(row * WIDTH) + (col)] = MAX_DEPTH - 200;
                                }
                            }
                        }

                        listConfig.Add(configDepth);
                        // Release the frame before the early return of this pass.
                        if (frame != null)
                        {
                            frame.Dispose();
                            frame = null;
                        }
                        return;
                    }


                    // Once all configuration passes are done, compute the average.
                    if (listConfig.Count == CONFIG_ITERACIONES)
                    {
                        // Sum per-point depths across every pass...
                        foreach (var item in listConfig)
                        {
                            for (int depth = 0; depth < averageDepthConfig.Length; depth++)
                            {
                                averageDepthConfig[depth] += item[depth];
                            }
                        }

                        // ...then divide to get the per-point average.
                        for (int depth = 0; depth < averageDepthConfig.Length; depth++)
                        {
                            averageDepthConfig[depth] /= CONFIG_ITERACIONES;
                        }

                        depthMapped = true;
                        // Clear the image buffer back to all black.
                        for (int i = 0; i < colorImage.Length; i += 3)
                        {
                            colorImage[i + 0] = 0;
                            colorImage[i + 1] = 0;
                            colorImage[i + 2] = 0;
                        }
                    }
                }
                // Phase 3: only now can depths and bounces be detected.
                if (edgesDetected && depthMapped)
                {
                    for (int row = minY; row < maxY; row++)
                    {
                        for (int col = minX; col < maxX; col++)
                        {
                            // Convert row/column into its flat-array equivalent; pixels
                            // at least 5mm (presumably — units unconfirmed) closer than
                            // the calibrated table surface are painted white.
                            depthPixel = rawDepthPixels[(row * WIDTH) + (col)];
                            if (depthPixel > MIN_DEPTH && depthPixel < MAX_DEPTH && depthPixel < averageDepthConfig[(row * WIDTH) + (col)] - 5)
                            {
                                colorImage[(row * WIDTH * 3) + (col * 3) + 0] = 255;
                                colorImage[(row * WIDTH * 3) + (col * 3) + 1] = 255;
                                colorImage[(row * WIDTH * 3) + (col * 3) + 2] = 255;
                            }
                            else
                            {
                                colorImage[(row * WIDTH * 3) + (col * 3) + 0] = 0;
                                colorImage[(row * WIDTH * 3) + (col * 3) + 1] = 0;
                                colorImage[(row * WIDTH * 3) + (col * 3) + 2] = 0;
                            }
                        }
                    }

                    // Transfer the pixel buffer into an OpenCV image.
                    frameOpenCV.SetTo(colorImage);
                    UMat uimage = new UMat();
                    CvInvoke.CvtColor(frameOpenCV, uimage, ColorConversion.Bgr2Gray);

                    //CvInvoke.Imshow("kinect camera", frameOpenCV);
                    // Smooth out small specks (note: smaller dilate kernel and different
                    // iteration counts than the edge-detection phase above).
                    Mat erodeElement  = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new System.Drawing.Size(3, 3), new System.Drawing.Point(-1, -1));
                    Mat dilateElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new System.Drawing.Size(5, 5), new System.Drawing.Point(-1, -1));

                    MCvScalar scalarD = new MCvScalar(5, 5);
                    CvInvoke.Erode(uimage, uimage, erodeElement, new System.Drawing.Point(-1, -1), 2, BorderType.Constant, scalarD);
                    CvInvoke.Dilate(uimage, uimage, dilateElement, new System.Drawing.Point(-1, -1), 4, BorderType.Constant, scalarD);

                    //CvInvoke.Imshow("Vision OPENCV", uimage);


                    // Find contours (candidate ball blobs).
                    Emgu.CV.Util.VectorOfVectorOfPoint countors = new Emgu.CV.Util.VectorOfVectorOfPoint();
                    Mat heir = new Mat();
                    Image <Rgb, byte> imgout = new Image <Rgb, byte>(frameOpenCV.Width, frameOpenCV.Height, new Rgb(200, 200, 200));
                    CvInvoke.FindContours(uimage, countors, heir, Emgu.CV.CvEnum.RetrType.Ccomp, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);



                    for (int i = 0; i < countors.Size; i++)
                    {
                        var moment = CvInvoke.Moments(countors[i]);
                        area = moment.M00;
                        // TODO: use something more selective that discards non-circular
                        // objects; for now the first blob within the area limits wins.
                        if (area > MIN_OBJECT_AREA && area < MAX_OBJECT_AREA)
                        {
                            // Centroid from the spatial moments.
                            x = (int)(moment.M10 / area);
                            y = (int)(moment.M01 / area);
                            CvInvoke.DrawContours(imgout, countors, i, new MCvScalar(255, 0, 0), 1);
                            break;
                        }
                    }

                    // (0, 0) doubles as the "no ball found" marker — a ball truly at the
                    // origin would be skipped.
                    if (x != 0 && y != 0)
                    {
                        int centerDepth = rawDepthPixels[y * WIDTH + x];


                        control.Add(centerDepth - averageDepthConfig[y * WIDTH + x]);

                        // AddTrajectory(false, new System.Drawing.Point(x, y));

                        // It is a bounce only if the ball-to-table gap is small; the
                        // table may be tilted, hence the per-pixel reference depth.
                        if (centerDepth < averageDepthConfig[y * WIDTH + x] - 5 && centerDepth > averageDepthConfig[y * WIDTH + x] - DEPTH_TOL)
                        {
                            // Bounce detection: depth no longer decreasing ⇒ not (yet) a
                            // bounce; first frame where it decreases again after that is
                            // reported once (gated by 'confirmacion').
                            if (centerDepth - beforeCenterDepth >= 0)
                            {
                                if (centerDepth - beforeCenterDepth != 0)
                                {
                                    confirmacion = false;
                                }

                                System.Console.WriteLine("NO Pico" + " BF " + beforeCenterDepth + " CD " + centerDepth + " confirmacion: " + confirmacion);
                            }
                            else
                            {
                                if (!confirmacion)
                                {
                                    System.Console.WriteLine("Pico" + " BF " + beforeCenterDepth + " CD " + centerDepth + " confirmacion: " + confirmacion);
                                    debugBounces.Add(new System.Drawing.Point(beforeX, beforeY));
                                    confirmacion = true;
                                }
                            }
                            beforeCenterDepth = centerDepth;
                            beforeX           = x;
                            beforeY           = y;
                            ball.position     = new Vector3(beforeX + 25, beforeY + 25, 0);
                        }

                        // Annotate the detected ball with its depth in meters.
                        CvInvoke.Circle(imgout, new System.Drawing.Point(x, y), 20, colorDetected, 6);
                        CvInvoke.PutText(imgout, ((double)centerDepth / 1000).ToString("F") + "m", new System.Drawing.Point(x - 38, y + 50), FontFace.HersheyPlain, 1.3, colorBounce, 2);

                        // Reset the "found" marker for the next frame.
                        x = 0;
                        y = 0;
                    }

                    // Draw the most recent bounce position(s).
                    foreach (var item in debugBounces)
                    {
                        CvInvoke.Circle(imgout, new System.Drawing.Point(item.X, item.Y), 10, colorBounce, 2);
                    }


                    // Keep only the latest bounce for display.
                    if (debugBounces.Count > 1)
                    {
                        debugBounces.RemoveAt(0);
                    }
                    foreach (var item in trajectory)
                    {
                        CvInvoke.Circle(imgout, new System.Drawing.Point(item.X, item.Y), 10, colorBounce, 2);
                    }
                    CvInvoke.DrawContours(imgout, edgesTable, indexArea, new MCvScalar(255, 0, 0), 1);
                    CvInvoke.Imshow("Deteccion", imgout);
                }
                if (frame != null)
                {
                    frame.Dispose();
                    frame = null;
                }
            }

            // NOTE(review): redundant — every path above has already disposed and
            // nulled 'frame' (or it was null to begin with), so this never fires.
            if (frame != null)
            {
                frame.Dispose();
                frame = null;
            }
        }
    }
Exemplo n.º 8
0
        /// <summary>
        /// Detects circles in <paramref name="image"/> with the Hough transform and
        /// returns the input masked to the detected circle regions (black elsewhere).
        /// Returns an all-black image of the same size when no circle is found.
        /// </summary>
        /// <param name="image">Input BGR image.</param>
        /// <returns>Input pixels inside the detected circles; black everywhere else.</returns>
        private static Image <Bgr, byte> detectCircle(Image <Bgr, byte> image)
        {
            HoornTimer timer = HoornTimer.Instance;

            timer.Start();
            // Convert the image to grayscale for the Hough transform.
            UMat uimage = new UMat();

            CvInvoke.CvtColor(image, uimage, ColorConversion.Bgr2Gray);
            timer.Stop();
            timer.calculateDiff("1");

            // Use an image pyramid (down then up) to remove fine-grained noise.
            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);

            #region circle detection
            double    cannyThreshold             = 180.0;
            double    circleAccumulatorThreshold = 120;
            CircleF[] circles                    = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);
            #endregion

            // NOTE: the original also ran Canny + HoughLinesP and built a performance
            // StringBuilder here; none of those results were ever used, so that dead
            // work has been removed.

            #region draw circles
            timer.Start();
            Image <Bgr, Byte> mask = new Image <Bgr, byte>(image.Width, image.Height);
            Image <Bgr, byte> dest = new Image <Bgr, byte>(image.Width, image.Height);

            // Accumulate every detected circle into one filled mask...
            foreach (CircleF circle in circles)
            {
                CvInvoke.Circle(mask, System.Drawing.Point.Round(circle.Center), (int)circle.Radius, new Bgr(System.Drawing.Color.Brown).MCvScalar, -1); // -1 => filled circle
            }
            // ...then apply it once. (The original recomputed the And inside the loop,
            // keeping only the final iteration's result anyway.) With no circles, the
            // blank 'dest' is returned, matching the original behavior.
            if (circles.Length > 0)
            {
                dest = image.And(image, mask.Convert <Gray, byte>());
            }
            timer.Stop();
            timer.calculateDiff("2");
            return(dest);

            #endregion
        }
Exemplo n.º 9
0
        /// <summary>
        /// Runs randomized circle detection over every numerically-named image in each
        /// subdirectory of <paramref name="sourceDir"/>, saving annotated images and
        /// density logs under <paramref name="destinationDir"/> and recording per-image
        /// results plus the detection parameters in "results.xlsx" (one worksheet per
        /// subdirectory). Already-processed directories are skipped.
        /// </summary>
        /// <param name="sourceDir">Root directory whose subdirectories hold the input images.</param>
        /// <param name="destinationDir">Root directory for annotated output, logs and the workbook.</param>
        public static void batchProcess(string sourceDir, string destinationDir)
        {
            XLWorkbook workBook;

            // Append to an existing workbook when present, otherwise start fresh.
            workBook = File.Exists(destinationDir + "//results.xlsx") ? new XLWorkbook(destinationDir + "//results.xlsx") : new XLWorkbook();
            string[] dirs       = Directory.GetDirectories(sourceDir);
            double   totalDirs  = dirs.Length;
            double   currentdir = 0;

            Console.WriteLine("Processed 0 %");
            foreach (string dir in dirs)
            {
                currentdir++;
                string[] files = Directory.GetFiles(dir);
                Console.WriteLine("Processing Directory : " + dir);
                string destinationPath = destinationDir + @"\" + dir.Replace(sourceDir + @"\", "");
                // An existing destination means this directory was processed before.
                if (Directory.Exists(destinationPath))
                {
                    continue;
                }
                // Excel limits worksheet names to 31 characters.
                var sheetName = Path.GetFileName(dir).Trim();
                if (sheetName.Length >= 31)
                {
                    sheetName = sheetName.Substring(0, 9) + " " + sheetName.Substring(sheetName.Length - 10);
                }
                var workSheet = workBook.Worksheets.Add(sheetName);
                workSheet.Cell(1, 1).Value = "File Name";
                workSheet.Cell(1, 2).Value = "Circle Status";
                workSheet.Cell(1, 3).Value = "Center";
                workSheet.Cell(1, 4).Value = "Radius";
                workSheet.Cell(1, 5).Value = "FailCount";
                workSheet.Cell(1, 6).Value = "Time Elapsed";
                workSheet.Cell(1, 7).Value = "Output";
                workSheet.Cell(1, 8).Value = "image Density";
                workSheet.Cell(1, 9).Value = "Edge Points Count";
                Directory.CreateDirectory(destinationPath);
                Directory.CreateDirectory(destinationPath + "/Undetected Circles");
                Directory.CreateDirectory(destinationPath + "/ByPassed Images");
                var index = 1;
                // 'using' guarantees both log streams are closed even when an
                // exception escapes the per-file handling below (the original
                // leaked both FileStreams in that case).
                using (FileStream densityFile1 = File.Create(destinationPath + "/densities.txt"))
                using (FileStream densityFile2 = File.Create(destinationPath + "/Undetected Circles" + "/densities.txt"))
                {
                    foreach (string file in files)
                    {
                        string fileName = Path.GetFileNameWithoutExtension(file);
                        // Only numerically-named files are processed; anything else is
                        // set aside. (The original drove this with the FormatException
                        // from Convert.ToInt32 — exception-based control flow.)
                        int parsedName;
                        if (!int.TryParse(fileName, out parsedName))
                        {
                            File.Copy(file, destinationPath + "/ByPassed Images/" + fileName + ".png", true);
                            continue;
                        }
                        try
                        {
                            index++;
                            workSheet.Cell(index, 1).SetValue(fileName).Hyperlink = new XLHyperlink(new Uri(file));
                            Stopwatch watch = new Stopwatch();
                            watch.Start();
                            Edge edgeObject = new Edge(file, true);
                            RandomizedCircleDetection randCircleDetect = new RandomizedCircleDetection(edgeObject, 30, 2000, 15, 1,
                                                                                                       0.01, 0.08, 0.47, EdgeType.CANNY_EDGE, false);
                            List <int[]> circles = randCircleDetect.DetectCircles();
                            watch.Stop();
                            // Edge density: edge points per pixel, tagged with the source path.
                            string density = ((double)edgeObject.GetEdgePointsCount() / (double)(edgeObject.Rows * edgeObject.Cols)).ToString() + ": " + file;
                            if (circles.Count != 0)
                            {
                                workSheet.Cell(index, 2).SetValue(true);
                                workSheet.Cell(index, 2).Style.Font.SetFontColor(XLColor.Green);
                                workSheet.Cell(index, 3).SetValue(new Point(circles[0][0], circles[0][1]));
                                workSheet.Cell(index, 4).SetValue(circles[0][2]);
                                // Draw the best circle onto the original image and save it.
                                Image <Bgr, byte> image = edgeObject.GetOriginalImage();
                                CvInvoke.Circle(image, new Point(circles[0][0], circles[0][1]), circles[0][2],
                                                new MCvScalar(1, 123, 100), 5);
                                CvInvoke.Imwrite(destinationPath + @"\" + fileName + ".png", image);

                                var temp = destinationPath + Path.DirectorySeparatorChar + fileName + ".png";
                                appendDensityLine(densityFile1, density);
                                workSheet.Cell(index, 7).SetValue(fileName).Hyperlink = new XLHyperlink(new Uri(temp));
                            }
                            else
                            {
                                // No circle found: record the miss and keep a copy for inspection.
                                workSheet.Cell(index, 2).SetValue(false);
                                workSheet.Cell(index, 2).Style.Font.SetFontColor(XLColor.Red);
                                File.Copy(file, destinationPath + "/Undetected Circles/" + fileName + ".png", true);
                                workSheet.Cell(index, 3).SetValue("NIL");
                                workSheet.Cell(index, 4).SetValue("NIL");
                                workSheet.Cell(index, 7).SetValue("NIL");
                                appendDensityLine(densityFile2, density);
                            }

                            workSheet.Cell(index, 5).SetValue(randCircleDetect.FailCount);
                            workSheet.Cell(index, 6).SetValue(watch.ElapsedMilliseconds.ToString() + "ms");
                            workSheet.Cell(index, 8).SetValue(randCircleDetect.ImageDensity);
                            workSheet.Cell(index, 9).SetValue(edgeObject.GetEdgePointsCount());
                        }
                        catch (Exception)
                        {
                            // Any detection or I/O failure: set the image aside and move on.
                            File.Copy(file, destinationPath + "/ByPassed Images/" + fileName + ".png", true);
                        }
                    }
                }

                // Footer: record the detection parameters used for this run.
                index += 3;
                workSheet.Cell(index, 3).SetValue("Edge Type");
                workSheet.Cell(index, 3).Style.Font.SetBold();
                workSheet.Cell(index, 4).SetValue("Canny Edge");
                workSheet.Cell(index, 4).Style.Font.SetBold();

                index++;
                workSheet.Cell(index, 3).SetValue("Min Edge Pts in Image Threshold");
                workSheet.Cell(index, 3).Style.Font.SetBold();
                workSheet.Cell(index, 4).SetValue("30");
                workSheet.Cell(index, 4).Style.Font.SetBold();

                index++;
                workSheet.Cell(index, 3).SetValue("Min Dist Btwn Edge Pts in Image Threshold");
                workSheet.Cell(index, 3).Style.Font.SetBold();
                workSheet.Cell(index, 4).SetValue("15");
                workSheet.Cell(index, 4).Style.Font.SetBold();

                index++;
                workSheet.Cell(index, 3).SetValue("Allowed Radius Error");
                workSheet.Cell(index, 3).Style.Font.SetBold();
                workSheet.Cell(index, 4).SetValue("1");
                workSheet.Cell(index, 4).Style.Font.SetBold();

                index++;
                workSheet.Cell(index, 3).SetValue("Circumfrence range");
                workSheet.Cell(index, 3).Style.Font.SetBold();
                workSheet.Cell(index, 4).SetValue("0.47");
                workSheet.Cell(index, 4).Style.Font.SetBold();

                index++;
                workSheet.Cell(index, 3).SetValue("Max Allowed Fail Count");
                workSheet.Cell(index, 3).Style.Font.SetBold();
                workSheet.Cell(index, 4).SetValue("2000");
                workSheet.Cell(index, 4).Style.Font.SetBold();

                index++;
                workSheet.Cell(index, 3).SetValue("Min Allowed Image Density");
                workSheet.Cell(index, 3).Style.Font.SetBold();
                workSheet.Cell(index, 4).SetValue("0.01");
                workSheet.Cell(index, 4).Style.Font.SetBold();

                index++;
                workSheet.Cell(index, 3).SetValue("Max Allowed Image Density");
                workSheet.Cell(index, 3).Style.Font.SetBold();
                workSheet.Cell(index, 4).SetValue("0.08");
                workSheet.Cell(index, 4).Style.Font.SetBold();

                Console.WriteLine("Processed : " + (currentdir * 100 / totalDirs));
            }
            // Save in place when appending to an existing workbook; otherwise write a new file.
            if (File.Exists(destinationDir + "//results.xlsx"))
            {
                workBook.Save();
            }
            else
            {
                workBook.SaveAs(destinationDir + Path.DirectorySeparatorChar + "results.xlsx");
            }
        }

        /// <summary>
        /// Writes one ASCII log line plus a platform newline to the given stream.
        /// Extracted from batchProcess, where this pattern appeared twice.
        /// </summary>
        /// <param name="stream">Open, writable density-log stream.</param>
        /// <param name="line">Text to append (without trailing newline).</param>
        private static void appendDensityLine(FileStream stream, string line)
        {
            byte[] bytes = Encoding.ASCII.GetBytes(line.ToCharArray());
            stream.Write(bytes, 0, bytes.Length);
            bytes = Encoding.ASCII.GetBytes(Environment.NewLine);
            stream.Write(bytes, 0, bytes.Length);
        }
Exemplo n.º 10
0
        /// <summary>
        /// Detects a traffic-light-style colour in the given BGR image by isolating the
        /// red-ish hue bands in HSV space, locating circles with a Hough transform, and
        /// classifying the mask intensity at each circle centre.
        /// Side effect: draws every detected circle onto <paramref name="bgrImg"/> in violet.
        /// </summary>
        /// <param name="bgrImg">Input image in BGR channel order; circles are drawn onto it.</param>
        /// <returns>"Red", "Yellow", "Green", or "Unknown" when no circle matches.</returns>
        public static string Detect(Image <Bgr, Byte> bgrImg)
        {
            string result = "Unknown";

            // Dispose every intermediate Mat/UMat deterministically; the original
            // leaked all of them until the finalizer ran.
            using (Mat hsvImage = new Mat())
            using (Mat lower_red_hue_range = new Mat())
            using (Mat upper_red_hue_range = new Mat())
            using (Mat red_hue_image = new Mat())
            using (UMat pyrDown = new UMat())
            {
                //Convert input to hsv image so hue thresholds are lighting independent
                CvInvoke.CvtColor(bgrImg, hsvImage, ColorConversion.Bgr2Hsv);

                //Threshold image, keep only the red pixel. Red wraps around the hue
                //circle, hence the two separate ranges. 80(multi color) -> 10(only red)
                CvInvoke.InRange(hsvImage, new ScalarArray(new MCvScalar(0, 100, 100)), new ScalarArray(new MCvScalar(80, 255, 255)), lower_red_hue_range);
                CvInvoke.InRange(hsvImage, new ScalarArray(new MCvScalar(160, 100, 100)), new ScalarArray(new MCvScalar(179, 255, 255)), upper_red_hue_range);

                // Combine the two hue masks and smooth to suppress speckle noise
                CvInvoke.AddWeighted(lower_red_hue_range, 1.0, upper_red_hue_range, 1.0, 0.0, red_hue_image);
                CvInvoke.GaussianBlur(red_hue_image, red_hue_image, new Size(9, 9), 2, 2);

                //use image pyr (down then up) to remove remaining high-frequency noise
                CvInvoke.PyrDown(red_hue_image, pyrDown);
                CvInvoke.PyrUp(pyrDown, red_hue_image);

                #region circle detection
                double    cannyThreshold             = 350; //red_hue_image.Rows / 8; //200
                double    circleAccumulatorThreshold = 80;  //100; //100
                CircleF[] circles                    = CvInvoke.HoughCircles(red_hue_image, HoughType.Gradient, 2.0, 20.0, cannyThreshold, circleAccumulatorThreshold, 5);
                #endregion

                #region draw circles
                foreach (CircleF circle in circles)
                {
                    CvInvoke.Circle(bgrImg, Point.Round(circle.Center), (int)circle.Radius, new Bgr(Color.Violet).MCvScalar, 2);
                }
                #endregion

                // Classify by the mask intensity at each circle centre; the last
                // matching circle wins, as in the original implementation.
                foreach (CircleF circle in circles)
                {
                    int cx = (int)circle.Center.X;
                    int cy = (int)circle.Center.Y;
                    // Guard against centres that round outside the image:
                    // GetPixel would throw ArgumentOutOfRangeException otherwise.
                    if (cx < 0 || cy < 0 || cx >= red_hue_image.Cols || cy >= red_hue_image.Rows)
                    {
                        continue;
                    }
                    Byte redValue = red_hue_image.Bitmap.GetPixel(cx, cy).R;
                    if (redValue > 100 && redValue < 200)
                    {
                        result = "Green";
                    }
                    else if (redValue > 200 && redValue < 250)
                    {
                        result = "Yellow";
                    }
                    else if (redValue > 250)
                    {
                        result = "Red";
                    }
                }
            }
            return(result);
        }
Exemplo n.º 11
0
        /// <summary>
        /// Runs one iteration of the plane-tracking pipeline on a captured frame:
        /// stamps the running frame counter, lets the algorithm detect planar regions
        /// (drawing candidate quadrilateral contours and the plane outline), and overlays
        /// the tracked 2D features and re-projected 3D features.
        /// On any processing failure the algorithm is reset and a new map is requested.
        /// </summary>
        /// <param name="frame">Raw captured frame; it is cloned, never modified.</param>
        /// <returns>An annotated copy of the frame.</returns>
        private Mat ProcessFrame(Mat frame)
        {
            var img = frame.Clone();

            // Stamp the running frame index in the top-left corner.
            CvInvoke.PutText(img, _i.ToString(), new Point(30, 30), FontFace.HersheyPlain, 1, new MCvScalar(255, 0, 0), 2);
            _i++;

            try
            {
                Algorithm.Process(img, _newMap, (normal, points, points2D) =>
                {
                    Planes.Add(new PlaneInfo
                    {
                        Normal   = normal,
                        Points3D = points
                    });

                    // Outline large convex quadrilaterals found in the frame
                    // (candidate planar surfaces). Dispose the native buffers
                    // deterministically; the original leaked them every frame.
                    using (var edges = new Mat())
                    using (var contours = new VectorOfVectorOfPoint())
                    using (var hierarchy = new Mat())
                    {
                        CvInvoke.Canny(img, edges, 0.1, 99);
                        CvInvoke.FindContours(edges, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxSimple);

                        for (int i = 0; i < contours.Size; i++)
                        {
                            using (var approx = new Mat())
                            {
                                CvInvoke.ApproxPolyDP(contours[i], approx, 9, true);

                                var approxMat = new Matrix <double>(approx.Rows, approx.Cols, approx.DataPointer);

                                // Compute the contour area once and reuse it; the
                                // original evaluated ContourArea twice and left the
                                // first result in an unused local.
                                double area = Math.Abs(CvInvoke.ContourArea(approx));
                                if (approxMat.Rows == 4 && area > 3000 &&
                                    CvInvoke.IsContourConvex(approx))
                                {
                                    CvInvoke.DrawContours(img, contours, i, new MCvScalar(0, 255, 0), 2);
                                }
                            }
                        }
                    }

                    // Draw the detected plane's projected outline.
                    CvInvoke.Polylines(img, points2D, true, new MCvScalar(0, 0, 255), 3, LineType.AntiAlias);

                    // Stop grabbing further frames once a plane has been found.
                    Capture = null;
                });
            }
            catch (Exception)
            {
                // Any tracking failure invalidates the current map: start over.
                _newMap = true;
                Algorithm.ResetAlgorithm();
            }

            if (Algorithm.IsBootstrapping)
            {
                _newMap = false;
                return(img);
            }
            if (Algorithm.IsBootstrapping || !Algorithm.IsTracking)
            {
                // Not bootstrapping and not tracking: request a fresh map next frame.
                // (The IsBootstrapping term only matters if the property flips between
                // the two reads.)
                _newMap = true;
                return(img);
            }

            #region Draw keypoints and projected 3D points

            // Re-project the tracked 3D features with the current pose (Raux/Taux)
            // and draw them in green, plus the raw tracked 2D features in red.
            var projected3DfeaturesrPoints = new VectorOfPointF();
            CvInvoke.ProjectPoints(Algorithm.TrackedFeatures3D, Algorithm.Raux, Algorithm.Taux, _calibration.Intrinsic,
                                   _calibration.Distortion, projected3DfeaturesrPoints);
            for (int i = 0; i < projected3DfeaturesrPoints.Size; i++)
            {
                var feature = projected3DfeaturesrPoints[i];
                CvInvoke.Circle(img, new Point((int)feature.X, (int)feature.Y), 2,
                                new MCvScalar(0, 255, 0), 2);
            }

            for (int i = 0; i < Algorithm.TrackedFeatures.Size; i++)
            {
                var feature = Algorithm.TrackedFeatures[i];
                CvInvoke.Circle(img, new Point((int)feature.Point.X, (int)feature.Point.Y), 1,
                                new MCvScalar(0, 0, 255), 1);
            }

            #endregion

            _newMap = true;

            return(img);
        }
Exemplo n.º 12
0
        /// <summary>
        /// Timer/event handler: grabs a webcam frame, isolates pixels matching the
        /// currently selected hue, finds the largest blob, marks its centre and
        /// approximate extent, and steers the hand towards it. Exits the application
        /// when the webcam cannot be read.
        /// </summary>
        void processFrameAndUpdateGUI(object sender, EventArgs arg)
        {
            currentFrame = grabber.QueryFrame();

            if (currentFrame == null)
            {
                MessageBox.Show("unable to read from webcam" + Environment.NewLine + Environment.NewLine +
                                "exiting program");
                Environment.Exit(0);
                return;
            }

            Mat imgHSV    = new Mat(currentFrame.Size, DepthType.Cv8U, 3);
            Mat imgThresh = new Mat(currentFrame.Size, DepthType.Cv8U, 1);

            CvInvoke.CvtColor(currentFrame, imgHSV, ColorConversion.Bgr2Hsv);

            // Keep only pixels within +/-10 of the selected hue with sufficient
            // saturation/value, then blur and open the mask to clean it up.
            CvInvoke.InRange(imgHSV, new ScalarArray(new MCvScalar(getHue() - 10, 130, 130)), new ScalarArray(new MCvScalar(getHue() + 10, 255, 255)), imgThresh);

            CvInvoke.GaussianBlur(imgThresh, imgThresh, new Size(3, 3), 0);

            Mat structuringElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));

            CvInvoke.Dilate(imgThresh, imgThresh, structuringElement, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0, 0, 0));
            CvInvoke.Erode(imgThresh, imgThresh, structuringElement, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0, 0, 0));

            // Intermediates no longer needed past this point; the original leaked them.
            imgHSV.Dispose();
            structuringElement.Dispose();

            // While searching (ball not yet found) sweep left until an end stop is hit,
            // throttled to one command every frameLimit frames.
            if (trackBall && !foundBall && !maxedOut[0][0] && !maxedOut[0][1])
            {
                if (framePause++ == frameLimit)
                {
                    sendCode(MOVE_LEFT, PAUSE, PAUSE, PAUSE);
                    framePause = 0;
                }
            }

            VectorOfVectorOfPoint contours  = new VectorOfVectorOfPoint();
            IOutputArray          hierarchy = null; // hierarchy not needed; Emgu accepts null

            CvInvoke.FindContours(imgThresh, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            double largest_area          = 30; // minimum area: smaller blobs are ignored
            int    largest_contour_index = 0;

            foundBall = false;
            int count = contours.Size;

            for (int i = 0; i < count; i++)
            {
                double a = CvInvoke.ContourArea(contours[i], false);  //  Find the area of contour
                if (a > largest_area)
                {
                    largest_area          = a;
                    largest_contour_index = i;                //Store the index of largest contour
                    foundBall             = true;
                }
            }

            if (trackBall && foundBall)
            {
                middleX = 0;
                middleY = 0;
                maxDist = 0;
                double newDist;
                int    x1, x2, y1, y2;
                int    size = contours[largest_contour_index].Size;

                // Centroid = mean of the contour points.
                for (int i = 0; i < size; i++)
                {
                    middleX += contours[largest_contour_index][i].X;
                    middleY += contours[largest_contour_index][i].Y;
                }
                // Approximate diameter = max distance between points half a contour apart.
                for (int i = 0; i < size / 2; i++)
                {
                    x1      = contours[largest_contour_index][i].X;
                    x2      = contours[largest_contour_index][i + size / 2].X;
                    y1      = contours[largest_contour_index][i].Y;
                    y2      = contours[largest_contour_index][i + size / 2].Y;
                    newDist = Math.Sqrt(Math.Pow((x2 - x1), 2) + Math.Pow((y2 - y1), 2));
                    if (maxDist < newDist)
                    {
                        maxDist = newDist;
                    }
                }

                middleX = middleX / contours[largest_contour_index].Size;
                middleY = middleY / contours[largest_contour_index].Size;

                CvInvoke.Circle(currentFrame, new Point((int)middleX, (int)middleY), 3, new MCvScalar(0, 255, 0), -1);

                CvInvoke.Circle(currentFrame, new Point((int)middleX, (int)middleY), (int)maxDist / 2, new MCvScalar(0, 255, 0), 2);

                boxBallPosition.AppendText("maxDist = " + maxDist + "   largest Area: " + largest_area);
                boxBallPosition.ScrollToCaret();

                if (framePause++ == frameLimit)
                {
                    rotateHand(middleX, middleY);
                    framePause = 0;
                }
            }

            // BUGFIX: only draw when a contour actually exists — DrawContours with
            // index 0 on an empty contour vector throws on blob-free frames.
            if (count > 0)
            {
                CvInvoke.DrawContours(imgThresh, contours, largest_contour_index, new MCvScalar(255, 0, 0));
                CvInvoke.DrawContours(currentFrame, contours, largest_contour_index, new MCvScalar(255, 0, 0));
            }

            if (boxChangeImg.Checked)
            {
                imgBoxFace.Image = imgThresh;
            }
            else
            {
                imgBoxFace.Image = currentFrame;
            }
        }
Exemplo n.º 13
0
        /// <summary>
        /// Frame handler: detects faces in the current webcam frame, recognises each
        /// one with the eigenface recogniser, and steers the hand towards the tracked
        /// person's face (or sweeps left when that person is not in view).
        /// </summary>
        void FrameGrabber_Parrellel(object sender, EventArgs e)
        {
            //Get the current frame form capture device
            currentFrame = grabber.QueryFrame();

            if (currentFrame != null)
            {
                // BUGFIX: grayFrame was previously allocated from currentFrame.Size
                // *before* the null check above, throwing NullReferenceException when
                // the grabber returned no frame. CvtColor allocates the destination
                // itself, so an empty Mat is sufficient here.
                grayFrame = new Mat();

                //Convert it to Grayscale
                CvInvoke.CvtColor(currentFrame, grayFrame, ColorConversion.Bgr2Gray);

                //Face Detector
                Rectangle[] facesDetected = faceClassifier.DetectMultiScale(grayFrame, 1.2, 10);

                // Sweep left while the target face has not been found and neither end
                // stop has been hit.
                if (trackFace && !foundFace && !maxedOut[0][0] && !maxedOut[0][1])
                {
                    sendCode(MOVE_LEFT, PAUSE, PAUSE, PAUSE);
                }

                // NOTE(review): the loop body writes shared fields (result, foundFace,
                // middleX/middleY, framePause) from multiple threads — data race.
                // Confirm whether a sequential loop would be acceptable here.
                Parallel.For(0, facesDetected.Length, i =>
                {
                    try
                    {
                        // Shrink the detection box towards the face centre so the
                        // recogniser sees less background.
                        facesDetected[i].X      += (int)(facesDetected[i].Height * 0.15);
                        facesDetected[i].Y      += (int)(facesDetected[i].Width * 0.22);
                        facesDetected[i].Height -= (int)(facesDetected[i].Height * 0.3);
                        facesDetected[i].Width  -= (int)(facesDetected[i].Width * 0.35);

                        result = new Mat(currentFrame, facesDetected[i]).ToImage <Gray, byte>();
                        result._EqualizeHist();
                        //draw the face detected with blue color
                        CvInvoke.Rectangle(currentFrame, facesDetected[i], new MCvScalar(255, 0, 0), 2);

                        if (eigenRecognition.IsTrained)
                        {
                            string name     = eigenRecognition.Recognise(result);
                            int match_value = (int)eigenRecognition.Get_Eigen_Distance;
                            //Draw the label for each face detected and recognized
                            CvInvoke.PutText(currentFrame, name + " ", new Point(facesDetected[i].X, facesDetected[i].Y), FontFace.HersheyComplex, 1, new MCvScalar(0, 255, 0), 1);
                            Console.WriteLine(name);
                            if (trackFace && name.Equals(trackName))
                            {
                                // Track the recognised face: aim at the box centre,
                                // throttled to one move per frameLimit frames.
                                foundFace = true;
                                middleX   = facesDetected[i].X + facesDetected[i].Width / 2;
                                middleY   = facesDetected[i].Y + facesDetected[i].Height / 2;
                                CvInvoke.Circle(currentFrame, new Point((int)middleX, (int)middleY), 3, new MCvScalar(0, 255, 0), -1);

                                if (framePause++ == frameLimit)
                                {
                                    rotateHand(middleX, middleY);
                                    framePause = 0;
                                }
                            }
                            else
                            {
                                if (framePause++ == frameLimit)
                                {
                                    sendCode(MOVE_LEFT, PAUSE, PAUSE, PAUSE);
                                    framePause = 0;
                                }
                            }
                        }
                    }
                    catch
                    {
                        // Swallowed deliberately: the parallel loop occasionally races
                        // ahead of available data; individual failures are ignorable.
                    }
                });
                //Show the faces procesed and recognized
                imgBoxFace.Image = currentFrame;
            }
        }
Exemplo n.º 14
0
    //Red Segmentation
    /// <summary>
    /// Segments the red regions of the supplied image (mirrored horizontally), finds
    /// the largest red blob, and returns a black image with a filled white dot at that
    /// blob's centroid. Returns null when no input image is supplied.
    /// </summary>
    /// <param name="Image">Source image, or null.</param>
    /// <returns>Centroid marker image, or null when <paramref name="Image"/> is null.</returns>
    public static BitmapSource Procred(BitmapSource Image)
    {
        //Guard clause: nothing to process
        if (Image == null)
        {
            return(null);
        }

        //Converts to Image<Hsv, byte> via an in-memory BMP round trip
        MemoryStream  Stream  = new MemoryStream();
        BitmapEncoder encoded = new BmpBitmapEncoder();
        encoded.Frames.Add(BitmapFrame.Create(Image));
        encoded.Save(Stream);
        System.Drawing.Bitmap myBmp     = new System.Drawing.Bitmap(Stream);        //Casts image to bitmap
        Image <Hsv, Byte>     processed = new Image <Hsv, Byte>(myBmp);             //Casts bitmap to image<Hsv, byte>

        //Main processing
        CvInvoke.Flip(processed, processed, Emgu.CV.CvEnum.FlipType.Horizontal);    //Flips the image in the horizontal (mirror view)
        //Grayscale mask holding the red segmentation (upper red hue band)
        Image <Gray, Byte> Thr1 = processed.InRange(new Hsv(170, 120, 70), new Hsv(180, 255, 255));

        //Handles noise and cleans image with a 3x3 kernel: open removes speckle,
        //dilate restores blob size
        Mat kernel = Mat.Ones(3, 3, Emgu.CV.CvEnum.DepthType.Cv32F, 1);
        CvInvoke.MorphologyEx(Thr1, Thr1, Emgu.CV.CvEnum.MorphOp.Open, kernel, new System.Drawing.Point(0, 0), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar(1));
        CvInvoke.MorphologyEx(Thr1, Thr1, Emgu.CV.CvEnum.MorphOp.Dilate, kernel, new System.Drawing.Point(0, 0), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar(1));

        //Extracts biggest blob
        double                Largestarea = 0;
        int                   Largestcontourindex = 0, X, Y;
        MCvPoint2D64f         Center;
        Image <Gray, Byte>    Centroid     = new Image <Gray, Byte>(processed.Width, processed.Height);
        Image <Gray, Byte>    Contourdrawn = new Image <Gray, Byte>(processed.Width, processed.Height);
        VectorOfVectorOfPoint Contours     = new VectorOfVectorOfPoint();
        Mat                   Hierarchy    = new Mat();

        CvInvoke.FindContours(Thr1, Contours, Hierarchy, Emgu.CV.CvEnum.RetrType.Ccomp, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);    //Finds contours in image

        //Iterates through each contour keeping the index of the largest one
        for (int i = 0; i < Contours.Size; i++)
        {
            double a = CvInvoke.ContourArea(Contours[i], false);                    //  Find the area of contour
            if (a > Largestarea)
            {
                Largestarea         = a;
                Largestcontourindex = i;
            }
        }

        // BUGFIX: only draw when a contour exists — DrawContours with index 0 on an
        // empty contour vector throws when no red is present.
        if (Contours.Size > 0)
        {
            CvInvoke.DrawContours(Contourdrawn, Contours, Largestcontourindex, new MCvScalar(255, 255, 255), 10, Emgu.CV.CvEnum.LineType.Filled, Hierarchy, 0); //Draws biggest contour on blank image
        }
        Moments moments = CvInvoke.Moments(Contourdrawn, true);                     //Gets the moments of the drawn contour
        Center = moments.GravityCenter;                                             //Converts the moment to a centre

        try
        {
            // Convert.ToInt32 throws (OverflowException) for the NaN centre produced
            // by an all-black contour image, which the catch below handles.
            X = Convert.ToInt32(Center.X);
            Y = Convert.ToInt32(Center.Y);
            Debug.WriteLine("X - {0}, Y - {1}", X, Y);                              //Prints centre co-ords to console
            CvInvoke.Circle(Centroid, new System.Drawing.Point(X, Y), 10, new MCvScalar(255, 255, 255), -1);
        }
        catch { Debug.WriteLine("No RED detected"); }

        //Cleanup: dispose every intermediate native resource (the original leaked
        //kernel, Hierarchy, Contours, Contourdrawn and processed)
        kernel.Dispose();
        Hierarchy.Dispose();
        Contours.Dispose();
        Contourdrawn.Dispose();
        Thr1.Dispose();
        processed.Dispose();
        Stream.Dispose();
        myBmp.Dispose();

        return(BitmapSourceConvert.ToBitmapSource(Centroid));                       //Returns processed image
    }
Exemplo n.º 15
0
        /// <summary>
        /// Processes a target scan loaded from file: pre-processes the image,
        /// lets the user position the shot manually in a dialog, then scores the
        /// shot and draws the result onto the scan.
        /// </summary>
        /// <param name="frame">Input scan of the target (BGR).</param>
        /// <param name="firstCannyThresh">Upper Canny threshold for the target edge pass.</param>
        /// <param name="secondCannyThresh">Lower Canny threshold for the target edge pass.</param>
        /// <param name="firstCannyThresh1">Reserved alternate Canny threshold (currently unused).</param>
        /// <param name="secondCannyThresh1">Reserved alternate Canny threshold (currently unused).</param>
        /// <param name="useThisTarget">Previously detected target geometry; required despite the null default.</param>
        /// <returns>Result holding intermediate images, the copied target details and the scored shot.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="frame"/> or <paramref name="useThisTarget"/> is null.</exception>
        public static ProcessFrameResult ProcessFromFile(Mat frame, int firstCannyThresh = 100, int secondCannyThresh = 60, int firstCannyThresh1 = 120, int secondCannyThresh1 = 50, TargetDetails useThisTarget = null)
        {
            // Both parameters are dereferenced unconditionally below; the original code
            // crashed with a bare NullReferenceException when useThisTarget kept its
            // null default. Fail fast with a descriptive exception instead.
            if (frame == null)
            {
                throw new ArgumentNullException(nameof(frame));
            }
            if (useThisTarget == null)
            {
                throw new ArgumentNullException(nameof(useThisTarget));
            }

            ProcessFrameResult result = new ProcessFrameResult();

            result.Target             = new TargetDetails();
            result.Target.BlackCenter = useThisTarget.BlackCenter;
            result.Target.BlackR      = useThisTarget.BlackR;

            var pix               = Pix(useThisTarget.BlackR);
            int czteryIpolmmR_int = Convert.ToInt32(FourNHalfR(pix)); // 4.5 mm radius expressed in pixels

            Mat circleImage = frame;

            #region image pre-processing

            var inputImage = frame.ToImage <Bgr, byte>();
            inputImage._GammaCorrect(0.4d); // brighten dark scans before edge detection
            result.GrSmootWarped = inputImage.Mat;

            #region blur / gray / canny of the target itself

            Mat canny_output12 = new Mat();

            // PyrDown followed by PyrUp acts as a cheap low-pass filter to suppress
            // scan noise; then convert to grayscale for the Canny pass.
            Mat smallGrayFrame12    = new Mat();
            Mat smoothedGrayFrame12 = new Mat();
            CvInvoke.PyrDown(inputImage.Mat, smallGrayFrame12);
            CvInvoke.PyrUp(smallGrayFrame12, smoothedGrayFrame12);
            CvInvoke.CvtColor(smoothedGrayFrame12, smoothedGrayFrame12, ColorConversion.Bgr2Gray);
            result.SmoothedOryginal = smoothedGrayFrame12;

            CvInvoke.Canny(smoothedGrayFrame12, canny_output12, firstCannyThresh, secondCannyThresh);

            #endregion blur / gray / canny of the target itself

            #endregion image pre-processing

            // Open the manual shot-positioning dialog; the dialog result itself is not used,
            // only the point the user selected.
            ManualShotPositioning msp = new ManualShotPositioning();
            msp.SetTargetAndShot(circleImage, czteryIpolmmR_int, useThisTarget);
            msp.ShowDialog();
            CvInvoke.Circle(circleImage, Point.Round(msp.SelectedPoint), czteryIpolmmR_int, new Bgr(Color.DeepPink).MCvScalar, 1, LineType.AntiAlias, 0);
            result.Shot = WyliczWartoscPrzestrzeliny(msp.SelectedPoint, useThisTarget);
            DrawCircles(circleImage, pix, useThisTarget.BlackCenter);
            result.TargetScanWithResult = circleImage;

            return(result);
        }
Exemplo n.º 16
0
        /// <summary>
        /// Image-processing timer tick handler. Depending on the selected radio
        /// button it runs the Hough circle transform, rectangle/triangle detection,
        /// or simply displays the Canny edge frame.
        /// </summary>
        private void ProcessTick(object sender, EventArgs e)
        {
            Stopwatch watch = new Stopwatch();      // measures the processing time of the active branch

            if (houghCircles_radioButton.Checked)
            {
                #region circle detection
                watch.Start();
                // The four Hough circle transform parameters, read from the UI spinners.
                double cannyThreshold             = (double)param1_UpDown.Value;
                double circleAccumulatorThreshold = (double)param2_UpDown.Value;
                double dp       = (double)dp_UpDown.Value;
                double min_dist = (double)minDist_UpDown.Value;
                // Detect circles on the smoothed gray frame (final argument 5 is the minimum radius).
                CircleF[] circles = CvInvoke.HoughCircles(_smoothedGrayFrame, HoughType.Gradient,
                                                          dp, min_dist, cannyThreshold, circleAccumulatorThreshold, 5);
                watch.Stop();
                houghTime_lab.Text = String.Format("耗时: {0} ms", watch.ElapsedMilliseconds);
                #endregion

                #region draw circles
                _frame.CopyTo(_circleImage);
                foreach (CircleF circle in circles)
                {
                    // Build a label containing the circle's center and radius.
                    String centerText = String.Format(" ({0}, {1}, R={2})", circle.Center.X, circle.Center.Y, circle.Radius);
                    // Mark the detected center (a radius-1 circle drawn at the center point).
                    CvInvoke.Circle(_circleImage, Point.Round(circle.Center), 1, new Bgr(Color.Red).MCvScalar, 2);
                    // Draw the detected circle outline.
                    CvInvoke.Circle(_circleImage, Point.Round(circle.Center), (int)circle.Radius, new Bgr(Color.Red).MCvScalar, 2);
                    // Render the center/radius label at the center point.
                    CvInvoke.PutText(_circleImage, centerText, Point.Round(circle.Center),
                                     FontFace.HersheyPlain, 0.8, new Bgr(Color.DarkOrange).MCvScalar);
                }
                proImageBox.Image = _circleImage;
                #endregion
            }
            else if (rectDetect_radioButton.Checked)
            {
                #region Find triangles and rectangles
                watch.Reset(); watch.Start();
                // Two lists collecting the detected triangles and rotated rectangles.
                List <Triangle2DF> triangleList = new List <Triangle2DF>();
                List <RotatedRect> boxList      = new List <RotatedRect>(); //a box is a rotated rectangle

                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(_cannyFrame, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                    int count = contours.Size;
                    for (int i = 0; i < count; i++)
                    {
                        using (VectorOfPoint contour = contours[i])
                            using (VectorOfPoint approxContour = new VectorOfPoint())
                            {
                                // Approximate each contour by a polygon with 5% arc-length tolerance.
                                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                                if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                                {
                                    if (approxContour.Size == 3)                      //The contour has 3 vertices, it is a triangle
                                    {
                                        Point[] pts = approxContour.ToArray();
                                        triangleList.Add(new Triangle2DF(
                                                             pts[0],
                                                             pts[1],
                                                             pts[2]
                                                             ));
                                    }
                                    else if (approxContour.Size == 4) //The contour has 4 vertices.
                                    {
                                        #region determine if all the angles in the contour are within [80, 100] degree
                                        bool            isRectangle = true;
                                        Point[]         pts         = approxContour.ToArray();
                                        LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                        for (int j = 0; j < edges.Length; j++)
                                        {
                                            double angle = Math.Abs(
                                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                            if (angle < 80 || angle > 100)
                                            {
                                                isRectangle = false;
                                                break;
                                            }
                                        }
                                        #endregion

                                        if (isRectangle)
                                        {
                                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                        }
                                    }
                                }
                            }
                    }
                }

                watch.Stop();
                rectTime_lab.Text = String.Format("耗时: {0} ms", watch.ElapsedMilliseconds);
                #endregion

                #region draw triangles and rectangles
                _frame.CopyTo(_triangleRectangleImage);
                foreach (Triangle2DF triangle in triangleList)
                {
                    CvInvoke.Polylines(_triangleRectangleImage, Array.ConvertAll(triangle.GetVertices(), Point.Round), true, new Bgr(Color.Blue).MCvScalar, 2);
                }
                foreach (RotatedRect box in boxList)
                {
                    CvInvoke.Polylines(_triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.DarkOrange).MCvScalar, 2);
                }
                proImageBox.Image = _triangleRectangleImage;
                #endregion
            }
            else if (canny_radioButton.Checked)
            {
                // No detection requested: show the raw Canny edge frame.
                proImageBox.Image = _cannyFrame;
            }
        }
Exemplo n.º 17
0
        /// <summary>
        /// Renders a thickness map from polar spot data: interpolates the measurements,
        /// converts them to cartesian coordinates, colors each point via the Inferno
        /// colormap, draws the points as filled dots and smooths the result.
        /// </summary>
        /// <param name="src">Measurement result holding the spot list (polar position + thickness).</param>
        /// <param name="divide">Number of theta/rho interpolation passes used to densify the point cloud.</param>
        /// <returns>Smoothed BGR color-map image.</returns>
        public static Image <Bgr, byte> CreateMapandBar(IPSResult src, int divide)
        {
            int dotSize        = 5;
            var sizemultiflier = 8;
            var imgshiftoffset = 10;
            var offset         = src.SpotDataList.Select(x => x.PlrPos.Rho / 10.0).Max();    // padding size

            // Materialize once: the thickness sequence was a deferred LINQ query that
            // re-enumerated the source for every aggregate (Min, Max) below.
            var thcklist = src.SpotDataList.Select(x => x.Thickness).ToArray();

            var min = thcklist.Min();
            var max = thcklist.Max();
            // All thicknesses equal => the normalization denominator is zero;
            // both color and gray value fall back to mid-scale in that case.
            bool flatRange = (max - min) == 0;

            var cm = new ColorMap().Inferno_cm;

            // Interpolate in polar space to densify the point cloud.
            var srcdatas = src.Result2TRThArr().ToList();
            for (int i = 0; i < divide; i++)
            {
                srcdatas = srcdatas.Interpol_Theta(1).ToList()
                           .Interpol_Rho(1).ToList();
            }

            // Convert to cartesian and attach a colormap entry to every point.
            var xyCm = srcdatas.ToCartesianReslt()
                       .OrderBy(x => x[0])
                       .ThenBy(x => x[1])
                       .AsParallel()
                       .AsOrdered()
                       .Select(x => new
            {
                X  = offset + x[0],
                Y  = offset + x[1],
                Cm = flatRange
                                     ? cm[127]  // mid-scale color double[r,g,b]
                                     : cm[(int)((x[2] - min) / (max - min) * 255)],
                // NOTE: the original computed this without the flat-range guard,
                // dividing by zero when all thicknesses were equal.
                Gry = flatRange ? 128 : (int)((x[2] - min) / (max - min) * 255 + 1)
            }).ToList();

            var imgsize = Math.Max(
                xyCm.Select(x => x.X).Max(),
                xyCm.Select(x => x.Y).Max()
                );

            var imgData           = new byte[(int)(imgsize * sizemultiflier + imgshiftoffset * 2.0), (int)(imgsize * sizemultiflier + imgshiftoffset * 2.0), 3];
            Image <Bgr, byte> img = new Image <Bgr, byte>(imgData);

            // Map each sample to pixel space; colormap entries are RGB in [0,1],
            // MCvScalar expects BGR in [0,255], hence the reversed channel order.
            var circleLst = xyCm.Select((x, i) => new
            {
                pos = new System.Drawing.Point(
                    (int)(x.X * sizemultiflier) + imgshiftoffset,
                    (int)(x.Y * sizemultiflier) + imgshiftoffset),

                color = new MCvScalar(x.Cm[2] * 255, x.Cm[1] * 255, x.Cm[0] * 255)
            });

            circleLst.ActLoop(x => CvInvoke.Circle(img, x.pos, dotSize, x.color, -1, Emgu.CV.CvEnum.LineType.EightConnected));

            // Median + Gaussian smoothing blends the discrete dots into a continuous map.
            img = img.Median(5);
            img = img.SmoothGaussian(3);

            return(img);
        }
Exemplo n.º 18
0
        /// <summary>
        /// Lets the user pick an image file, runs the corner/slope detection on it,
        /// overlays the detected Hough lines, corners and center on both the color
        /// and the binary image, and reports the elapsed time.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void btnMethodCal_Click(object sender, EventArgs e)
        {
            // Release buffers from any previous run.
            ClearMemory();

            pictureBox1.Image     = null;
            pictureBox2.Image     = null;
            btnCaptureImg.Enabled = false;

            // OpenFileDialog is IDisposable; the original leaked it on every click.
            using (OpenFileDialog openFile = new OpenFileDialog())
            {
                if (openFile.ShowDialog() == DialogResult.OK)
                {
                    myImg = new Image <Bgr, Byte>(openFile.FileName);
                }
                else
                {
                    return;
                }
            }

            // Time the detection with Stopwatch (consistent with ProcessTick and more
            // precise than the original DateTime.Now subtraction).
            Stopwatch sw = Stopwatch.StartNew();

            cornerPointK = detectCorner.GetCornerAndK(myImg.Bitmap, 1);

            sw.Stop();
            TimeSpan ts = sw.Elapsed;

            lblSpanTime.Text = "HK算法耗时:" + ts + "S";

            // Draw every detected Hough line on both the color and the binary image.
            LineSegment2D[] lines = detectCorner.GetLinesByHough(myImg.Bitmap, 0);
            foreach (LineSegment2D line in lines)
            {
                myImg.Draw(line, new Bgr(Color.Red), 10);
                detectCorner.binaryImg.Draw(line, new Gray(125), 3);
            }

            Image <Bgr, byte> roiImage = GetROI(myImg, ROI);

            for (int c = 0; c < cornerPointK.Corner.Count; c++)
            {
                // Mark each detected corner and label it with its coordinates.
                CvInvoke.Circle(myImg, new Point((int)cornerPointK.Corner[c].X, (int)cornerPointK.Corner[c].Y), 10, new MCvScalar(0, 0, 255), 10);
                CvInvoke.Circle(detectCorner.binaryImg, new Point((int)cornerPointK.Corner[c].X, (int)cornerPointK.Corner[c].Y), 10, new MCvScalar(0, 0, 255), 10);

                CvInvoke.PutText(myImg, string.Format("x={0:0.##}  y={1:0.##}", cornerPointK.Corner[c].X, cornerPointK.Corner[c].Y),
                                 new Point((int)cornerPointK.Corner[c].X + 30, (int)cornerPointK.Corner[c].Y + 20), FontFace.HersheyPlain, 10, new MCvScalar(0, 25, 255), 10);
                txtK2.Text += string.Format("x={0:0.##}  y={1:0.##}", cornerPointK.Corner[c].X, cornerPointK.Corner[c].Y) + "\t\t\t";
            }
            // Label the detected center point.
            CvInvoke.PutText(myImg, string.Format("x={0:0.##}  y={1:0.##}", cornerPointK.Center.X, cornerPointK.Center.Y),
                             new Point((int)cornerPointK.Center.X - 60, (int)cornerPointK.Center.Y + 80), FontFace.HersheyPlain, 12, new MCvScalar(255, 255, 0), 11);

            // Temporary: show the two detected line angles in degrees.
            CvInvoke.PutText(myImg, string.Format("theta1={0:0.##}  theta2={1:0.##}", cornerPointK.LineK1 * 180f / Math.PI, cornerPointK.LineK2 * 180f / Math.PI),
                             new Point((int)cornerPointK.Corner[0].X - 120, (int)cornerPointK.Corner[0].Y - 30), FontFace.HersheyPlain, 2, new MCvScalar(255, 2, 0), 2);
            txtK1.Text = string.Format("K1={00:0.####}", cornerPointK.LineK1 * 180f / Math.PI) + "\t\t" + string.Format("K2={00:0.####}", cornerPointK.LineK2 * 180f / Math.PI);
            pictureBox1.Image = myImg.ToBitmap();
            pictureBox2.Image = detectCorner.binaryImg.ToBitmap();
        }
        /// <summary>
        /// Copies the current gray frame into a managed buffer, kicks off the async
        /// face/eye/iris detection, and when it completes draws the detected features
        /// on the camera frame and pushes both images to the result window on the UI
        /// thread. Grabs the next frame (or shuts the app down when grabbing fails).
        /// </summary>
        private void FindFaceFeatures()
        {
            // Copy the raw gray pixels so the async pipeline works on a stable buffer.
            Marshal.Copy(grayFrame.DataPointer, grayBytes, 0, img_size);
            FindFaceEyesIrisAsync().ContinueWith((tsk) =>
            {
                RetrieveResult retrieveResult = tsk.Result;
                if (retrieveResult == null)
                {
                    // No detection result: show the plain gray frame and move on.
                    Dispatcher.BeginInvoke((Action)(() =>
                    {
                        BitmapSource grayImage = BitmapSource.Create(
                            img_width, img_height,
                            96.0, 96.0, PixelFormats.Gray8,
                            new BitmapPalette(new System.Windows.Media.Color[] { System.Windows.Media.Color.FromRgb(0, 0, 0), System.Windows.Media.Color.FromRgb(255, 255, 255) }),
                            grayBytes, img_width);
                        resultWindow.SetResult(grayImage, grayImage);
                        lock (_locker)
                        {
                            // Mark the pipeline ready and grab the next frame;
                            // a failed grab means the capture source is gone.
                            isReady = true;
                            if ((capture.Grab() == false))
                            {
                                App.Current.Shutdown();
                            }
                        }
                    }), DispatcherPriority.Render);
                    return;
                }

                FaceRetrieveResult faceRetrieveResult   = retrieveResult.Face;
                EyeRetrieveResult eyeRetrieveResult     = retrieveResult.Eye;
                PupilRetrieveResult pupilRetrieveResult = retrieveResult.Pupil;
                // NOTE(review): the variable names do not match the actual BGR values —
                // (100, 255, 255) is not white and (100, 255, 100) is green, not red.
                Emgu.CV.Structure.MCvScalar whiteColor  = new Emgu.CV.Structure.MCvScalar(100, 255, 255);
                Emgu.CV.Structure.MCvScalar redColor    = new Emgu.CV.Structure.MCvScalar(100, 255, 100);
                if (faceRetrieveResult.HasFace)
                {
                    // Outline the face, then each detected eye, then each pupil circle.
                    RECT face = faceRetrieveResult.Face;
                    CvInvoke.Rectangle(cameraFrame, face, whiteColor, 3);
                    if (eyeRetrieveResult.HasLeftEye)
                    {
                        RECT leftEye     = eyeRetrieveResult.LeftEye;
                        CIRCLE leftPupil = pupilRetrieveResult.LeftPupil;
                        CvInvoke.Rectangle(cameraFrame, leftEye, whiteColor, 3);
                        CvInvoke.Circle(cameraFrame, leftPupil.Center, leftPupil.Radius, redColor, 2);
                    }
                    if (eyeRetrieveResult.HasRightEye)
                    {
                        RECT rightEye     = eyeRetrieveResult.RightEye;
                        CIRCLE rightPupil = pupilRetrieveResult.RightPupil;
                        CvInvoke.Rectangle(cameraFrame, rightEye, whiteColor, 3);
                        CvInvoke.Circle(cameraFrame, rightPupil.Center, rightPupil.Radius, redColor, 2);
                    }
                }

                // Publish the feature data and both images on the UI thread.
                Dispatcher.BeginInvoke((Action)(() =>
                {
                    OnFeatureRead(eyeRetrieveResult, pupilRetrieveResult);
                    BitmapSource grayImage = BitmapSource.Create(
                        img_width, img_height,
                        96.0, 96.0, PixelFormats.Gray8,
                        new BitmapPalette(new System.Windows.Media.Color[] { System.Windows.Media.Color.FromRgb(0, 0, 0), System.Windows.Media.Color.FromRgb(255, 255, 255) }),
                        grayBytes, img_width);
                    BitmapSource resultImage = Bitmap2BitmapImage(cameraFrame.Bitmap);
                    resultWindow.SetResult(grayImage, resultImage);

                    lock (_locker)
                    {
                        // Same ready/grab handshake as the no-result path above.
                        isReady = true;
                        if ((capture.Grab() == false))
                        {
                            App.Current.Shutdown();
                        }
                    }
                }), DispatcherPriority.Render);
            });
        }
Exemplo n.º 20
0
        // Tìm vị trí và cắt bảng điểm ra
        // Đầu vào là ảnh màu học bạ
        // Output:
        //      - Ảnh học bạ detectImg có vị trí của bảng điểm : hình chữ nhật màu đỏ xác định vị trí bảng điểm
        //      - Ảnh bảng điểm transcriptImage: chỉ có cột các môn và điểm của từng môn
        public static void Detect(Mat source, out Image <Bgr, byte> detectImg, out Mat transcriptImage)
        {
            Mat resizeImage = new Mat();
            Mat binaryImage = new Mat();
            Mat dilateImage = new Mat();
            Mat observedImage;

            var element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));

            // Thay đổi kích thước ảnh ban đầu về ảnh 450x600 pixel
            CvInvoke.Resize(source, resizeImage, new Size(450, 650));

            // Chuyển sang ảnh Gray
            Image <Gray, byte> resAfter = new Image <Gray, byte>(resizeImage.Bitmap);

            // Thay đổi mức độ sáng và độ nét của ảnh
            resAfter._GammaCorrect(5.0);
            observedImage = resAfter.Clone().Mat;

            // Đưa về ảnh nhị phân
            CvInvoke.Threshold(resAfter, binaryImage, 200, 255, ThresholdType.BinaryInv);

            // Dòng này không dùng =))
            CvInvoke.Dilate(binaryImage, dilateImage, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));

            // Dùng thuật toán Hough để tìm các đường thẳng có trong ảnh
            LineSegment2D[] lines = CvInvoke.HoughLinesP(binaryImage, 2, Math.PI / 2, 50, 20);

            CvInvoke.Imshow("binaryImage asdasdsd", binaryImage);

            List <Linear> linears = new List <Linear>();

            // Chuyển kết quả của thuật toán Hough (đường thẳng xác định bởi 2 điểm) sang dạng phương trình ax + by + c = 0
            var listIline = new List <ILine>();

            Mat view = new Mat(resizeImage.Size, DepthType.Cv8U, 3);

            view.SetTo(new MCvScalar(0, 0, 0));

            for (int i = 0; i < lines.Length; i++)
            {
                Linear line;
                line.a  = -(lines[i].P2.Y - lines[i].P1.Y);
                line.b  = lines[i].P2.X - lines[i].P1.X;
                line.M0 = lines[i].P1;
                line.c  = line.a * (-line.M0.X) + line.b * (-line.M0.Y);

                linears.Add(line);

                // create linear;
                var iline = new ILine(lines[i].P1, lines[i].P2);
                listIline.Add(iline);
            }

            // Phân loại các đường: Nếu khoảng cách giữa 2 đường quá nhỏ thì nhóm chung lại 1 nhóm
            var classifyILine = DBSCANofILine(listIline, 8);

            //Vẽ các đường lên ảnh
            for (var i = 0; i < classifyILine.Count; i++)
            {
                var A = new Point();
                var B = new Point();

                if (classifyILine[i][0].tanAlpha == 0)
                {
                    //A = classifyILine[i][0].FindIntersectPoint(new ILine(new Point(0, 0), new Point(0, view.Size.Height)));
                    //B = classifyILine[i][0].FindIntersectPoint(new ILine(new Point(view.Size.Width, 0), new Point(view.Size.Width, view.Size.Height)));

                    A.Y = classifyILine[i][0].M.Y;
                    B.X = view.Size.Width;
                    B.Y = A.Y;
                }

                if (classifyILine[i][0].tanAlpha == Double.MaxValue)
                {
                    //A = classifyILine[i][0].FindIntersectPoint(new ILine(new Point(0, 0), new Point(view.Size.Width, 0)));
                    //B = classifyILine[i][0].FindIntersectPoint(new ILine(new Point(0, view.Size.Height), new Point(view.Size.Width, view.Size.Height)));

                    A.X = classifyILine[i][0].M.X;
                    B.X = A.X;
                    B.Y = view.Size.Height;
                }

                //if(classifyILine[i][0].tanAlpha < Math.Tan(Math.PI / 6))
                CvInvoke.Line(resAfter, A, B, new MCvScalar(0, 0, 0), 1);
            }

            // Phân loại các đường:
            //      - list_2[0] : các đường thẳng song song Ox
            //      - list_2[1] : các đường thẳng song song Oy
            var list_2 = ClassifyXY(classifyILine);

            // Tìm các đường thẳng song song có khoảng cách gần như không đổi (khoảng cách các đường thẳng phân chia các môn trong bảng điểm)
            var startIndex = 0;
            var endIndex   = 0;
            var step       = 0.0;
            var count      = 0;

            var currentStartIndex = 0;
            var currentEndIndex   = 0;
            var currentStep       = 0.0;
            var currentCount      = 0;

            for (var i = 0; i < list_2[0].Count - 1; i++)
            {
                if (i == 0 || Math.Abs(currentStep - list_2[0][i].Distance(list_2[0][i + 1].M)) < 5)
                {
                    if (i == 0)
                    {
                        currentStep = list_2[0][i].Distance(list_2[0][i + 1].M);
                    }
                    currentCount++;
                    currentEndIndex++;
                    if (currentCount > count)
                    {
                        startIndex = currentStartIndex;
                        endIndex   = currentEndIndex;
                        count      = currentCount;
                        step       = currentStep;
                    }
                }
                else
                {
                    currentStartIndex = i;
                    currentEndIndex   = i;
                    currentCount      = 0;
                    currentStep       = list_2[0][i].Distance(list_2[0][i + 1].M);
                }
            }

            //if(startIndex > 1) startIndex -= 2;
            endIndex++;

            // Đoạn này không dùng : Debug để xem có lấy đúng các đường thẳng của bảng hay không
            for (var i = startIndex; i <= endIndex; i++)
            {
                var A = new Point();
                var B = new Point();

                if (list_2[0][i].tanAlpha == 0)
                {
                    //A = classifyILine[i][0].FindIntersectPoint(new ILine(new Point(0, 0), new Point(0, view.Size.Height)));
                    //B = classifyILine[i][0].FindIntersectPoint(new ILine(new Point(view.Size.Width, 0), new Point(view.Size.Width, view.Size.Height)));

                    A.Y = list_2[0][i].M.Y;
                    B.X = view.Size.Width;
                    B.Y = A.Y;
                }

                if (list_2[0][i].tanAlpha == Double.MaxValue)
                {
                    //A = classifyILine[i][0].FindIntersectPoint(new ILine(new Point(0, 0), new Point(view.Size.Width, 0)));
                    //B = classifyILine[i][0].FindIntersectPoint(new ILine(new Point(0, view.Size.Height), new Point(view.Size.Width, view.Size.Height)));

                    A.X = list_2[0][i].M.X;
                    B.X = A.X;
                    B.Y = view.Size.Height;
                }

                if (list_2[0][i].tanAlpha < Math.Tan(Math.PI / 6))
                {
                    CvInvoke.Line(view, A, B, new MCvScalar(255, 255, 255), 1);
                }
            }


            // Cắt toàn bộ bảng điểm: chỉ có giới hạn bên trên và bên dưới - ảnh vẫn chứa phần ghi chú và phần chữ ký giáo viên
            var rectTransciptX = new Rectangle(new Point(0, list_2[0][startIndex].M.Y), new Size(resizeImage.Size.Width, list_2[0][endIndex].M.Y - list_2[0][startIndex].M.Y));

            var rectTransciptXImg = new Mat(binaryImage, rectTransciptX);

            //using (var model = CvInvoke.Imread(System.IO.Directory.GetCurrentDirectory() + "\\model-3.png", ImreadModes.Grayscale))
            //{
            //    //FeatureMatching.Init(model);
            //    //var transcriptXX = new Mat(observedImage, rectTransciptX);
            //    //long matchTime;

            //    //VectorOfPoint pointsMatching = FeatureMatching.Detect(transcriptXX, out matchTime);

            //    //Mat r = DrawMatches.Draw(model, transcriptXX, out matchTime);
            //    //CvInvoke.Imshow("imgasd", r);

            //    //CvInvoke.Polylines(transcriptXX, pointsMatching.ToArray(), true, new MCvScalar(0, 0, 0), 2);

            //    //Point[] listPoint = pointsMatching.ToArray().OrderBy(x => x.X).ToArray();

            //    //CvInvoke.Imshow("tran", transcriptXX);


            //}



            // Cắt phần bảng điểm

            // Dùng các đặc trưng của ảnh sẽ phân ra đc các vùng: tách vùng bảng điểm với vùng chữ ký
            // Kết hợp với các đường thẳng song song Oy trong list_2[1] để lấy vị trí của bảng điểm

            using (var transcriptBefore = new Mat(observedImage, rectTransciptX))
                using (var keyPointImage = new Image <Bgr, byte>(transcriptBefore.Size.Width, transcriptBefore.Size.Height))
                {
                    //Tìm đặc trưng của ảnh rồi vẽ lên ảnh nhị phân khác
                    var keyPoints = FeatureMatching.FindFeature(transcriptBefore);

                    keyPointImage.SetValue(new MCvScalar(0, 0, 0));

                    for (var i = 0; i < keyPoints.Size; i++)
                    {
                        CvInvoke.Circle(keyPointImage, new Point((int)keyPoints[i].Point.X, (int)keyPoints[i].Point.Y), 2, new MCvScalar(255, 255, 255), 2);
                    }
                    var histogramY = new List <int>();

                    for (var i = 0; i < keyPointImage.Size.Width; i++)
                    {
                        histogramY.Add(0);
                        for (var j = 0; j < keyPointImage.Size.Height; j++)
                        {
                            if (keyPointImage.Data[j, i, 0] + keyPointImage.Data[j, i, 1] + keyPointImage.Data[j, i, 2] > 0)
                            {
                                histogramY[i]++;
                            }
                        }
                    }

                    var keyPointBinary = new Mat(keyPointImage.Size, DepthType.Cv8U, 1);
                    CvInvoke.CvtColor(keyPointImage, keyPointBinary, ColorConversion.Bgr2Gray);
                    CvInvoke.Threshold(keyPointBinary, keyPointBinary, 100, 255, ThresholdType.Binary);

                    var avg = 80;

                    var start_transcript = 0;

                    for (var i = 0; i < histogramY.Count; i++)
                    {
                        if (histogramY[i] > avg)
                        {
                            for (var j = 0; j < list_2[1].Count; j++)
                            {
                                if (Math.Abs(list_2[1][j].M.X - i) < 20)
                                {
                                    start_transcript = j;
                                    CvInvoke.Rectangle(keyPointBinary, new Rectangle(new Point(0, 0), new Size(list_2[1][j].M.X, keyPointBinary.Size.Height)), new MCvScalar(0, 0, 0), -1);
                                    CvInvoke.Rectangle(keyPointImage, new Rectangle(new Point(0, 0), new Size(list_2[1][j].M.X, keyPointBinary.Size.Height)), new MCvScalar(0, 0, 0), -1);
                                    CvInvoke.Line(keyPointImage, new Point(list_2[1][j].M.X, 0), new Point(list_2[1][j].M.X, keyPointImage.Size.Height), new MCvScalar(255, 0, 0), 3);
                                    break;
                                }
                            }
                            break;
                        }
                    }

                    var count_area = 0;

                    for (var i = start_transcript; i < list_2[1].Count - 1; i++)
                    {
                        CvInvoke.Circle(keyPointImage, new Point(list_2[1][i].M.X, list_2[0][startIndex + 3].M.Y - rectTransciptX.Y), 3, new MCvScalar(0, 0, 255), 2);
                        CvInvoke.Circle(keyPointImage, new Point(list_2[1][i].M.X, list_2[0][startIndex + 4].M.Y - rectTransciptX.Y), 3, new MCvScalar(0, 0, 255), 2);
                        var rect_area = new Rectangle(new Point(list_2[1][i].M.X, list_2[0][startIndex + 3].M.Y - rectTransciptX.Y), new Size(list_2[1][i + 1].M.X - list_2[1][i].M.X, list_2[0][startIndex + 4].M.Y - list_2[0][startIndex + 3].M.Y));
                        CvInvoke.Rectangle(keyPointImage, rect_area, new MCvScalar(255, 255, 00), 2);
                        using (var rectImg2 = new Mat(binaryImage, new Rectangle(new Point(list_2[1][i].M.X + 2, list_2[0][startIndex + 3].M.Y + 2), new Size(list_2[1][i + 1].M.X - list_2[1][i].M.X - 4, list_2[0][startIndex + 4].M.Y - list_2[0][startIndex + 3].M.Y - 4))))
                        {
                            var nonZR = (double)CvInvoke.CountNonZero(rectImg2) / (rectImg2.Size.Width * rectImg2.Size.Height);

                            if (nonZR < 0.02)
                            {
                                count_area = i;
                                break;
                            }
                        }
                    }

                    CvInvoke.Line(keyPointImage, new Point(list_2[1][count_area].M.X, 0), new Point(list_2[1][count_area].M.X, keyPointImage.Size.Height), new MCvScalar(255, 0, 0), 3);

                    // Cắt lấy ảnh bảng điểm có kích thước ban đầu
                    var transcriptColor = new Image <Bgr, byte>(resAfter.Bitmap);

                    CvInvoke.Rectangle(transcriptColor, new Rectangle(new Point(list_2[1][start_transcript].M.X, list_2[0][startIndex - 2].M.Y), new Size(list_2[1][count_area].M.X - list_2[1][start_transcript].M.X, list_2[0][endIndex].M.Y - list_2[0][startIndex - 2].M.Y)), new MCvScalar(0, 0, 255), 2);
                    CvInvoke.Imshow("Transcript Detected", transcriptColor);


                    for (var i = 0; i < classifyILine.Count; i++)
                    {
                        var A = new Point();
                        var B = new Point();

                        CvInvoke.Line(transcriptColor, A, B, new MCvScalar(0, 0, 255), 1);
                    }

                    CvInvoke.Imshow("transcriptColor", transcriptColor);

                    detectImg = transcriptColor.Clone();

                    double scaleX = source.Size.Width / 450 * 1.2;
                    double scaleY = source.Size.Height / 650 * 1.2;

                    var rect_scale = new Rectangle(
                        new Point((int)(list_2[1][start_transcript].M.X * scaleX), (int)(list_2[0][startIndex - 2].M.Y * scaleY)),
                        new Size((int)((list_2[1][count_area].M.X - list_2[1][start_transcript].M.X) * scaleX), (int)((list_2[0][endIndex].M.Y - list_2[0][startIndex - 2].M.Y) * scaleY)));

                    var rect_scale_transcript = new Rectangle(
                        new Point((int)(list_2[1][start_transcript].M.X * scaleX), (int)(list_2[0][startIndex].M.Y * scaleY)),
                        new Size((int)((list_2[1][count_area].M.X - list_2[1][start_transcript].M.X) * scaleX), (int)((list_2[0][endIndex].M.Y - list_2[0][startIndex].M.Y) * scaleY)));

                    var transcrip_scale = new Mat(source, rect_scale);
                    var transcript_t    = new Mat(source, rect_scale_transcript);

                    var bfff = transcrip_scale.ToImage <Bgr, byte>();
                    bfff._GammaCorrect(5.0);

                    var transcipt_origin = transcript_t.ToImage <Bgr, byte>();
                    transcipt_origin._GammaCorrect(5.0);


                    var line_on_transcript = FindLine(transcipt_origin.Mat, 100, 50);

                    var lineX  = line_on_transcript[0];
                    var lineY  = line_on_transcript[1];
                    var random = new Random();
                    for (var i = 1; i < 10; i++) // toan, ly, hoa, sinh, tin, van, su, dia, TA
                    {
                        var p1 = new Point(lineY[lineY.Count - 3].M.X, lineX[i].M.Y);
                        var p2 = new Point(lineY[lineY.Count - 2].M.X, lineX[i].M.Y);
                        var p3 = new Point(lineY[lineY.Count - 1].M.X, lineX[i].M.Y);

                        var p4 = new Point(lineY[lineY.Count - 3].M.X, lineX[i - 1].M.Y);
                        var p5 = new Point(lineY[lineY.Count - 2].M.X, lineX[i - 1].M.Y);
                        var p6 = new Point(lineY[lineY.Count - 1].M.X, lineX[i - 1].M.Y);

                        //hk 1
                        //CvInvoke.Circle(transcipt_origin, p1, 6, new MCvScalar(255,0,0), 2);
                        var pic1 = new Mat(transcipt_origin.Mat, new Rectangle(new Point(p4.X + 10, p4.Y + 5), new Size(p2.X - p1.X - 20, p1.Y - p4.Y - 8)));
                        //CvInvoke.Imshow("mark1 " + i, pic1);
                        DetectDigit(pic1, "mark1 - " + i + " - " + random.Next());
                        //hk 2
                        //CvInvoke.Circle(transcipt_origin, p2, 6, new MCvScalar(255, 0, 0), 2);
                        var pic2 = new Mat(transcipt_origin.Mat, new Rectangle(new Point(p5.X + 10, p5.Y + 5), new Size(p3.X - p2.X - 20, p2.Y - p5.Y - 8)));
                        //CvInvoke.Imshow("mark2 " + i, pic2);
                        DetectDigit(pic2, "mark2 - " + i + " - " + random.Next());
                        //CN
                        //CvInvoke.Circle(transcipt_origin, p3, 6, new MCvScalar(255, 0, 0), 2);
                        var pic3 = new Mat(transcipt_origin.Mat, new Rectangle(new Point(p6.X + 10, p6.Y + 5), new Size(transcipt_origin.Size.Width - p3.X - 20, p3.Y - p6.Y - 8)));
                        CvInvoke.Imshow("mark3 " + i, pic3);

                        DetectDigit(pic3, "mark3 - " + i + " - " + random.Next());
                    }

                    transcriptImage = transcipt_origin.Mat;
                    CvInvoke.Imshow("transcipt_origin", transcriptImage);
                }
        }
// Exemplo n.º 21
// 0
        /// <summary>
        /// RTSP frame handler: background-subtracts the current frame, tracks the
        /// resulting blobs, counts unique tracks crossing the virtual counting line,
        /// optionally records video, and updates the preview picture box.
        /// </summary>
        private void ProcessFrameRTSP(object sender, EventArgs e)
        {
            // Endpoints of the virtual counting line.
            // NOTE(review): px1/px2 feed X and Y of a single point — confirm the
            // field naming is intentional.
            Point px = new Point(px1, px2);
            Point py = new Point(py1, py2);

            if (cap == null)
            {
                return;
            }

            cap.Retrieve(frame, 0);
            currentframe = frame.ToImage <Bgr, byte>();

            using (Mat mask = new Mat())
            using (Mat kernelOp = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1)))
            using (Mat kernelCl = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(11, 11), new Point(-1, -1)))
            using (Mat element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1)))
            {
                // Foreground mask, then clean-up: blur -> open (remove specks)
                // -> close (fill holes) -> dilate -> hard binarization.
                sub.Apply(currentframe, mask);
                CvInvoke.GaussianBlur(mask, mask, new Size(13, 13), 1.5);
                CvInvoke.MorphologyEx(mask, mask, MorphOp.Open, kernelOp, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
                CvInvoke.MorphologyEx(mask, mask, MorphOp.Close, kernelCl, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
                CvInvoke.Dilate(mask, mask, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Threshold(mask, mask, 127, 255, ThresholdType.Binary);

                detect.Detect(mask.ToImage <Gray, byte>(), blobs);
            }

            blobs.FilterByArea(100, int.MaxValue);
            tracks.Update(blobs, 20.0, 1, 10);

            CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 0, 255), 1);
            foreach (KeyValuePair <uint, CvTrack> pair in tracks)
            {
                if (pair.Value.Inactive != 0) // only draw the active tracks
                {
                    continue;
                }

                int cx = Convert.ToInt32(pair.Value.Centroid.X);
                int cy = Convert.ToInt32(pair.Value.Centroid.Y);

                currentframe.Draw(pair.Value.BoundingBox, new Bgr(0, 0, 255), 1);

                Point center = new Point(cx, cy);
                CvInvoke.Circle(currentframe, center, 1, new MCvScalar(255, 0, 0), 2);

                // Count the track once when its centroid enters the band around the line.
                if (center.Y <= px.Y + 10 && center.Y > py.Y - 10 && center.X <= py.X && center.X > px.X)
                {
                    string id = pair.Key.ToString();
                    if (id != "" && !carid.Contains(id))
                    {
                        carid.Add(id);
                        if (carid.Count == 20)
                        {
                            // Cap the remembered ids; after a clear, old ids may be re-counted.
                            carid.Clear();
                        }

                        carcount++;
                        // Notify asynchronously so the frame handler is not blocked by SMTP.
                        Thread logTh = new Thread(SendMail);
                        logTh.Start();
                    }
                    CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 255, 0), 2);
                }
            }

            if (isRecording)
            {
                // BUG FIX: both branches wrote the frame; only the timestamp differs.
                writer.WriteVideoFrame(currentframe.Bitmap);
                if (firstFrameTime == null)
                {
                    firstFrameTime = DateTime.Now; // timestamp of the first recorded frame
                }
            }

            CvInvoke.PutText(currentframe, "Count :" + carcount.ToString(), new Point(10, 25), FontFace.HersheySimplex, 1, new MCvScalar(0, 255, 255), 2, LineType.AntiAlias);
            pictureBox1.Image = currentframe.Bitmap;

            if (firstCount == false && carcount == 15)
            {
                // BUG FIX: the old format "dd-mm-yyyy-hh-mm-ss" used "mm" (minutes) in the
                // month position and "hh" (12-hour, no AM/PM) for the hour; "MM"/"HH"
                // produce the intended day-month-year time stamp.
                Image_Name = cfg.PhotoSavePath + @"\" + "Car" + DateTime.Now.ToString("dd-MM-yyyy-HH-mm-ss") + ".jpg";
                currentframe.Save(Image_Name);

                firstCount = true;
            }
        }
        /// <summary>
        /// Frame-grabbed handler: detects circular plates with a Hough transform,
        /// saves a masked crop of each circle and submits it for prediction, then
        /// stops the capture after the first successful detection pass.
        /// </summary>
        private void Capture_ImageGrabbed1(object sender, EventArgs e)
        {
            try
            {
                using (Mat m = new Mat())
                {
                    capture.Retrieve(m);

                    // Grayscale + strong median blur to stabilise the Hough transform.
                    using (Mat gray = new Mat(m.Size, DepthType.Cv8U, 1))
                    using (Mat blur = new Mat(m.Size, DepthType.Cv8U, 1))
                    {
                        CvInvoke.CvtColor(m, gray, ColorConversion.Bgr2Gray);
                        CvInvoke.MedianBlur(gray, blur, 11);
                        pictureBox1.Image = m.ToImage <Bgr, byte>().Bitmap;

                        CircleF[] circles = CvInvoke.HoughCircles(blur, HoughType.Gradient, 2, gray.Rows / 16, 20, 150, 80, 100);

                        if (circles.Length >= 1)
                        {
                            // Keep the raw frame on disk and show an annotated copy.
                            m.Save("test.png");
                            using (Mat copy = m.Clone())
                            {
                                for (int i = 0; i < circles.Length; i++)
                                {
                                    CvInvoke.Circle(copy, Point.Round(circles[i].Center), (int)circles[i].Radius, new MCvScalar(255, 0, 0), 3, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
                                    pictureBox1.Image = copy.ToImage <Bgr, byte>().Bitmap;
                                }
                            }

                            for (int i = 0; i < circles.Length; i++)
                            {
                                string filepath = "test" + i.ToString() + ".png";
                                if (File.Exists(filepath))
                                {
                                    File.Delete(filepath);
                                }

                                // Fill the detected circle on a mask (-1 thickness = fill),
                                // copy only that region out, and send the crop for prediction.
                                using (Image <Gray, byte> mask = new Image <Gray, byte>(m.Width, m.Height))
                                using (Image <Bgr, byte> dest = new Image <Bgr, byte>(m.Width, m.Height))
                                {
                                    CvInvoke.Circle(mask, Point.Round(circles[i].Center), (int)circles[i].Radius, new MCvScalar(255, 255, 255), -1, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
                                    m.CopyTo(dest, mask);
                                    dest.Save(filepath);
                                }

                                MakePredictionRequest(filepath);
                            }

                            // One detection pass is enough: detach and stop the capture.
                            if (capture != null)
                            {
                                capture.ImageGrabbed -= Capture_ImageGrabbed1;
                                capture.Stop();
                                capture = null;
                            }
                        }
                        if (empty == true)
                        {
                            Console.WriteLine("The tray contains an empty plate.");
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                // BUG FIX: exceptions were silently swallowed; at least surface them
                // so detection failures are diagnosable.
                Console.WriteLine(ex);
            }
        }
// Exemplo n.º 23
// 0
        /// <summary>
        /// Runs the center-offset analysis over every image in <paramref name="srclist"/>,
        /// annotates each result image with per-center coordinates and the error distance,
        /// then writes the images plus a CSV summary to a user-chosen location.
        /// Each image is processed at its own resolution (RatioW/RatioH rescale to 800x600).
        /// </summary>
        /// <param name="srclist">Images to analyse; each entry's <c>error</c> is filled in.</param>
        /// <param name="cfg">Threshold/resolution configuration; must not be null.</param>
        // NOTE: async void kept — callers invoke this fire-and-forget (event-handler
        // style) and all exceptions are handled internally.
        public async void StartMultiProcessing(List <MultiAnalysisDatacs> srclist, Config cfg)
        {
            if (cfg == null)
            {
                System.Windows.MessageBox.Show(" Please Set Config First. ");
                return; // BUG FIX: previously fell through and dereferenced cfg below.
            }

            if (srclist.Count < 1)
            {
                return;
            }

            using (SaveFileDialog sfd = new SaveFileDialog())
            {
                sfd.Title = "Choose folder and header name of Result";
                if (sfd.ShowDialog() != DialogResult.OK)
                {
                    return;
                }

                string basepath = Path.GetDirectoryName(sfd.FileName);
                string header   = Path.GetFileName(sfd.FileName);

                Console.WriteLine(header);
                evtMultiStart();
                await Task.Run(() => {
                    for (int k = 0; k < srclist.Count; k++)
                    {
                        try
                        {
                            var path = srclist[k].fullname;
                            BaseImg  = new Img(path);
                            // Scale factors from the native resolution to the 800x600 working size.
                            RatioW   = BaseImg.Width / 800.0;
                            RatioH   = BaseImg.Height / 600.0;
                            ClrOriginalImg = new ColorImg(path);
                            ClrImg         = ClrOriginalImg.Copy();

                            // Processing: binarize -> median smooth -> extract contours.
                            var img = BaseImg.ThresholdBinary(new Gray(cfg.Threshold), new Gray(255))
                                      .SmoothMedian((int)cfg.Resolution * 10 + 1);
                            var contours = new VectorOfVectorOfPoint();

                            CvInvoke.FindContours(img, contours, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);

                            var cntrColorSet = CreateContour_ColorSet(Outercolor, Innercolor, cfg, contours);

                            List <VectorOfPoint> cntrlist = cntrColorSet.Item1;
                            List <MCvScalar> colorlist    = cntrColorSet.Item2;

                            // Annotate each contour center with its real-world (um) coordinates.
                            var centers    = FindCenter(cntrlist);
                            var centerlist = new List <double[]>();

                            for (int i = 0; i < centers.Count(); i++)
                            {
                                CvInvoke.Circle(ClrImg, centers[i], 5, colorlist[i]);
                                var realx = (centers[i].X *RatioW *cfg.Resolution);
                                var realy = (centers[i].Y *RatioH *cfg.Resolution);

                                var x     = realx.ToString();
                                var y     = realy.ToString();
                                string xy = x + " , " + y + " (um)";

                                // Stagger the labels so overlapping centers stay readable.
                                System.Drawing.Point textpos = new System.Drawing.Point(centers[i].X - 40 - (i * 25), centers[i].Y - 10 - (i * 25));
                                CvInvoke.PutText(ClrImg, xy, textpos, FontFace.HersheySimplex, 0.4, colorlist[i]);

                                centerlist.Add(new double[] { realx, realy });
                            }
                            double errorDistance = CalcDistance(centerlist);
                            ClrImg = CenterDiffDraw(centers, ClrImg);

                            System.Drawing.Point textdifpos = new System.Drawing.Point(centers[0].X + 40, centers[0].Y + 10);
                            CvInvoke.PutText(ClrImg, "Error : " + errorDistance.ToString("F4") + " (um)", textdifpos, FontFace.HersheySimplex, 0.4, new MCvScalar(153, 51, 153));

                            var res = ToBitmapSource(ClrImg);

                            var error = errorDistance.ToString("F4");
                            evtNumError(k, error);
                            srclist[k].error = error;

                            // Save the annotated result image.
                            ClrImg.Save(basepath + "\\" + k.ToString() + "_" + header + "_" + srclist[k].name);
                        }
                        catch (Exception er)
                        {
                            // Per-image failure: log and continue with the next image.
                            er.ToString().Print();
                        }
                    }

                    try
                    {
                        // Write the CSV summary next to the chosen file name.
                        StringBuilder stv = new StringBuilder();
                        stv.AppendLine("Number,FileName,Error"); // BUG FIX: header typo "Nnmber"
                        foreach (var item in srclist)
                        {
                            stv.AppendLine(item.no.ToString() + "," + item.name + "," + item.error);
                        }
                        File.WriteAllText(sfd.FileName + "_Result.csv", stv.ToString());
                    }
                    catch (Exception)
                    {
                        System.Windows.MessageBox.Show("Please check result file path or file is opened or not");
                    }
                });

                evtMultiEnd();
            }
        }
        /// <summary>
        /// Timer tick: background-subtracts the current camera frame, isolates the
        /// largest HSV-filtered contour, draws its convexity defects, and matches it
        /// against the trained contour set to recognize a gesture/shape.
        /// Note: maxArea/idx are instance fields reset at the END of this handler so
        /// they start at 0 on the next tick.
        /// </summary>
        void timer_Tick(object sender, EventArgs e)
        {
            sw.Start();

            // Difference against the stored background, smooth, then keep only the
            // configured HSV range as a binary mask.
            currentFrame = capture.QueryFrame().ToImage<Hsv, byte>();
            currentFrame = currentFrame.AbsDiff(backgroundFrame);
            smoothedFrame = currentFrame.PyrDown().PyrUp();
            smoothedFrame._SmoothGaussian(3);
            filteredFrame = smoothedFrame.InRange(new Hsv(hueLower, saturationLower, valueLower), new Hsv(hueHigher, saturationHigher, valueHigher));

            outFrame = filteredFrame;//.Canny((cannyThresh), (cannyThreshLink));
            CvInvoke.FindContours(outFrame, contours, hierarchy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
            // Select the largest contour by area (index kept in the idx field).
            for (int i = 0; i < contours.Size; i++)
            {
                double area = CvInvoke.ContourArea(contours[i], false);
                if (area > maxArea)
                {
                    maxArea = area;
                    idx = i;
                }
            }
            // Repaint the display image and draw the winning contour.
            image.SetValue(new MCvScalar(180, 0, 255));
            CvInvoke.DrawContours(image, contours, idx, new MCvScalar(255, 255, 255), 3);
            ContourArea.Text = maxArea.ToString();
            if (maxArea != 0)
            {
                // Convex hull, convexity defects and centroid of the largest contour.
                CvInvoke.ConvexHull(contours[idx], cvh, false);
                CvInvoke.ConvexityDefects(contours[idx], cvh, cvd);
                moments = CvInvoke.Moments(contours[idx], true);
                centroid = new System.Drawing.Point((int) moments.GravityCenter.X, (int) moments.GravityCenter.Y);
                CvInvoke.Circle(image, centroid, 5, new MCvScalar(100,50,25), 1);
                if (contours.Size != 0)
                {
                    polyline = contours[idx].ToArray();

                    if (!cvd.IsEmpty && contours[idx].Size > 10)
                    {
                        // Each defect row is (startIdx, endIdx, farthestPointIdx, depth)
                        // indexing into the contour polyline.
                        Matrix<int> m = new Matrix<int>(cvd.Rows, cvd.Cols,
                                          cvd.NumberOfChannels);
                        cvd.CopyTo(m);
                        for (int i = 0; i < m.Rows; i++)
                        {
                            int startIdx = m.Data[i, 0];
                            int endIdx = m.Data[i, 1];
                            int fpIdx = m.Data[i, 2];
                            int depth = m.Data[i, 3];
                            
                            startPoint = polyline[startIdx];
                            endPoint = polyline[endIdx];
                            midPoint = new System.Drawing.Point(
                                (startPoint.X + endPoint.X) / 2, (startPoint.Y + endPoint.Y) / 2);
                            farthestPoint = polyline[fpIdx];
                            // Visualise each defect: mid-of-hull-edge to deepest point,
                            // plus the hull edge itself.
                            CvInvoke.Line(image, midPoint, farthestPoint, new MCvScalar(180, 255, 0));
                            CvInvoke.Line(image, startPoint, endPoint, new MCvScalar(180, 255, 255));
                        }
                    }

                    if(trained.Size!=0)
                    {
                        // Find the best (lowest-score) shape match among the trained contours.
                        double match=1000000;
                        
                        int d = 0;
                        for (int i = 0; i  < trained.Size; i++)
                        {
                            double curr = CvInvoke.MatchShapes(contours[idx], trained[i], ContoursMatchType.I3);
                            if(curr < match)
                            {
                                d = i;
                                match = curr;
                            }

                        }
                        // 0.25 is the empirical acceptance threshold for a recognized shape.
                        if(match<0.25)
                        {
                            ContourArea.Text = words[d];
                            image.Draw(words[d], centroid, FontFace.HersheyTriplex, 1, new Hsv(90,100, 100));
                        }
                        
                    }
                }
            }



            if (currentFrame != null)
            {
                sw.Stop();
                // Push the processed, original and annotated frames to the UI.
                imgPros.Source = ToBitmapSource(outFrame);
                imgOrig.Source = ToBitmapSource(currentFrame);
                imgSmooth.Source = ToBitmapSource(image);

                
                sw.Reset();
            }

            // Reset the selection fields for the next tick.
            maxArea = 0;
            idx = 0;

        }
// Exemplo n.º 25
// 0
        /// <summary>
        /// Camera frame handler: locates (or, when tracking, follows) the marker,
        /// renders overlays and a diff against the stored reference image, and
        /// updates the status text.
        /// </summary>
        private void RetrieveFrame(object sender, EventArgs arg)
        {
            Mat   diff   = new Mat();
            Point center = Point.Empty;

            // retrieve image from camera
            Mat frame = new Mat();

            Camera.Retrieve(frame);
            ImageSize = frame.Size;

            // generate diff image against the reference (before drawing overlays)
            if (!ImgRef.IsEmpty && !IsTuning)
            {
                CvInvoke.AbsDiff(frame, ImgRef, diff);
            }

            if (!IsTracking)
            {
                // find marker, either in the full frame or restricted to the ROI
                Rectangle boundingBox;
                if (Roi.IsEmpty)
                {
                    center = FindMarker(frame, out boundingBox);
                }
                else
                {
                    // BUG FIX: the frame clone passed to the ROI view was leaked;
                    // dispose it together with the crop.
                    using (Mat frameCopy = frame.Clone())
                    using (Mat crop = new Mat(frameCopy, Roi))
                    {
                        center = FindMarker(crop, out boundingBox);
                        if (!center.IsEmpty)
                        {
                            // translate crop-relative coordinates back to frame space
                            center.X      += Roi.X;
                            center.Y      += Roi.Y;
                            boundingBox.X += Roi.X;
                            boundingBox.Y += Roi.Y;
                        }
                    }
                }

                // store marker point and (re)initialise the tracker on request
                if (ScheduleTakeReference && !center.IsEmpty)
                {
                    ImgRef        = frame.Clone();
                    PointRef      = center;
                    MarkerTracker = new TrackerCSRT();
                    MarkerTracker.Init(ImgRef, boundingBox);
                    ScheduleTakeReference = false;
                    Invoke(new Action(() => UpdateInstructionText()));
                }

                // draw marker
                if (!center.IsEmpty)
                {
                    CvInvoke.Circle(frame, center, 4, new MCvScalar(0, 140, 255), -1);
                    CvInvoke.Rectangle(frame, boundingBox, new MCvScalar(0, 140, 255), 1);
                }

                // draw ROI: darken everything outside it (multiply by 1*0.5 outside,
                // 2*0.5 inside, i.e. outside at half brightness)
                if (!Roi.IsEmpty)
                {
                    using (Mat dark = new Mat(frame.Rows, frame.Cols, frame.Depth, 3))
                    {
                        dark.SetTo(new MCvScalar(1, 1, 1));
                        CvInvoke.Rectangle(dark, Roi, new MCvScalar(2, 2, 2), -1);
                        CvInvoke.Multiply(frame, dark, frame, 0.5);
                    }
                }

                ImgBoxRef.Image = frame;
            }
            else
            {
                // tracking mode: let the CSRT tracker follow the marker
                if (MarkerTracker.Update(frame, out Rectangle trackingRect))
                {
                    center = new Point(trackingRect.X + trackingRect.Width / 2, trackingRect.Y + trackingRect.Height / 2);
                    CvInvoke.Circle(frame, center, 4, new MCvScalar(0, 140, 255), -1);
                    CvInvoke.Rectangle(frame, trackingRect, new MCvScalar(0, 140, 255), 1);
                }

                ImgBoxLive.Image = frame;
            }

            // update diff image box: mark both points and the distance between them
            if (!diff.IsEmpty)
            {
                MCvScalar color = new MCvScalar(0, 140, 255);
                if (!center.IsEmpty)
                {
                    CvInvoke.Circle(diff, center, 4, color, -1);
                }
                if (!PointRef.IsEmpty)
                {
                    CvInvoke.Circle(diff, PointRef, 4, color, -1);
                }
                if (!center.IsEmpty && !PointRef.IsEmpty)
                {
                    string dist = CalcDistance(center, PointRef).ToString("0.0");
                    CvInvoke.ArrowedLine(diff, PointRef, center, color);
                    CvInvoke.PutText(diff, dist, new Point(5, diff.Height - 5), FontFace.HersheyComplexSmall, 1, color);
                }
                ImgBoxDiff.Image = diff;
            }

            // update status text
            Invoke(new Action(() => UpdateStatusText(PointRef, center)));
        }
Exemplo n.º 26
0
    /// <summary>
    /// Computes the centroid of the largest blue and red contours, draws them on
    /// the webcam image, smooths them against recent history, and moves the
    /// corresponding game spheres to the mapped play-field positions.
    /// </summary>
    void GetCentroid()
    {
        // Image moments of the largest contour of each tracked colour (if any).
        Moments blueMoment = new Moments();
        Moments redMoment  = new Moments();

        if (biggestContourBlue != null)
        {
            blueMoment = CvInvoke.Moments(biggestContourBlue);
        }

        if (biggestContourRed != null)
        {
            redMoment = CvInvoke.Moments(biggestContourRed);
        }

        // Centroid of a contour is (M10/M00, M01/M00).
        // BUG FIX: when no contour was found M00 is 0 and the original code
        // divided by zero, casting NaN/Infinity to int and producing garbage
        // coordinates. Keep the previous centroid in that case.
        centroidBlue = ComputeCentroid(blueMoment, centroidBlue);
        centroidRed  = ComputeCentroid(redMoment, centroidRed);

        // Mark both centroids on the webcam image.
        CvInvoke.Circle(imgWebCam, centroidBlue, 2, new MCvScalar(0, 0, 0));
        CvInvoke.Circle(imgWebCam, centroidRed, 2, new MCvScalar(0, 0, 0));

        // RED: normalise to [0..1], apply dead-zone smoothing, then map the
        // 16/9 normalised position onto game coordinates X in [-10,10],
        // Y in [-7,7] and move the sphere.
        Vector2 centroidR = NormalizeAndSmooth(centroidRed, lastsPointsR);
        sphereRed.transform.position = new Vector3(centroidR.x * 20f - 10f, centroidR.y * 14f - 7f, 10);

        // BLUE: same processing as red.
        Vector2 centroidB = NormalizeAndSmooth(centroidBlue, lastsPointsB);
        if (centroidB.x > 1000f) // legacy sanity reset kept for safety
        {
            centroidB = new Vector2(0.5f, 0.5f);
        }
        sphereBlue.transform.position = new Vector3(centroidB.x * 20f - 10f, centroidB.y * 14f - 7f, 10);
    }

    /// <summary>
    /// Divides the spatial moments by the area moment; returns
    /// <paramref name="fallback"/> when the area is zero (empty/missing
    /// contour), which would otherwise cause a division by zero.
    /// </summary>
    Point ComputeCentroid(Moments m, Point fallback)
    {
        if (m.M00 == 0)
        {
            return fallback;
        }
        return new Point((int)(m.M10 / m.M00), (int)(m.M01 / m.M00));
    }

    /// <summary>
    /// Normalises a pixel-space centroid to [0..1] (camera X axis mirrored),
    /// then applies dead-zone smoothing against the last stored positions to
    /// avoid jitter while an object is held. The history list keeps only the
    /// 40 most recent positions.
    /// </summary>
    Vector2 NormalizeAndSmooth(Point centroid, List<Vector2> history)
    {
        float x = 1f - ((float)centroid.X / (float)webCam.Width); // mirror camera X
        float y = (float)centroid.Y / (float)webCam.Height;
        Vector2 current = new Vector2(x, y);

        if (history.Count > 1)
        {
            Vector2 last = history[history.Count - 1];

            float distance = Vector2.Distance(current, last);
            if (distance > deadZone)
            {
                history.Add(current);
            }
            else
            {
                // Within the dead zone: keep the previous position (anti-jitter).
                current = last;
            }
        }
        else
        {
            history.Add(current);
        }

        if (history.Count > 40)
        {
            history.RemoveAt(0);
        }

        return current;
    }
Exemplo n.º 27
0
        /// <summary>
        /// Loads the image named in <c>fileNameTextBox</c>, then detects circles
        /// (Hough transform), line segments (probabilistic Hough) and
        /// triangles/rectangles (polygon approximation of contours). Each result
        /// is rendered into its own image box and per-stage timings are appended
        /// to the window title.
        /// </summary>
        public void PerformShapeDetection()
        {
            if (fileNameTextBox.Text != String.Empty)
            {
                StringBuilder msgBuilder = new StringBuilder("Performance: ");

                // Load the image from file (resize left disabled by the original author).
                Image <Bgr, Byte> img =
                    new Image <Bgr, byte>(fileNameTextBox.Text);
                //.Resize(2000, 2000, Emgu.CV.CvEnum.Inter.Linear, true);
                labelSize.Text = img.Width.ToString() + " , " + img.Height.ToString();

                // Convert to a grayscale UMat for all downstream detection.
                UMat uimage = new UMat();
                CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

                // Image pyramid (down then up) removes high-frequency noise
                // so the edge detection is more accurate.
                UMat pyrDown = new UMat();
                CvInvoke.PyrDown(uimage, pyrDown);
                CvInvoke.PyrUp(pyrDown, uimage);


                #region circle detection
                Stopwatch watch = Stopwatch.StartNew();

                // dp, minDist, cannyThreshold, circleAccumulatorThreshold,
                // minRadius and maxRadius are fields configured elsewhere
                // (presumably UI-bound — confirm against the form designer).
                CircleF[] circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, dp, minDist, cannyThreshold, circleAccumulatorThreshold, minRadius, maxRadius);

                watch.Stop();
                msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                #region Canny and edge detection
                watch.Reset(); watch.Start();
                // The linking threshold reuses the main Canny threshold.
                double cannyThresholdLinking = cannyThreshold;
                UMat   cannyEdges            = new UMat();
                CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

                LineSegment2D[] lines = CvInvoke.HoughLinesP(
                    cannyEdges,
                    1,              //Distance resolution in pixel-related units
                    Math.PI / 45.0, //Angle resolution measured in radians (4 degrees)
                    20,             //threshold
                    30,             //min line width
                    10);            //gap between lines

                watch.Stop();
                msgBuilder.Append(String.Format("Canny & Hough lines - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                #region Find triangles and rectangles
                watch.Reset(); watch.Start();
                List <Triangle2DF> triangleList = new List <Triangle2DF>();
                List <RotatedRect> boxList      = new List <RotatedRect>(); //a box is a rotated rectangle

                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                    int count = contours.Size;
                    for (int i = 0; i < count; i++)
                    {
                        using (VectorOfPoint contour = contours[i])
                            using (VectorOfPoint approxContour = new VectorOfPoint())
                            {
                                // Approximate each contour by a polygon with a
                                // tolerance of 5% of its perimeter.
                                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                                if (CvInvoke.ContourArea(approxContour, false) > 250) //only consider contours with area greater than 250
                                {
                                    if (approxContour.Size == 3)                      //The contour has 3 vertices, it is a triangle
                                    {
                                        Point[] pts = approxContour.ToArray();
                                        triangleList.Add(new Triangle2DF(
                                                             pts[0],
                                                             pts[1],
                                                             pts[2]
                                                             ));
                                    }
                                    else if (approxContour.Size == 4) //The contour has 4 vertices.
                                    {
                                        #region determine if all the angles in the contour are within [80, 100] degree
                                        bool            isRectangle = true;
                                        Point[]         pts         = approxContour.ToArray();
                                        LineSegment2D[] edges       = PointCollection.PolyLine(pts, true);

                                        for (int j = 0; j < edges.Length; j++)
                                        {
                                            double angle = Math.Abs(
                                                edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                            if (angle < 80 || angle > 100)
                                            {
                                                isRectangle = false;
                                                break;
                                            }
                                        }
                                        #endregion

                                        if (isRectangle)
                                        {
                                            boxList.Add(CvInvoke.MinAreaRect(approxContour));
                                        }
                                    }
                                }
                            }
                    }
                }

                watch.Stop();
                msgBuilder.Append(String.Format("Triangles & Rectangles - {0} ms; ", watch.ElapsedMilliseconds));
                #endregion

                originalImageBox.Image = img;
                this.Text = msgBuilder.ToString();

                #region draw triangles and rectangles
                Mat triangleRectangleImage = new Mat(img.Size, DepthType.Cv8U, 3);
                triangleRectangleImage.SetTo(new MCvScalar(0));
                foreach (Triangle2DF triangle in triangleList)
                {
                    CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(triangle.GetVertices(), Point.Round), true, new Bgr(Color.DarkBlue).MCvScalar, 2);
                }
                foreach (RotatedRect box in boxList)
                {
                    CvInvoke.Polylines(triangleRectangleImage, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.DarkOrange).MCvScalar, 2);
                }

                // BUG FIX: the original assigned cannyEdges here, so the
                // triangles/rectangles just drawn were never displayed.
                triangleRectangleImageBox.Image = triangleRectangleImage;
                #endregion

                #region draw circles
                Mat circleImage = new Mat(img.Size, DepthType.Cv8U, 3);
                circleImage.SetTo(new MCvScalar(0));
                foreach (CircleF circle in circles)
                {
                    CvInvoke.Circle(circleImage, Point.Round(circle.Center), (int)circle.Radius, new Bgr(Color.Brown).MCvScalar, 2);
                }

                circleImageBox.Image = circleImage;
                #endregion

                #region draw lines
                Mat lineImage = new Mat(img.Size, DepthType.Cv8U, 3);
                lineImage.SetTo(new MCvScalar(0));
                foreach (LineSegment2D line in lines)
                {
                    CvInvoke.Line(lineImage, line.P1, line.P2, new Bgr(Color.Green).MCvScalar, 2);
                }

                // BUG FIX: the original assigned uimage here, so the detected
                // line segments just drawn were never displayed.
                lineImageBox.Image = lineImage;
                #endregion
            }
        }
Exemplo n.º 28
0
        /// <summary>
        /// Scores a single shooting target: warps the detected target quadrilateral
        /// onto a <paramref name="kwadratWidth"/>-sized square, searches the warped
        /// image for a bullet hole via Hough circle detection, and stores the warped
        /// scans, the smoothed grayscale image and the computed shot value in
        /// <paramref name="result"/>.
        /// </summary>
        /// <param name="src_vertices">Target corners in the source frame; assumed order tl, tr, br, bl to match dstVertices — TODO confirm with caller.</param>
        /// <param name="kwadratWidth">Side length, in pixels, of the destination square.</param>
        /// <param name="result">Receives the black-ring data, warped/annotated images and the shot result.</param>
        /// <param name="acd">Capture data: source frame, target geometry and Canny/Hough thresholds.</param>
        private static void FuseThisTarget(PointF[] src_vertices, int kwadratWidth, ProcessFrameResult result,
                                           AditionaCapturelData acd)
        {
            // Copy the detected black-ring geometry into the result.
            result.Target.BlackCenter = acd.MainTargetDetails.BlackCenter;
            result.Target.BlackR      = acd.MainTargetDetails.BlackR;

            #region wyznaczenie prostokatow i kwadratow do transformacji perspektywy

            // Destination square for the perspective transform: tl, tr, br, bl.
            PointF[] dstVertices = new PointF[]
            {
                new PointF(0, 0),                       //tl topleft
                new PointF(kwadratWidth, 0),            //tr
                new PointF(kwadratWidth, kwadratWidth), //br
                new PointF(0, kwadratWidth)
            };                                          //bl

            // (Kept for reference) a square anchored at the found bottom-left
            // corner — visualisation aid only:
            //PointF[] dst_vertices_kwadrat_lewydol = new PointF[] {
            //     new PointF(bo_ord[3].X , bo_ord[3].Y - kwadratWidth),
            //     new PointF(bo_ord[3].X + kwadratWidth, bo_ord[3].Y - kwadratWidth),
            //     new PointF(bo_ord[3].X + kwadratWidth, bo_ord[3].Y),
            //     bo_ord[3]};

            #endregion wyznaczenie prostokatow i kwadratow do transformacji perspektywy

            using (Mat warped = new Mat())
            {
                #region tranformacja perspektywy

                // Perspective-warp the target quad onto the square.
                Mat  warpMatrix = CvInvoke.GetPerspectiveTransform(src_vertices, dstVertices);
                Size size       = new Size(kwadratWidth, kwadratWidth);
                CvInvoke.WarpPerspective(acd.Frame, warped, warpMatrix, size, Inter.Linear, Warp.Default);

                #endregion tranformacja perspektywy

                #region rysowanie pierscieni

                // Ring radius scale in pixels, derived from the black-ring radius.
                //Mat circleImage = warped.Clone();
                var pix = Pix(acd.MainTargetDetails.BlackR);
                //DrawCircles(circleImage, pix, useThisTarget.BlackCenter);

                #endregion rysowanie pierscieni

                #region blur gray canny samej tarczy

                Mat canny_output12      = new Mat();
                Mat smallGrayFrame12    = new Mat();
                Mat smoothedGrayFrame12 = new Mat();

                // Pyramid down/up to denoise the warped target, then convert to grayscale.
                CvInvoke.PyrDown(warped, smallGrayFrame12);
                CvInvoke.PyrUp(smallGrayFrame12, smoothedGrayFrame12);
                CvInvoke.CvtColor(smoothedGrayFrame12, smoothedGrayFrame12, ColorConversion.Bgr2Gray);
                // CvInvoke.GaussianBlur(smoothedGrayFrame12, smoothedGrayFrame12, new Size(9, 9), 1, 1);
                // NOTE: result keeps a live reference to this Mat — it must not be disposed here.
                result.SmoothedOryginal = smoothedGrayFrame12;


                #region test

                // NOTE(review): canny_output12 is computed but never read in this
                // method — confirm nothing else consumes it before removing.
                CvInvoke.Canny(smoothedGrayFrame12, canny_output12, acd.FirstCannyThresh, acd.secondCannyThresh);

                #endregion test

                #endregion blur gray canny samej tarczy

                #region rozpoznawanie strzału

                // Expected bullet-hole radius in pixels (presumably the 4.5 mm
                // pellet radius — see FourNHalfR) with a small +/- search margin.
                int czteryIpolmmRInt = Convert.ToInt32(FourNHalfR(pix));
                int zapasSizeMax     = 5;
                int zapasSizeMin     = 1;


                // Detect candidate bullet holes within the radius window.
                CircleF[] przestrzeliny = CvInvoke.HoughCircles(smoothedGrayFrame12,
                                                                HoughType.Gradient,
                                                                1,
                                                                400,
                                                                acd.firstCannyThresh1,
                                                                acd.secondCannyThresh1,
                                                                czteryIpolmmRInt - zapasSizeMin,
                                                                czteryIpolmmRInt + zapasSizeMax);

                // Keep a clean copy of the warped scan before annotating it.
                result.Warped = warped.Clone();

                // Annotate and score the first detected hole only (break below).
                foreach (CircleF shot in przestrzeliny)
                {
                    // Expected radius plus the min/max margin rings, for visual inspection.
                    CvInvoke.Circle(warped, Point.Round(shot.Center), czteryIpolmmRInt, new Bgr(Color.Blue).MCvScalar, 1,
                                    LineType.AntiAlias, 0);
                    CvInvoke.Circle(warped, Point.Round(shot.Center), czteryIpolmmRInt - zapasSizeMin,
                                    new Bgr(Color.BlueViolet).MCvScalar, 1, LineType.AntiAlias, 0);
                    CvInvoke.Circle(warped, Point.Round(shot.Center), czteryIpolmmRInt + zapasSizeMax,
                                    new Bgr(Color.Chartreuse).MCvScalar, 1, LineType.AntiAlias, 0);

                    // Translate the hole position into a score for this target,
                    // rotating the annotated scan if the camera was mounted flipped.
                    result.Shot = WyliczWartoscPrzestrzeliny(shot.Center, acd.MainTargetDetails);
                    if (acd.MainTargetDetails.CameraFlipped)
                    {
                        result.TargetScanWithResult = Rotate180(warped.Clone());
                    }
                    else
                    {
                        result.TargetScanWithResult = warped.Clone();
                    }
                    break;
                }

                #endregion
            }
        }
Exemplo n.º 29
0
        /// <summary>
        /// Demo pipeline on a fixed test image: shows, each in its own picture
        /// box, the original, its grayscale conversion, a histogram-equalised
        /// version, a fixed-threshold binarisation, Canny edges and a sample
        /// circle drawing.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            // Source image (hard-coded test file).
            var srcImage = new Image <Bgr, byte>(@"E:\测试图片\2018-09-05_133455.png");
            this.pictureBox1.Image = srcImage.ToBitmap();

            // Grayscale conversion.
            var grayImage = srcImage.Convert <Gray, byte>();
            this.pictureBox2.Image = grayImage.ToBitmap();

            // Histogram equalisation. The result is re-wrapped through its
            // IplImage header to build a second Image view over the same data.
            Image <Gray, Byte> equalized = new Image <Gray, Byte>(grayImage.Width, grayImage.Height, new Gray(0.1));
            CvInvoke.EqualizeHist(grayImage, equalized);
            MIplImage          equalizedHeader = (MIplImage)Marshal.PtrToStructure(equalized, typeof(MIplImage));
            Image <Gray, Byte> equalizedView   = new Image <Gray, Byte>(equalized.Width, equalized.Height, equalizedHeader.WidthStep, equalizedHeader.ImageData);
            this.pictureBox5.Image = equalizedView.ToBitmap();

            // Fixed-threshold binarisation of the equalised image.
            Image <Gray, Byte> binarized = new Image <Gray, Byte>(grayImage.Width, grayImage.Height);
            CvInvoke.Threshold(equalized, binarized, 120, 255, ThresholdType.Binary);
            this.pictureBox3.Image = binarized.ToBitmap();

            // Canny edge detection on the binarised image, re-wrapped through
            // its IplImage header in the same way as above.
            Image <Gray, Byte> edges = new Image <Gray, Byte>(grayImage.Width, grayImage.Height, new Gray(0.1));
            CvInvoke.Canny(binarized, edges, 10, 30);
            MIplImage          edgesHeader = (MIplImage)Marshal.PtrToStructure(edges, typeof(MIplImage));
            Image <Gray, Byte> edgesView   = new Image <Gray, Byte>(edgesHeader.Width, edgesHeader.Height, edgesHeader.WidthStep, edgesHeader.ImageData);
            this.pictureBox4.Image = edgesView.ToBitmap();

            // Draw a sample circle onto a white canvas.
            Image <Gray, Byte> circleCanvas = new Image <Gray, Byte>(grayImage.Width, grayImage.Height, new Gray(255));
            CvInvoke.Circle(circleCanvas, new Point(150, 150), 50, new MCvScalar(100, 100, 255));
            this.pictureBox6.Image = circleCanvas.ToBitmap();
        }
Exemplo n.º 30
0
        /// <summary>
        /// Loads a product image chosen by the user, runs centre/slope detection
        /// for the selected camera (right = 0, front = 1), annotates the corners,
        /// outline, diagonals and centre on the image, and appends every measured
        /// parameter to <c>txtParamters</c>.
        /// </summary>
        private void btnCapture_Click(object sender, EventArgs e)
        {
            // Require an image source; right radio => camera 0, front => camera 1.
            if (!(rdbFront.Checked || rdbRight.Checked))
            {
                MessageBox.Show("先选择使用的图片来源!", "取像提示");
                return;
            }
            else if (rdbRight.Checked)
            {
                cameraID = 0;
            }
            else
            {
                cameraID = 1;
            }

            OpenFileDialog openFileDialog = new OpenFileDialog();

            if (openFileDialog.ShowDialog() == DialogResult.OK)
            {
                myImg = new Image <Bgr, byte>(openFileDialog.FileName);
            }
            if (myImg == null)
            {
                return;
            }

            DateTime beforDT = DateTime.Now; // timing starts

            // 0 = right side, 1 = front; 50 = product height.
            imageInfo = findCenterAndSlope.GetProductParamters(myImg.Bitmap, cameraID, 50);
            DateTime afterDT = DateTime.Now;
            TimeSpan ts      = afterDT.Subtract(beforDT); // timing ends

            label2.Text = "Span Time:\t" + ts + "S";
            binaryImg   = findCenterAndSlope.BinaryImage;
            if (cameraID == 0)
            {
                txtParamters.Text += "处理Upper图像,产品高度:" + string.Format("{{{0}}}", 50) + "\r\n";
            }
            else
            {
                txtParamters.Text += "处理Lower图像,产品高度:" + string.Format("{{{0}}}", 50) + "\r\n";
            }

            // Mark each detected corner on the image and log its coordinates.
            foreach (PointF item in imageInfo.ImageCorner)
            {
                CvInvoke.Circle(myImg, new Point((int)item.X, (int)item.Y), 8, new MCvScalar(0, 2, 255), 8);
                CvInvoke.PutText(myImg, item.ToString(), new Point((int)item.X, (int)item.Y), FontFace.HersheySimplex, 4, new MCvScalar(0, 255, 88), 3);
                txtParamters.Text += "图像角点坐标" + string.Format("{{{0},{1}}}", item.X, item.Y) + "\r\n";
            }

            // Draw the two diagonals of the corner quadrilateral...
            CvInvoke.Line(myImg, new Point((int)imageInfo.ImageCorner[0].X, (int)imageInfo.ImageCorner[0].Y),
                          new Point((int)imageInfo.ImageCorner[imageInfo.ImageCorner.Length - 2].X,
                                    (int)imageInfo.ImageCorner[imageInfo.ImageCorner.Length - 2].Y),
                          new MCvScalar(0, 255, 0), 10);
            CvInvoke.Line(myImg, new Point((int)imageInfo.ImageCorner[1].X, (int)imageInfo.ImageCorner[1].Y),
                          new Point((int)imageInfo.ImageCorner[imageInfo.ImageCorner.Length - 1].X,
                                    (int)imageInfo.ImageCorner[imageInfo.ImageCorner.Length - 1].Y),
                          new MCvScalar(0, 255, 0), 10);

            // ...and the closed outline through consecutive corners (the modulo
            // wraps the last segment back to the first corner).
            for (int i = 1; i < imageInfo.ImageCorner.Length + 1; i++)
            {
                CvInvoke.Line(myImg, new Point((int)imageInfo.ImageCorner[i - 1].X, (int)imageInfo.ImageCorner[i - 1].Y),
                              new Point((int)imageInfo.ImageCorner[i % imageInfo.ImageCorner.Length].X,
                                        (int)imageInfo.ImageCorner[i % imageInfo.ImageCorner.Length].Y),
                              new MCvScalar(0, 255, 0), 10);
            }

            // Long/short axis ratio of the fitted shape.
            float pr = (float)imageInfo.AxisLong / (float)imageInfo.AxisShort;

            txtParamters.Text += "\r\n长短轴比:" + pr.ToString() + "\r\n";

            // Lengths of both diagonals (corner 0<->2 and corner 1<->3).
            double x1 = Math.Sqrt(Math.Pow((imageInfo.ImageCorner[0].X - imageInfo.ImageCorner[2].X), 2) + Math.Pow((imageInfo.ImageCorner[0].Y - imageInfo.ImageCorner[2].Y), 2));
            double x2 = Math.Sqrt(Math.Pow((imageInfo.ImageCorner[1].X - imageInfo.ImageCorner[3].X), 2) + Math.Pow((imageInfo.ImageCorner[1].Y - imageInfo.ImageCorner[3].Y), 2));

            txtParamters.Text += "\r\n对角线1:" + x1.ToString() + "\r\n";
            txtParamters.Text += "\r\n对角线2:" + x2.ToString() + "\r\n";

            // Normalise the detected rotation into [-45, 45] degrees; positive
            // values mean clockwise robot rotation, negative counter-clockwise.
            double robotRotated = Math.Abs(imageInfo.RotatedAngle) > 45 ? imageInfo.RotatedAngle + 90 : imageInfo.RotatedAngle;
            if (robotRotated >= 0)
            {
                txtParamters.Text += "机器人顺时针旋转角度:\r\n\t\t" + robotRotated.ToString() + "\r\n";
            }
            else
            {
                txtParamters.Text += "机器人逆时针旋转角度:\r\n\t\t" + robotRotated.ToString() + "\r\n";
            }

            // Mark the image centre and log both coordinates.
            CvInvoke.Circle(myImg, new Point((int)imageInfo.CenterOfImg.X, (int)imageInfo.CenterOfImg.Y), 15, new MCvScalar(255, 2, 5), 13);
            // BUG FIX: this label previously read "图像中心点Y" while printing
            // the X coordinate (copy-paste error).
            txtParamters.Text += "图像中心点X:\t\t" + imageInfo.CenterOfImg.X.ToString() + "\r\n";
            txtParamters.Text += "图像中心点Y:\t\t" + imageInfo.CenterOfImg.Y.ToString() + "\r\n";

            txtParamters.Text += "长轴:\t\t" + imageInfo.AxisLong.ToString() + "\r\n";
            txtParamters.Text += "短轴:\t\t" + imageInfo.AxisShort.ToString() + "\r\n";

            // Pixel ratios against the nominal 2000x1200 reference dimensions.
            txtParamters.Text += "长轴像素比:\t" + (imageInfo.AxisLong / 2000f).ToString() + "\r\n";
            txtParamters.Text += "短轴像素比:\t" + (imageInfo.AxisShort / 1200f).ToString() + "\r\n";

            // Convert the (negated, degree->radian) rotation to a quaternion.
            double[] Q = EA2Q((-robotRotated / 180 * Math.PI), 0, 0);

            pictureBox1.Image = myImg.ToBitmap();
        }