Example #1
        static void MSER_Preprocessing(ref Mat img, out OpenCvSharp.Point offset_bounding_rec, List <OpenCvSharp.Point[]> contours_final)
        {
            OpenCvSharp.Point[][] temp = new Point[1][];

            Cv2.GaussianBlur(img, img, new OpenCvSharp.Size(7, 7), 0, 0);

            /*
             * // Mask off some area of both the inner and outer rings
             * OpenCvSharp.Point[][] temp = new Point[1][];
             * temp[0] = contours_final[0];
             * Cv2.DrawContours(img_copy, temp, -1, 255, 40);
             * temp[0] = contours_final[1];
             * Cv2.DrawContours(img_copy, temp, -1, 255, 40);
             */

            // Mask off some area of the outer ring
            temp[0] = contours_final[1];
            Cv2.DrawContours(img, temp, -1, 255, 100);
            //temp[0] = contours_final[0];
            //Cv2.DrawContours(img, temp, -1, 255, 20);

            // 200: margin reserved around the outer ring (200 px per side, hence +400 to width/height;
            // assumes the contour lies at least 200 px inside the image border)
            var biggestContourRect = Cv2.BoundingRect(contours_final[0]);
            var expand_rect        = new Rect(biggestContourRect.TopLeft.X - 200, biggestContourRect.TopLeft.Y - 200, biggestContourRect.Width + 400, biggestContourRect.Height + 400);

            img = new Mat(img, expand_rect);
            offset_bounding_rec = expand_rect.TopLeft;
        }
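
A minimal usage sketch (not part of the original example): contours_final is assumed to come from Cv2.FindContours, sorted by descending area so that index 0 is the largest contour. Requires using System.Linq.

        // Hypothetical call site for MSER_Preprocessing
        Mat img = Cv2.ImRead("board.png", ImreadModes.Grayscale);
        using Mat bin = img.Threshold(128, 255, ThresholdTypes.Binary);
        Cv2.FindContours(bin, out OpenCvSharp.Point[][] found, out HierarchyIndex[] _,
                         RetrievalModes.List, ContourApproximationModes.ApproxSimple);
        List<OpenCvSharp.Point[]> contours_final =
            found.OrderByDescending(c => Cv2.ContourArea(c)).ToList();
        MSER_Preprocessing(ref img, out OpenCvSharp.Point offset, contours_final);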
Example #2
        public static void Draw(Mat frame, Point p1, Point p2, float confidence, Color color)
        {
            var thickness = (int)(5 * confidence);

            if (thickness <= 0)
            {
                thickness = 1;
            }

            // OpenCV uses BGR channel order, so map the RGB Color accordingly
            Scalar c = new Scalar(color.B, color.G, color.R);

            Cv2.Rectangle(frame, p1, p2, c, thickness, LineTypes.Link4);
            var text = $"{confidence:N}";

            var pText = new Point(p1.X + 10, p2.Y - 10);

            Cv2.PutText(frame, text, pText, HersheyFonts.HersheySimplex, 0.5, c, 2, LineTypes.AntiAlias, false);
        }
Example #3
        /// <summary>
        /// Sleep until the template image is found on screen
        /// </summary>
        /// <param name="templatePath">path of the template image</param>
        /// <param name="maxWaitTime">maximum wait time in milliseconds</param>
        public static void SleepUntilTemplateMatching(string templatePath, int maxWaitTime = 60000)
        {
            try {
                int count = 0;
                while (true)
                {
                    Thread.Sleep(1000);
                    if (maxWaitTime <= 1000 * count)
                    {
                        break;
                    }

                    using Bitmap deskTopBmp = GetDeskTopBmp();

                    OpenCvSharp.Point minPoint = new OpenCvSharp.Point();
                    OpenCvSharp.Point maxPoint = new OpenCvSharp.Point();
                    double            minVal   = 0;
                    double            maxVal   = 0;
                    using Mat result = GetTemplateMatchingMat(templatePath, deskTopBmp);
                    Cv2.MinMaxLoc(result, out minVal, out maxVal, out minPoint, out maxPoint);

                    if (maxVal >= 0.9)
                    {
                        // Found the target image
                        Trace.WriteLine("Found the target image: " + maxVal.ToString());
                        SetCursorPos(maxPoint.X + 10, maxPoint.Y + 10);
                        break;
                    }
                    count++;
                }
            } catch (OpenCvSharp.OpenCVException ee) {
                System.Diagnostics.Debug.WriteLine(ee.ErrMsg);
            }
        }
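
The example assumes GetTemplateMatchingMat and GetDeskTopBmp helpers that are not shown. A plausible minimal sketch of the matcher (an assumption, not the original code):

        // Hypothetical implementation of the helper assumed above
        static Mat GetTemplateMatchingMat(string templatePath, Bitmap desktop)
        {
            using Mat screen = OpenCvSharp.Extensions.BitmapConverter.ToMat(desktop);
            // BitmapConverter can yield BGRA; MatchTemplate needs matching channel counts
            using Mat screenBgr = screen.Channels() == 4
                ? screen.CvtColor(ColorConversionCodes.BGRA2BGR)
                : screen.Clone();
            using Mat template = Cv2.ImRead(templatePath, ImreadModes.Color);
            Mat result = new Mat();
            Cv2.MatchTemplate(screenBgr, template, result, TemplateMatchModes.CCoeffNormed);
            return result;
        }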
Example #4
        private OpenCvSharp.Point getOMRImage(String path)
        {
            int offset = 100;

            Mat src, gray, binary, canny;

            src = Cv2.ImRead(path);
            Rect rect = new Rect(offset, offset, src.Width - offset * 2, src.Height - offset * 2);

            src = src.SubMat(rect);

            gray   = new Mat();
            binary = new Mat();
            canny  = new Mat();

            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
            Cv2.Threshold(gray, binary, 150, 255, ThresholdTypes.Binary);
            Cv2.Canny(binary, canny, 0, 0, 3);

            // width, height
            OpenCvSharp.Point pt = projectPerspective(src, canny);
            src.Dispose();
            gray.Dispose();
            binary.Dispose();
            canny.Dispose();

            return(pt);
        }
Example #5
        private Point2f ComputeVector(CircleSegment c, OpenCvSharp.Point target, bool forward)
        {
            double x         = (target.X - c.Center.X);
            double y         = (target.Y - c.Center.Y);
            var    maxRadius = Math.Max(c.Radius, Math.Sqrt(x * x + y * y));

            x /= maxRadius;
            y /= maxRadius;

            // could return ship pitch and roll here ...

            /*
             * var rollangle = Math.Atan2(x, -y); // wrong order on purpose so that up is 0 degrees roll.
             * var pitchangle = Math.Asin(Math.Sqrt(x*x + y*y) / maxRadius);
             * if (!forward)
             *  pitchangle = Math.PI - pitchangle;
             * return new PointF((float)pitchangle, (float)rollangle);
             */

            // but x/y is actually easier to handle since we are only doing a crude alignment, and not computing angular velocities or anything
            if (!forward)
            {
                y = (y > 0) ? 2 - y : -2 - y; // if target is behind, add lots of pitch offset so that exactly wrong direction is 2/-2.
            }
            return(new Point2f((float)x, (float)y));
        }
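
A worked call (values are illustrative): a target 40 px above a circle of radius 50 maps to a normalized offset of (0, -0.8), while targets behind the ship (forward == false) are pushed toward ±2 in y.

        // Sketch of the normalization above
        var compass = new CircleSegment(new Point2f(100, 100), 50);
        Point2f v = ComputeVector(compass, new OpenCvSharp.Point(100, 60), forward: true);
        // v == (0, -0.8): x = 0 / 50, y = -40 / 50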
Example #6
        private double Hypotenuse(OpenCvSharp.Point p1, OpenCvSharp.Point p2)
        {
            var widthSquared  = Math.Pow(p1.X - p2.X, 2);
            var heightSquared = Math.Pow(p1.Y - p2.Y, 2);

            return(Math.Sqrt(widthSquared + heightSquared));
        }
Example #7
        private static int GetSquareSize(CvPoint p1, CvPoint p0)
        {
            var dx = p1.X - p0.X;
            var dy = p1.Y - p0.Y;

            return(dx * dx + dy * dy);
        }
Example #8
        public static int ptDist(ref OpenCvSharp.Point p1, ref OpenCvSharp.Point p2) // Euclidean (straight-line) distance between the points
        {
            var dx = p2.X - p1.X;
            var dy = p2.Y - p1.Y;

            return((int)Math.Sqrt(dx * dx + dy * dy));
        }
Example #9
        private static Mat DetectFace(CascadeClassifier cascade, Mat photo)
        {
            Mat result;

            using (var src = photo)
                using (var gray = new Mat())
                {
                    result = src.Clone();
                    Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);

                    // Detect faces
                    Rect[] faces = cascade.DetectMultiScale(
                        gray, 1.08, 2, HaarDetectionType.ScaleImage, new OpenCvSharp.Size(30, 30));

                    // Render all detected faces
                    foreach (Rect face in faces)
                    {
                        var center = new OpenCvSharp.Point
                        {
                            X = (int)(face.X + face.Width * 0.5),
                            Y = (int)(face.Y + face.Height * 0.5)
                        };
                        var axes = new OpenCvSharp.Size
                        {
                            Width  = (int)(face.Width * 0.5),
                            Height = (int)(face.Height * 0.5)
                        };
                        Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
                    }

                    return(result);
                }
        }
Example #10
        public OpenCvSharp.Point searchIMG(Bitmap screen_img, Bitmap find_img)
        {
            OpenCvSharp.Point result = new OpenCvSharp.Point();
            // the screen image
            using (Mat ScreenMat = OpenCvSharp.Extensions.BitmapConverter.ToMat(screen_img))
                // the image to find
                using (Mat FindMat = OpenCvSharp.Extensions.BitmapConverter.ToMat(find_img))
                    // search the screen image for FindMat
                    using (Mat res = ScreenMat.MatchTemplate(FindMat, TemplateMatchModes.CCoeffNormed))
                    {
                        // doubles that receive the min/max similarity of the match
                        double minval, maxval = 0;
                        // points that receive the min/max match locations
                        OpenCvSharp.Point minloc, maxloc;
                        // get the similarity and location of the best match
                        Cv2.MinMaxLoc(res, out minval, out maxval, out minloc, out maxloc);
                        Debug.WriteLine("match similarity: " + maxval);

                        // fire the click event when the image is found!!
                        if (maxval >= 0.8)
                        {
                            result = maxloc;
                            //InClick(maxloc.X, maxloc.Y);
                            return(result);
                        }
                    }

            return(result);
        }
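
A short usage sketch (file names are assumptions); searchIMG returns (0, 0) when the similarity stays below 0.8:

        // Hypothetical call site for searchIMG
        using (var screen = new Bitmap("screen.png"))
        using (var button = new Bitmap("button.png"))
        {
            OpenCvSharp.Point hit = searchIMG(screen, button);
            Debug.WriteLine($"best match at ({hit.X}, {hit.Y})");
        }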
Example #11
        public POINT FindPoint(string imgName)
        {
            Bitmap fullScreen = GetFullScreen();

            OpenCvSharp.Point pt = searchIMG(fullScreen, picture[imgName]);
            return(new POINT(pt.X, pt.Y));
        }
Example #12
        // Removing closest points: among each pair of nearby points, keep the one farther from the palm center
        public static void SimplifyNeighbors(ref List <OpenCvSharp.Point> handPTS, ref OpenCvSharp.Point center_of_palm)
        {
            for (int it = 0; it < handPTS.Count; it++)
            {
                for (int j = it + 1; j < handPTS.Count;)
                {
                    var pointHere  = handPTS[it];
                    var pointThere = handPTS[j];

                    var PtFirst_to_palm = ptDist(ref center_of_palm, ref pointHere);
                    var PTsec_to_palm   = ptDist(ref center_of_palm, ref pointThere);

                    var Exdistance = ptDist(ref pointHere, ref pointThere);

                    if (Exdistance <= 25)
                    {
                        if (PTsec_to_palm > PtFirst_to_palm)
                        {
                            handPTS.RemoveAt(it);
                            j = it + 1; // the point at 'it' changed, so restart the inner scan
                        }
                        else
                        {
                            handPTS.RemoveAt(j);
                        }
                    }
                    else
                    {
                        j += 1;
                    }
                }
            }
        }
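
A small worked example (points are illustrative): the first two candidates are about 11 px apart, so only the one farther from the palm survives.

        // Sketch: (100, 50) and (110, 55) are within 25 px of each other
        var tips = new List<OpenCvSharp.Point>
        {
            new OpenCvSharp.Point(100, 50),
            new OpenCvSharp.Point(110, 55),
            new OpenCvSharp.Point(200, 80),
        };
        var palm = new OpenCvSharp.Point(150, 200);
        SimplifyNeighbors(ref tips, ref palm);
        // tips now holds (100, 50) (farther from the palm) and (200, 80)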
Example #13
        private void readFile(int index = 0)
        {
            currentCenter      = new OpenCvSharp.Point();
            currentRadius      = 0;
            circleSizeVelocity = 0;
            if (files.Length > 0)
            {
                index = Math.Min(index, files.Length - 1);
                index = Math.Max(index, 0);
                string filePath = files[index].FullName;
                if (pictureBox.Image != null)
                {
                    pictureBox.Image.Dispose();
                }

                pictureBox.Image       = Image.FromFile(filePath);
                imageSizeLabel.Text    = $"Image size: {pictureBox.Image.Width} x {pictureBox.Image.Height}";
                fileIndex              = index;
                fileNumberTextBox.Text = (fileIndex + 1).ToString();
                fileTextBox.Text       = filePath;

                faceDetect(filePath);
            }
            else
            {
                fileTextBox.Text = "None";
            }
        }
Example #14
        private TXYW DrawToOutput(IEnumerable <OPoint> contour, Parameters para)
        {
            bool detectAngle = para.DetectAngle && (para.Shape != Parameters.TargetShape.Circle);

            rawmat.CopyTo(output3);
            Moments m    = Cv2.Moments(contour);
            TXYW    txyw = new TXYW();

            output3.DrawContours(new IEnumerable <OPoint>[] { contour }, 0, Scalar.Red, 5);

            double comx = m.M10 / m.M00;
            double comy = m.M01 / m.M00;

            Cv2.Circle(output3, (int)comx, (int)comy, 10, Scalar.Red, -1);

            txyw.t = PosFrames;
            txyw.x = comx;
            txyw.y = comy;

            if (detectAngle)
            {
                double       angle = 0.5 * Math.Atan2(2 * m.Mu11, m.Mu20 - m.Mu02);
                const double r     = 50;
                OPoint       arrow = new OPoint(comx + r * Math.Cos(angle), comy + r * Math.Sin(angle));
                Cv2.Line(output3, new OPoint(comx, comy), arrow, Scalar.Red, 5);
                txyw.w = angle;
            }

            Cv2.PutText(output3, $"frame:{txyw.t}",
                        new OPoint(20, videoCapture.FrameHeight - 20),
                        HersheyFonts.HersheySimplex, 2, Scalar.Red, 3);
            return(txyw);
        }
Example #15
        public void GetLocations(out Point2f[] pts0, out Point2i pt1, out Point2i pt2)
        {
            List <Point2f> ptsList0 = new List <Point2f>();

            if (centres0.Count != 0)
            {
                foreach (Point2i c0 in centres0)
                {
                    ptsList0.Add(c0);
                }
                centres0.Clear();
            }
            // else ptsList0.Add(new Point2f(-1, -1));
            pts0 = ptsList0.ToArray();

            if (centres1.Count != 0)
            {
                pt1 = centres1[0];
                centres1.Clear();
            }
            else
            {
                pt1 = new Point2i(-1, -1);
            }
            if (centres2.Count != 0)
            {
                pt2 = centres2[0];
                centres2.Clear();
            }
            else
            {
                pt2 = new Point2i(-1, -1);
            }
        }
Example #16
        private OpenCvSharp.Point Midpoints(OpenCvSharp.Point p1, OpenCvSharp.Point p2)
        {
            int x = (int)(p1.X + p2.X) / 2;
            int y = (int)(p1.Y + p2.Y) / 2;

            return(new OpenCvSharp.Point(x, y));
        }
Example #17
        static void Main(string[] args)
        {
            Console.WriteLine("[+] Starting...");
            while (is_window())
            {
                Console.WriteLine("[-] Capture Window");
                capture();
                sleep();

                Tuple <OpenCvSharp.Point, string> match_result = template_match();
                OpenCvSharp.Point match_point = match_result.Item1;
                string            match_type  = match_result.Item2;
                if (match_point.X == 0 && match_point.Y == 0)
                {
                    Console.WriteLine("[-] {0}", match_type);
                }
                else
                {
                    Console.WriteLine("[*] {0}", match_type);
                    mouse_click(get_window_point().X + match_point.X, get_window_point().Y + match_point.Y);
                }
                sleep();
            }
        }
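
Main relies on helpers (is_window, capture, template_match, get_window_point, mouse_click, sleep) that the example does not show. A hypothetical sketch of template_match, consistent with how its result is used:

        // Hypothetical helper: returns (0, 0) plus a status string when nothing matches
        static Tuple<OpenCvSharp.Point, string> template_match()
        {
            using Mat screen   = Cv2.ImRead("capture.png");  // produced by capture(), name assumed
            using Mat template = Cv2.ImRead("target.png");   // assumed template file
            using Mat result   = screen.MatchTemplate(template, TemplateMatchModes.CCoeffNormed);
            Cv2.MinMaxLoc(result, out _, out double maxVal, out _, out OpenCvSharp.Point maxLoc);
            return maxVal >= 0.9
                ? Tuple.Create(maxLoc, "match found")
                : Tuple.Create(new OpenCvSharp.Point(0, 0), "no match");
        }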
Example #18
        /// <summary>
        /// Render detected faces via OpenCV.
        /// </summary>
        /// <param name="state">Current frame state.</param>
        /// <param name="image">Web cam or video frame.</param>
        /// <returns>Returns new image frame.</returns>
        private static Mat RenderFaces(FrameState state, Mat image)
        {
            Mat result = image.Clone();

            // Render all detected faces
            foreach (var face in state.Faces)
            {
                var center = new OpenCvSharp.Point
                {
                    X = face.Center.X,
                    Y = face.Center.Y
                };
                var axes = new OpenCvSharp.Size
                {
                    Width  = (int)(face.Size.Width * 0.5) + 10,
                    Height = (int)(face.Size.Height * 0.5) + 10
                };

                Cv2.Ellipse(result, center, axes, 0, 0, 360, _faceColorBrush, 4);
            }

            return(result);
        }
Example #19
        ///<summary>
        /// Resize the ROIs when the size of the Form is changed
        ///</summary>
        ///<param name="before"> the original form size before resizing </param>
        ///<param name="after"> the new form size after resizing </param>
        private void resize_ROIs(System.Drawing.Size before, System.Drawing.Size after)
        {
            Cv2.Resize(maskTrackArea, maskTrackArea, new OpenCvSharp.Size(after.Width, after.Height));

            for (int i = 0; i < roiLines.Count; i++)
            {
                for (int j = 0; j < roiLines[i].Count; j++)
                {
                    double x = roiLines[i][j].X * after.Width / before.Width;
                    double y = roiLines[i][j].Y * after.Height / before.Height;

                    roiLines[i][j] = new OpenCvSharp.Point(x, y);
                }
            }

            for (int j = 0; j < roiTrackArea.Count; j++)
            {
                double x = roiTrackArea[j].X * after.Width / before.Width;
                double y = roiTrackArea[j].Y * after.Height / before.Height;

                roiTrackArea[j] = new OpenCvSharp.Point(x, y);
            }
        }
Example #20
        /// <summary>
        /// Draws the connection line between two items, along with its length
        /// </summary>
        /// <param name="uiImage">image to draw onto</param>
        /// <param name="item">first detected item</param>
        /// <param name="item2">second detected item</param>
        /// <param name="line">receives the endpoints of the drawn line</param>
        public void DrawLineFromItemToItem(Mat uiImage, YoloItem item, YoloItem item2, out Line line)
        {
            var startPoint = new OpenCvSharp.Point(item.Center().X * SCALE, item.Center().Y * SCALE);
            var endPoint   = new OpenCvSharp.Point(item2.Center().X * SCALE, item2.Center().Y * SCALE);

            line = new Line()
            {
                X1 = item.Center().X * SCALE,
                Y1 = item.Center().Y * SCALE,
                X2 = item2.Center().X * SCALE,
                Y2 = item2.Center().Y * SCALE
            };

            // Draw the line
            Cv2.Line(uiImage,
                     startPoint,
                     endPoint,
                     Scalar.Red, 2);

            // Draw distance and length
            // Casting from points to points is really annoying... TODO: Refactor that.
            var sysStartPoint = new System.Windows.Point(startPoint.X, startPoint.Y);
            var sysEndPoint   = new System.Windows.Point(endPoint.X, endPoint.Y);
            var distance      = GeometryHelper.Distance(sysStartPoint, sysEndPoint);
            var length        = distance * coordinateLength;
            var centerOfLine  = GeometryHelper.GetCenterOfLine(sysStartPoint, sysEndPoint);

            // Put text in it.
            DrawTextblockWithBackground(
                uiImage,
                centerOfLine,
                $"{length:0.00}m",
                Scalar.Red
                );
        }
Example #21
        private void bStart_MouseClick(object sender, MouseEventArgs e)
        {
            //CvCapture camera = new CvCapture("C:\\rosbank\\facedetect\\test\\media\\test.avi");
            var capture = new VideoCapture("C:\\rosbank\\facedetect\\test\\media\\test.avi");
            //capture.Set(CaptureProperty.FrameWidth, 320);
            //capture.Set(CaptureProperty.FrameHeight, 240);

            int sleepTime = (int)Math.Round(1000 / capture.Fps);

            Mat image = new Mat();

            // Load the cascade once, outside the capture loop
            var cascade = new CascadeClassifier(@"C:\opencv3\opencv\sources\data\haarcascades\haarcascade_frontalface_alt.xml");

            Rect[] faces = null;
            int    i     = 0;

            while (true)
            {
                i++;
                capture.Read(image);
                if (image.Empty())
                {
                    break;
                }


                Mat small = new Mat();
                Cv2.Resize(image, small, new OpenCvSharp.Size(320, 240), 0, 0, InterpolationFlags.Lanczos4);

                if (i % 5 == 0)
                {
                    faces = cascade.DetectMultiScale(small, 1.08, 2, HaarDetectionType.ScaleImage, new OpenCvSharp.Size(30, 30));
                }
                if (faces != null && faces.Length > 0)
                {
                    foreach (Rect face in faces)
                    {
                        var center = new OpenCvSharp.Point
                        {
                            X = (int)(face.X + face.Width * 0.5),
                            Y = (int)(face.Y + face.Height * 0.5)
                        };
                        var axes = new OpenCvSharp.Size
                        {
                            Width  = (int)(face.Width * 0.5),
                            Height = (int)(face.Height * 0.5)
                        };

                        Mat f = new Mat(small, face);
                        pb1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(f);
                        pb1.Refresh();

                        Cv2.Ellipse(small, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 2);
                    }
                }

                bpMain.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(small);
                bpMain.Refresh();
                Cv2.WaitKey(100);
            }
        }
Example #22
        private string Verify(string patternpath)
        {
            string tmpname = $"tmp{DateTime.Now.ToString("hh_mm_ss")}.png";

            fifaImage.Save(tmpname);

            var image = new Mat(tmpname);

            File.Delete(tmpname);
            var template = new Mat(patternpath);

            double minVal, maxVal;

            OpenCvSharp.Point minLoc, maxLoc;

            var result = image.MatchTemplate(template, TemplateMatchModes.CCorrNormed);

            result.MinMaxLoc(out minVal, out maxVal, out minLoc, out maxLoc);
            string VaryName = Path.GetFileNameWithoutExtension(patternpath);

            //MessageBox.Show($"Is it {VaryName}? : match={maxVal}");

            if (maxVal > 0.9999900)
            {
                this.mutchLoc = maxLoc;
                return(patternpath);
            }

            return("");
        }
Example #23
        public Mat Overlay(Mat frame, OpenCvSharp.Point offset)
        {
            if (this.State != GmapState.Collapsed && this.Gmap.Empty())
            {
                return(frame);
            }

            var area = new OpenCvSharp.Rect(offset, this.GetGmapSize(frame));

            if (this.State == GmapState.Collapsed)
            {
                frame = frame.CvtColor(ColorConversionCodes.BGR2BGRA);
                var icon = this._icon.Resize(area.Size);
                var mask = this._mask.Resize(area.Size);
                icon.CopyTo(new Mat(frame, area), mask);
                frame = frame.CvtColor(ColorConversionCodes.BGRA2BGR);
            }
            else if (this.State == GmapState.Expanded)
            {
                var gmap = this.Gmap.Resize(area.Size);
                gmap.CopyTo(new Mat(frame, area));
            }
            else
            {
                var gmap = this.Gmap.Resize(area.Size);
                frame = gmap;
            }

            this._currentFrame = frame;
            return(this._currentFrame);
        }
Example #24
        public static OpenCvSharp.Point[] Square(Mat src)
        {
            Mat[] split  = Cv2.Split(src);
            Mat   blur   = new Mat();
            Mat   binary = new Mat();

            OpenCvSharp.Point[] squares = new OpenCvSharp.Point[4];

            int    N   = 10;
            double max = src.Size().Width * src.Size().Height * 0.9;
            double min = src.Size().Width * src.Size().Height * 0.1;

            for (int channel = 0; channel < 3; channel++)
            {
                Cv2.GaussianBlur(split[channel], blur, new OpenCvSharp.Size(5, 5), 1);
                for (int i = 0; i < N; i++)
                {
                    Cv2.Threshold(blur, binary, i * 255 / N, 255, ThresholdTypes.Binary);

                    OpenCvSharp.Point[][] contours;
                    HierarchyIndex[]      hierarchy;
                    Cv2.FindContours(binary, out contours, out hierarchy, RetrievalModes.List, ContourApproximationModes.ApproxTC89KCOS);
                }
            }

            // Contour processing is truncated in this snippet; return the (still empty) corners
            return(squares);
        }
Example #25
        public static OpenCvSharp.Point[] Square(Mat src)
        {
            Mat[] split  = Cv2.Split(src);
            Mat   blur   = new Mat();
            Mat   binary = new Mat();

            OpenCvSharp.Point[] squares = new OpenCvSharp.Point[4];

            int    N   = 10;
            double max = src.Size().Width * src.Size().Height * 0.9;
            double min = src.Size().Width * src.Size().Height * 0.1;

            for (int channel = 0; channel < 3; channel++)
            {
                Cv2.GaussianBlur(split[channel], blur, new OpenCvSharp.Size(5, 5), 1);
                for (int i = 0; i < N; i++)
                {
                    Cv2.Threshold(blur, binary, i * 255 / N, 255, ThresholdTypes.Binary);

                    OpenCvSharp.Point[][] contours;
                    HierarchyIndex[]      hierarchy;
                    Cv2.FindContours(binary, out contours, out hierarchy, RetrievalModes.List, ContourApproximationModes.ApproxTC89KCOS);

                    for (int j = 0; j < contours.Length; j++)
                    {
                        double perimeter           = Cv2.ArcLength(contours[j], true);
                        OpenCvSharp.Point[] result = Cv2.ApproxPolyDP(contours[j], perimeter * 0.02, true);

                        double area   = Cv2.ContourArea(result);
                        bool   convex = Cv2.IsContourConvex(result);
                    }
                }
            }

            return(squares);
        }
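
The loop above computes perimeter, area, and convex but never fills squares; the snippet is truncated. One common way to finish it (an assumption, not the original author's code) is to keep 4-vertex convex polygons whose area falls between min and max, inside the j-loop:

        // Hypothetical filter, placed right after 'convex' is computed
        if (result.Length == 4 && convex && area > min && area < max)
        {
            squares = result; // keep the latest candidate quad
        }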
Example #26
        internal bool SearchImageFromDict(Dictionary <string, Bitmap> buttonImages, out Point centerPoint, out string name)
        {
            window = autoIt.window;
            double threshold = 0.85;

            name        = null;
            centerPoint = new OpenCvSharp.Point();

            gameScreen_graphics.CopyFromScreen(window.X, window.Y, 0, 0, size_region);
            using Mat result     = new Mat();
            using Mat gameScreen = OpenCvSharp.Extensions.BitmapConverter.ToMat(gameScreen_bitmap);       // save the screen capture into a Mat
            using Mat mat_region_desktop_gray = gameScreen.CvtColor(ColorConversionCodes.BGR2GRAY);

            foreach (KeyValuePair <string, Bitmap> buttonImage in buttonImages)
            {
                using Mat searchImg      = OpenCvSharp.Extensions.BitmapConverter.ToMat(buttonImage.Value);
                using Mat searchImg_gray = searchImg.CvtColor(ColorConversionCodes.BGR2GRAY);

                Cv2.MatchTemplate(mat_region_desktop_gray, searchImg_gray, result, TemplateMatchModes.CCoeffNormed);                     // template search
                Cv2.Threshold(result, result, threshold, 1.0, ThresholdTypes.Tozero);
                Cv2.MinMaxLoc(result, out double minVal, out double maxVal, out OpenCvSharp.Point minLoc, out OpenCvSharp.Point maxLoc); // locate the best match
                if (maxVal > threshold)
                {
                    centerPoint = new OpenCvSharp.Point(maxLoc.X + buttonImage.Value.Width / 2, maxLoc.Y + buttonImage.Value.Height / 2);
                    overlay.DrawRect(maxLoc.X, maxLoc.Y, buttonImage.Value.Width, buttonImage.Value.Height);
                    name = buttonImage.Key;
                    return(true);
                }
            }

            return(false);
        }
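
A short usage sketch (image names and the bot instance are assumptions):

        // Hypothetical call site for SearchImageFromDict
        var buttons = new Dictionary<string, Bitmap>
        {
            ["ok"]     = new Bitmap("ok.png"),
            ["cancel"] = new Bitmap("cancel.png"),
        };
        if (bot.SearchImageFromDict(buttons, out OpenCvSharp.Point center, out string name))
        {
            Debug.WriteLine($"found '{name}' at ({center.X}, {center.Y})");
        }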
Example #27
        private void find_max_area(Mat src_gray, ref OpenCvSharp.Point center, ref Mat dst_color) // returns the result in color
        {
            // Mat.Threshold returns a new Mat (it does not threshold in place),
            // so keep the result and run FindContours on it
            Mat binary = src_gray.Threshold(0, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
            Mat[] contours;
            Mat   hie = new Mat();

            Cv2.FindContours(binary, out contours, hie, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            if (contours.Length > 0)
            {
                double max_size  = 0;
                int    max_index = 0;

                for (int i = 0; i < contours.Length; i++)
                {
                    double size = Cv2.ContourArea(contours[i]);
                    if (max_size < size)
                    {
                        max_size  = size;
                        max_index = i;
                    }
                }
                Cv2.DrawContours(dst_color, contours, max_index, new Scalar(255, 255, 255), -1);
                RotatedRect box = Cv2.MinAreaRect(contours[max_index]);
                center = box.Center;
            }

            contours = null;
            binary.Dispose();
            hie.Dispose();
            src_gray.Dispose();
        }
Example #28
        public Tracker()
        {
            InitializeComponent();
            //UI

            label_RedBG.SendToBack();
            label_BlueBG.SendToBack();
            label_RedBG.Controls.Add(label_CarA);
            label_RedBG.Controls.Add(labelAScore);
            label_BlueBG.Controls.Add(label_CarB);
            int newX = label_CarB.Location.X - label_BlueBG.Location.X;
            int newY = label_CarB.Location.Y - label_BlueBG.Location.Y;

            label_CarB.Location = new System.Drawing.Point(newX, newY);
            label_BlueBG.Controls.Add(labelBScore);
            newX = labelBScore.Location.X - label_BlueBG.Location.X;
            newY = labelBScore.Location.Y - label_BlueBG.Location.Y;
            labelBScore.Location = new System.Drawing.Point(newX, newY);
            label_GameCount.Text = "上半场"; // "First half"

            InitialCaiServer();
            MessageBox.Show("TCP IP is " + server.getUsedIP().ToString() + "  port is " + server.getPort().ToString());
            udp = new CaiNetwork.CaiUDP();
            MessageBox.Show("UDP IP is " + udp.broadcastIpEndPoint.Address.ToString() + "  port is " + udp.broadcastIpEndPoint.Port.ToString());

            // Init
            flags = new MyFlags();
            flags.Init();
            flags.Start();
            ptsShowCorners = new Point2f[4];
            cc             = new CoordinateConverter(flags);
            localiser      = new Localiser();
            capture        = new VideoCapture();
            // threadCamera = new Thread(CameraReading);
            capture.Open(0);
            timeCamNow  = DateTime.Now;
            timeCamPrev = timeCamNow;

            car1 = new Point2i();
            car2 = new Point2i();

            buttonStart.Enabled   = true;
            buttonPause.Enabled   = false;
            button_AReset.Enabled = false;
            button_BReset.Enabled = false;

            Game.LoadMap();
            game = new Game();

            if (capture.IsOpened())
            {
                capture.FrameWidth  = flags.cameraSize.Width;
                capture.FrameHeight = flags.cameraSize.Height;
                capture.ConvertRgb  = true;
                timer100ms.Interval = 75;
                timer100ms.Start();
                //Cv2.NamedWindow("binary");
            }
        }
Example #29
        /// <summary>
        /// Grab camera frames and keep switching the displayed image
        /// </summary>
        public virtual void Capture(object state)
        {
            var haarCascade = new CascadeClassifier("data/haarcascades/haarcascade_frontalface_default.xml");

            var camera = new VideoCapture(0 /* use device 0 */)
            {
                // size of the captured frames (frame rate optional)
                FrameWidth  = 480,
                FrameHeight = 270,
                // Fps = 60
            };

            using (var img = new Mat()) // Mat that receives each captured frame
                using (camera) {
                    while (true)
                    {
                        if (this.IsExitCapture)
                        {
                            this.Dispatcher.Invoke(() => this._Image.Source = null);
                            break;
                        }


                        camera.Read(img); // read from the webcam (blocks until a frame arrives in the buffer)

                        if (img.Empty())
                        {
                            break;
                        }
                        var result = img.Clone();
                        using (var gray = new Mat()) {
                            Cv2.CvtColor(img, gray, ColorConversionCodes.BGR2GRAY);
                            var faces = haarCascade.DetectMultiScale(
                                gray,
                                1.08,
                                2,
                                HaarDetectionType.FindBiggestObject,
                                new OpenCvSharp.Size(50, 50)
                                );
                            foreach (var face in faces)
                            {
                                var center = new OpenCvSharp.Point {
                                    X = (int)(face.X + face.Width * 0.5),
                                    Y = (int)(face.Y + face.Height * 0.5)
                                };
                                var axes = new OpenCvSharp.Size {
                                    Width  = (int)(face.Width * 0.5),
                                    Height = (int)(face.Height * 0.5)
                                };
                                Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
                            }
                        }

                        this.Dispatcher.Invoke(() => {
                            this._Image.Source = result.ToWriteableBitmap(); // display the frame in WPF
                        });
                    }
                }
        }
Example #30
        /// <summary>
        /// Create a processed image of the dynamic threshold algorithm, used as visual feedback for the user
        /// and rendered in the video feed panel on the user interface <br/>
        /// Image processing algorithm
        /// </summary>
        /// <param name="img">input image, of type OpenCvSharp.Mat</param>
        /// <param name="largestContourArea">desired contour, of type OpenCvSharp.Mat</param>
        /// <param name="contourCenter">center point of desired contour, of type OpenCvSharp.Point</param>
        /// <param name="actuatorPositionPixels">position of an actuator translated into pixels, of type System.Drawing.Point</param>
        /// <returns>output image, of type OpenCvSharp.Mat</returns>
        public Mat ComposeImageDTC(ref Mat img, ref Mat largestContourArea, ref OpenCvSharp.Point contourCenter, ref System.Drawing.Point actuatorPositionPixels)
        {
            // Convert to RGB, draw largest contour, draw largest contour center, put coordinates text, draw actuators position
            Cv2.CvtColor(img, imgComposed, ColorConversionCodes.GRAY2BGR);
            Cv2.DrawContours(imgComposed, new Mat[] { largestContourArea }, -1, Scalar.FromRgb(0, 128, 255), 2);
            Cv2.Circle(imgComposed, contourCenter, 2, Scalar.FromRgb(250, 50, 50), 5);
            Cv2.PutText(imgComposed, contourCenter.X.ToString() + "," + contourCenter.Y.ToString(), contourCenter, HersheyFonts.HersheySimplex, 1, Scalar.FromRgb(250, 50, 50), 2);
            //Cv2.Rectangle(imgComposed, new OpenCvSharp.Point(100, 100), new OpenCvSharp.Point(300, 300), Scalar.Azure, 2, LineTypes.AntiAlias, 0);

            // Draw actuator position line - Either axis X, either Y, either both
            foreach (Enums.Axis axis in TASettings.ConnectedAxisList)
            {
                if (axis == Enums.Axis.X)
                {
                    Cv2.Line(imgComposed, new OpenCvSharp.Point(actuatorPositionPixels.X, 0), new OpenCvSharp.Point(actuatorPositionPixels.X, imgComposed.Height - 1), Scalar.FromRgb(0, 250, 100), 3);
                }

                else if (axis == Enums.Axis.Y)
                {
                    Cv2.Line(imgComposed, new OpenCvSharp.Point(0, actuatorPositionPixels.Y), new OpenCvSharp.Point(imgComposed.Width - 1, actuatorPositionPixels.Y), Scalar.FromRgb(0, 250, 100), 3);
                }
            }

            // Draw IDC bounding box if necessary
            if (TrackingSettings.TrackingAlgorithm == Enums.TrackingAlgoirthm.ImageDistance)
            {
                imgComposed = DrawBoundingBox(ref imgGrayscale,
                                              new OpenCvSharp.Point()
                {
                    X = IDCSettings.BoxTopLeft.X, Y = IDCSettings.BoxTopLeft.Y
                },
                                              new OpenCvSharp.Point()
                {
                    X = IDCSettings.BoxBottomRight.X, Y = IDCSettings.BoxBottomRight.Y
                },
                                              Scalar.Orange,
                                              LineTypes.Link4
                                              );
            }

            // Draw bounding box if necessary
            if (TrackingSettings.TrackingAlgorithm == Enums.TrackingAlgoirthm.BoundingBox)
            {
                imgComposed = DrawBoundingBox(ref imgGrayscale,
                                              new OpenCvSharp.Point()
                {
                    X = BoundingBoxCalib.TopLeftPixelPos.X, Y = BoundingBoxCalib.TopLeftPixelPos.Y
                },
                                              new OpenCvSharp.Point()
                {
                    X = BoundingBoxCalib.BottomRightPixelPos.X, Y = BoundingBoxCalib.BottomRightPixelPos.Y
                },
                                              Scalar.BlueViolet,
                                              LineTypes.Link4
                                              );
            }

            return(imgComposed);
        }