Example #1
        private Mat Recipe(string path, double value, string option)
        {
            if (path != null)
            {
                Mat orgMat     = new Mat(path);
                Mat previewMat = new Mat();

                #region //Algorithm
                Mat matrix = new Mat();
                switch (option)
                {
                case "Contrast":
                    Cv2.AddWeighted(orgMat, value, orgMat, 0, 0, previewMat);
                    break;
                //AddWeighted computes the weighted sum of two arrays plus a scalar in one call: dst = alpha*src1 + beta*src2 + gamma
                //http://suyeongpark.me/archives/tag/opencv/page/2

                case "Brightness":
                    Cv2.Add(orgMat, value, previewMat);
                    break;
                //Add performs per-element addition of the image and the scalar value.
                //Add saturates the result automatically (values are clamped to the valid range).
                //http://suyeongpark.me/archives/tag/opencv/page/2


                case "Blur":
                    Cv2.GaussianBlur(orgMat, previewMat, new OpenCvSharp.Size(9, 9), value, 1, BorderTypes.Default);     //GaussianBlur
                    break;
                //Blurs the image to soften it: each pixel's value is recomputed from the values of its neighbors.
                //For every pixel, the surrounding pixel values are compared and combined to produce the new value; a simple box blur
                //replaces the pixel with the plain average of its window, and the filter is applied to every pixel in the image.
                //https://076923.github.io/posts/C-opencv-13/

                case "Rotation":
                    matrix = Cv2.GetRotationMatrix2D(new Point2f(orgMat.Width / 2, orgMat.Height / 2), value, 1.0);     // GetRotationMatrix2D builds the 2x3 rotation matrix
                    Cv2.WarpAffine(orgMat, previewMat, matrix, new OpenCvSharp.Size(orgMat.Width, orgMat.Height), InterpolationFlags.Linear, BorderTypes.Replicate);
                    break;
                //WarpAffine(src, dst, matrix, dsize): dsize is passed explicitly because the rotated result may not match the source size.
                //InterpolationFlags.Linear is the interpolation most commonly used for images.
                //BorderTypes.Replicate fills the margins exposed by the rotation by replicating the nearest edge pixels (not zero-padding with black).
                //https://076923.github.io/posts/C-opencv-6/

                case "Rotation90":
                    matrix = Cv2.GetRotationMatrix2D(new Point2f(orgMat.Width / 2, orgMat.Height / 2), 90, 1.0);
                    Cv2.WarpAffine(orgMat, previewMat, matrix, new OpenCvSharp.Size(orgMat.Width, orgMat.Height), InterpolationFlags.Linear, BorderTypes.Reflect);
                    break;
                //Same as "Rotation" above, but with a fixed 90-degree angle.
                //BorderTypes.Reflect fills the margins exposed by the rotation by mirroring the edge pixels.
                //https://076923.github.io/posts/C-opencv-6/


                case "Horizontal Flip":
                    Cv2.Flip(orgMat, previewMat, FlipMode.Y);
                    break;
                //Flip(src, dst, flipMode) mirrors the image about the axis given by FlipMode; FlipMode.Y flips around the vertical axis (horizontal flip).
                //https://076923.github.io/posts/C-opencv-5/


                case "Vertical Flip":
                    Cv2.Flip(orgMat, previewMat, FlipMode.X);
                    break;
                //Flip(src, dst, flipMode); FlipMode.X flips around the horizontal axis (vertical flip).
                //https://076923.github.io/posts/C-opencv-5/


                case "Noise":
                    matrix = new Mat(orgMat.Size(), MatType.CV_8UC3);
                    Cv2.Randn(matrix, Scalar.All(0), Scalar.All(value));
                    Cv2.AddWeighted(orgMat, 1, matrix, 1, 0, previewMat);
                    break;
                //Randn fills the matrix with normally distributed random values (mean 0, standard deviation = value).
                //AddWeighted then blends the noise matrix into the source with the given weights.

                case "Zoom In":
                    //#1. zoom into the center region only
                    double width_param  = (int)(0.8 * orgMat.Width);          // 0.8 is the zoom ratio, orgMat.Width the source width (TODO: expose the ratio as an 80%-style parameter)
                    double height_param = (int)(0.8 * orgMat.Height);         // 0.8 is the zoom ratio, orgMat.Height the source height
                    int    startX       = orgMat.Width - (int)width_param;    // top-left X of the crop: source size minus the scaled size
                    int    startY       = orgMat.Height - (int)height_param;  // top-left Y of the crop
                    Mat    tempMat      = new Mat(orgMat, new OpenCvSharp.Rect(startX, startY, (int)width_param - (int)(0.2 * orgMat.Width), (int)height_param - (int)(0.2 * orgMat.Height))); // intermediate Mat; the Rect takes x, y, width, height
                    // TODO: handle crops that land outside the image (shift them back in, or zero-pad)
                    Cv2.Resize(tempMat, previewMat, new OpenCvSharp.Size(orgMat.Width, orgMat.Height), (double)((double)orgMat.Width / (double)(width_param - (int)(0.2 * orgMat.Width))),
                               (double)((double)orgMat.Height / ((double)(height_param - (int)(0.2 * orgMat.Height)))), InterpolationFlags.Cubic);
                    // fx = source width / crop width, fy = source height / crop height: how many times the crop is scaled back up
                    break;


                case "Sharpen":
                    float   filterBase   = -1f;
                    float   filterCenter = filterBase * -9;
                    float[] data         = new float[9] {
                        filterBase, filterBase, filterBase,
                        filterBase, filterCenter, filterBase,
                        filterBase, filterBase, filterBase
                    };
                    Mat kernel = new Mat(3, 3, MatType.CV_32F, data);
                    Cv2.Filter2D(orgMat, previewMat, orgMat.Type(), kernel);
                    break;
                //
                //
                //

                // Contrast Limited Adaptive Histogram Equalization
                case "CLAHE":
                    CLAHE test = Cv2.CreateCLAHE();
                    test.SetClipLimit(10.0f);
                    if (value < 1)
                    {
                        value = 1;
                    }
                    test.SetTilesGridSize(new OpenCvSharp.Size(value, value));
                    Cv2.CvtColor(orgMat, orgMat, ColorConversionCodes.RGB2HSV);
                    var splitedMat = orgMat.Split();

                    test.Apply(splitedMat[2], splitedMat[2]);
                    Cv2.Merge(splitedMat, previewMat);
                    Cv2.CvtColor(previewMat, previewMat, ColorConversionCodes.HSV2RGB);
                    break;


                default:
                    break;
                }
                matrix.Dispose(); // release the Mat's native memory
                orgMat.Dispose(); // release the Mat's native memory
                return(previewMat);

                #endregion
            }
            return(null);
        }
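A hedged usage sketch for Recipe (the file path and option value below are placeholders, not from the original):

        // apply one preview operation and show it; Recipe returns null for a null path
        Mat preview = Recipe(@"C:\images\sample.bmp", 1.5, "Contrast"); // hypothetical path; value is the alpha weight here
        if (preview != null)
        {
            Cv2.ImShow("preview", preview);
            Cv2.WaitKey(0);
            preview.Dispose();
        }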
Example #2
        private static void MakeImagesForArticle()
        {
            var resizeK = 0.2;

            var dir = "Example/";

            var src   = new Mat("0.bmp");
            var src_g = new Mat("0.bmp", LoadMode.GrayScale);

            var src_1   = new Mat("1.bmp");
            var src_1_g = new Mat("1.bmp", LoadMode.GrayScale);

            var background   = new Mat("background.bmp");
            var background_g = new Mat("background.bmp", LoadMode.GrayScale);

            src.Resize(resizeK).ImWrite(dir + "0.png");
            src_g.Resize(resizeK).ImWrite(dir + "0 g.png");
            src_g.ThresholdStairs().Resize(resizeK).ImWrite(dir + "0 g th.png");

            var canny = new Mat();

            Cv2.Canny(src_g, canny, 50, 200);
            canny.Resize(0.5).ImWrite(dir + "0 canny.png");

            Mat[] src_channels;
            Cv2.Split(src, out src_channels);

            for (var i = 0; i < src_channels.Length; ++i)
            {
                var channels = Enumerable.Range(0, src_channels.Length).Select(j => new Mat(src_channels[0].Rows, src_channels[0].Cols, src_channels[0].Type())).ToArray();
                channels[i] = src_channels[i];
                var dst = new Mat();
                Cv2.Merge(channels, dst);
                dst.Resize(resizeK).ImWrite(dir + string.Format("0 ch{0}.png", i));
                src_channels[i].ThresholdStairs().Resize(resizeK).ImWrite(dir + string.Format("0 ch{0} th.png", i));
            }

            if (true)
            {
                src.Resize(0.4).ImWrite(dir + "0.png");
                src_1.Resize(0.4).ImWrite(dir + "1.png");
                background.Resize(0.4).ImWrite(dir + "bg.png");

                var dst_01 = new Mat();
                Cv2.Absdiff(src, src_1, dst_01);
                dst_01.Resize(resizeK).ImWrite(dir + "01.png");
                dst_01.Cut(new Rect(50, src.Height * 4 / 5 - 50, src.Width / 5, src.Height / 5)).ImWrite(dir + "01 part.png");
                dst_01.Cut(new Rect(50, src.Height * 4 / 5 - 50, src.Width / 5, src.Height / 5)).CvtColor(ColorConversion.RgbToGray).ImWrite(dir + "01 g.png");
                dst_01.CvtColor(ColorConversion.RgbToGray).ThresholdStairs().Resize(resizeK).ImWrite(dir + "01 g th.png");

                var dst_01_g = new Mat();
                Cv2.Absdiff(src_g, src_1_g, dst_01_g);
                dst_01_g.Cut(new Rect(50, src.Height * 4 / 5 - 50, src.Width / 5, src.Height / 5)).ImWrite(dir + "0g1g.png");
                dst_01_g.ThresholdStairs().Resize(resizeK).ImWrite(dir + "0g1g th.png");
            }

            if (true)
            {
                var dst_0b = new Mat();
                Cv2.Absdiff(src, background, dst_0b);
                dst_0b.Resize(0.6).ImWrite(dir + "0b.png");

                var dst_0b_g = new Mat();
                Cv2.Absdiff(src_g, background_g, dst_0b_g);
                dst_0b_g.Resize(0.3).ImWrite(dir + "0b g.png");
                dst_0b_g.ThresholdStairs().Resize(0.3).ImWrite(dir + "0b g th.png");
            }
            if (true)
            {
                var hsv_src = new Mat();
                Cv2.CvtColor(src, hsv_src, ColorConversion.RgbToHsv);


                var hsv_background = new Mat();
                Cv2.CvtColor(background, hsv_background, ColorConversion.RgbToHsv);

                var hsv_background_channels = hsv_background.Split();

                var hsv_src_channels = hsv_src.Split();

                if (true)
                {
                    var all = new Mat(src.ToIplImage(), true);
                    for (var i = 0; i < hsv_src_channels.Length; ++i)
                    {
                        hsv_src_channels[i].CvtColor(ColorConversion.GrayToRgb).CopyTo(all, new Rect(i * src.Width / 3, src.Height / 2, src.Width / 3, src.Height / 2));
                    }
                    src_g.CvtColor(ColorConversion.GrayToRgb).CopyTo(all, new Rect(src.Width / 2, 0, src.Width / 2, src.Height / 2));
                    all.Resize(0.3).ImWrite(dir + "all.png");
                }

                foreach (var pair in new[] { "h", "s", "v" }.Select((channel, index) => new { channel, index }))
                {
                    var diff = new Mat();
                    Cv2.Absdiff(hsv_src_channels[pair.index], hsv_background_channels[pair.index], diff);
                    diff.Resize(0.3).With_Title(pair.channel).ImWrite(dir + string.Format("0b {0}.png", pair.channel));
                    diff.ThresholdStairs().Resize(0.3).ImWrite(dir + string.Format("0b {0} th.png", pair.channel));

                    hsv_src_channels[pair.index].Resize(resizeK).With_Title(pair.channel).ImWrite(dir + string.Format("0 {0}.png", pair.channel));

                    foreach (var d in new[] { -100, -50, 50, 100 })
                    {
                        var delta = new Mat(hsv_src_channels[pair.index].ToIplImage(), true);
                        delta.Rectangle(new Rect(0, 0, delta.Width, delta.Height), new Scalar(Math.Abs(d)), -1);

                        var new_channel = new Mat();
                        if (d >= 0)
                        {
                            Cv2.Add(hsv_src_channels[pair.index], delta, new_channel);
                        }
                        else
                        {
                            Cv2.Subtract(hsv_src_channels[pair.index], delta, new_channel);
                        }

                        //delta.ImWrite(dir + string.Format("d{0}{1}.png", pair.channel, d));
                        //new_channel.ImWrite(dir + string.Format("q{0}{1}.png", pair.channel, d));

                        var new_hsv = new Mat();
                        Cv2.Merge(hsv_src_channels.Select((channel, index) => index == pair.index ? new_channel : channel).ToArray(), new_hsv);

                        var res = new Mat();
                        Cv2.CvtColor(new_hsv, res, ColorConversion.HsvToRgb);
                        res.Resize(resizeK).With_Title(string.Format("{0} {1:+#;-#}", pair.channel, d)).ImWrite(dir + string.Format("0 {0}{1}.png", pair.channel, d));
                    }
                }
                //if (true)
                //{
                //  var mat = new Mat(src.ToIplImage(), true);
                //  mat.CopyTo(
                //}
            }
        }
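Resize, Cut, ThresholdStairs, and With_Title are not OpenCvSharp APIs; they are project-local extension helpers (this example also targets an older OpenCvSharp release, hence LoadMode and ColorConversion). A minimal sketch of what the Resize and Cut extensions might look like, stated as an assumption:

        public static class MatExtensions
        {
            // scale by a factor k in both dimensions, returning a new Mat
            public static Mat Resize(this Mat src, double k)
            {
                var dst = new Mat();
                Cv2.Resize(src, dst, new Size(0, 0), k, k, InterpolationFlags.Linear);
                return dst;
            }

            // crop to a rectangle; the returned Mat shares pixel data with the source
            public static Mat Cut(this Mat src, Rect roi)
            {
                return new Mat(src, roi);
            }
        }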
        private Mat FindContoursAndDraw(Bitmap originalMap, string objectName, int minArea = 500, int maxArea = 10000)
        {
            //var houghBitmap = HoughTransform(originalMap);
            //var invertedHoughBitmap = InvertImage(houghBitmap);
            Mat originalMat = BitmapConverter.ToMat(originalMap);
            //Mat invertedHoughMat = BitmapConverter.ToMat(invertedHoughBitmap);
            Mat blackWhiteMat = new Mat();
            Mat edgesMat      = new Mat();

            Cv2.CvtColor(originalMat, blackWhiteMat, ColorConversionCodes.BGRA2GRAY);
            if (MapObjectsColors.GetInstance().Tight.Contains(objectName))
            {
                Bitmap edgesMap = BitmapConverter.ToBitmap(blackWhiteMat);
                edgesMap = ImageFilter.SobelFilter(edgesMap, grayscale: true);
                edgesMat = BitmapConverter.ToMat(edgesMap);
                Cv2.CvtColor(edgesMat, edgesMat, ColorConversionCodes.BGRA2GRAY);
            }
            else
            {
                Cv2.Canny(blackWhiteMat, edgesMat, 50, 100);
            }


            OpenCvSharp.Point[][] contours;
            HierarchyIndex[]      hierarchyIndexes;
            Cv2.FindContours(
                edgesMat,
                out contours,
                out hierarchyIndexes,
                mode: RetrievalModes.CComp,
                method: ContourApproximationModes.ApproxSimple);



            var componentCount = 0;
            var contourIndex   = 0;
            var objectDict     = mapObjects.getObjectDictionary();

            if (contours.Length != 0)
            {
                if (objectDict.ContainsKey(objectName))
                {
                    objectDict[objectName] = contours;
                }
                else
                {
                    objectDict.Add(objectName, contours);
                }
                while ((contourIndex >= 0))
                {
                    var contour          = contours[contourIndex];
                    var boundingRect     = Cv2.BoundingRect(contour);
                    var boundingRectArea = boundingRect.Width * boundingRect.Height;
                    var ca  = Cv2.ContourArea(contour) * Convert.ToDouble(scaleBox.SelectedItem) / 100;
                    var cal = Cv2.ArcLength(contour, closed: true) * Convert.ToDouble(scaleBox.SelectedItem) / 100;

                    //if (boundingRectArea > minArea)
                    //{

                    Cv2.PutText(originalMat, $"A:{ca.ToString("#.##")} km2", new OpenCvSharp.Point(boundingRect.X, boundingRect.Y + 10), HersheyFonts.HersheyPlain, 1, Scalar.White, 1);
                    Cv2.PutText(originalMat, $"L:{cal.ToString("#.##")} km", new OpenCvSharp.Point(boundingRect.X, boundingRect.Y + 25), HersheyFonts.HersheyPlain, 1, Scalar.White, 1);


                    //}


                    //Cv2.DrawContours(
                    //    originalMat,
                    //    contours,
                    //    contourIndex,
                    //    color: Scalar.All(componentCount + 1),
                    //    thickness: -1,
                    //    lineType: LineTypes.Link8,
                    //    hierarchy: hierarchyIndexes,
                    //    maxLevel: int.MaxValue);

                    componentCount++;


                    contourIndex = hierarchyIndexes[contourIndex].Next;
                }
            }

            return(originalMat);
        }
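The while loop above walks the contours through HierarchyIndex.Next instead of indexing 0..Length-1, visiting contour 0 and its same-level siblings until Next returns -1. A standalone sketch of the pattern (the edge image name is illustrative):

        // given a binary edge image `edges`, e.g. from Cv2.Canny
        Cv2.FindContours(edges, out OpenCvSharp.Point[][] contours, out HierarchyIndex[] hierarchy,
                         RetrievalModes.CComp, ContourApproximationModes.ApproxSimple);
        if (contours.Length > 0)
        {
            for (int i = 0; i >= 0; i = hierarchy[i].Next) // Next == -1 ends the chain
            {
                var box = Cv2.BoundingRect(contours[i]);
                Console.WriteLine($"contour {i}: area={Cv2.ContourArea(contours[i]):F1}, bounds={box}");
            }
        }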
        private void button1_Click(object sender, EventArgs e)
        {
            video = new VideoCapture("D:\\project\\c#\\Day021\\vtest.avi");
            Random r = new Random();
            int    cnt = 0;
            int    x = 0, y = 0, W = 0, H = 0;

            int               sleepTime = (int)Math.Round(1000 / video.Fps);
            String            filenameBodyCascade = "D:\\project\\c#\\Day021\\haarcascade_fullbody.xml"; // use the trained haar-cascade
            CascadeClassifier bodyCascade = new CascadeClassifier();

            if (!bodyCascade.Load(filenameBodyCascade))
            {
                Console.WriteLine("error");
                return;
            }

            inCvImage  = new Mat();
            outCvImage = new Mat();

            int  oH  = inCvImage.Height; // note: inCvImage is still empty here, so oH/oW are 0 (outCvImage is reallocated by CvtColor below anyway)
            int  oW  = inCvImage.Width;
            bool csi = false;

            while (true)
            {
                cnt = 0;

                video.Read(inCvImage);

                if (inCvImage.Empty())
                {
                    break;
                }
                // detect
                Rect[] body = bodyCascade.DetectMultiScale(inCvImage);
                Console.WriteLine(body.Length);

                foreach (var item in body)
                {
                    Scalar c = new Scalar(r.Next(0, 255), r.Next(0, 255), r.Next(0, 255));
                    Cv2.Rectangle(inCvImage, item, c); // add rectangle to the image
                    Console.WriteLine("body : " + item);
                    cnt++;

                    x = item.X;
                    y = item.Y;
                    W = item.Width;
                    H = item.Height;
                }

                if (cnt > 4)
                {
                    csi = true;
                }

                if (csi)
                {
                    outCvImage = Mat.Ones(new OpenCvSharp.Size(oW, oH), MatType.CV_8UC1);
                    Cv2.CvtColor(inCvImage, outCvImage, ColorConversionCodes.BGR2GRAY);
                    Cv2.AdaptiveThreshold(outCvImage, outCvImage, 255,
                                          AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 25, 5);
                }
                else
                {
                    outCvImage = inCvImage;
                }

                // display
                //Cv2.ImShow("CCTV", frame);

                picboxCow.ImageIpl = outCvImage;

                if (Cv2.WaitKey(9) == 27)
                {
                    inCvImage.Dispose(); outCvImage.Dispose();
                    video.Release();
                    break;
                }

                if (cnt > 7)
                {
                    video.Release();
                    break;
                }
            }


            // grayscale (note: if the loop above exited via the ESC branch, inCvImage/outCvImage were already disposed at this point)
            Cv2.CvtColor(inCvImage, outCvImage, ColorConversionCodes.BGR2GRAY);
            picboxCow.ImageIpl = outCvImage;

            Delay(2000); // visualize each transform step

            // spread the pixel intensities evenly with histogram equalization
            Cv2.EqualizeHist(outCvImage, outCvImage);
            picboxCow.ImageIpl = outCvImage;

            Delay(2000); // visualize each transform step

            dst1 = new Mat(); // reinitialize for the accumulated image work
            dst2 = new Mat();


            Cv2.Resize(outCvImage, dst1, new OpenCvSharp.Size(1920, 1280), 0, 0, InterpolationFlags.Lanczos4);

            // crop out the last detected person so an individual in the crowd can be inspected separately
            Rect rect = new Rect(x, y, W, H);

            dst1 = outCvImage.SubMat(rect);

            // size picplCSi to match the image
            picplCSi.Size     = new System.Drawing.Size(W, H);
            picplCSi.ImageIpl = dst1;
            lbf.Visible       = true;

            Delay(2000); // visualize each transform step

            // enlarge for clearer visualization
            Cv2.Resize(dst1, dst2, new OpenCvSharp.Size(800, 600), 0, 0, InterpolationFlags.Lanczos4);

            // size picplCSi to match the image
            this.Size     = new System.Drawing.Size();
            picplCSi.Size = new System.Drawing.Size(800, 600);

            Cv2.ImShow("dst2", dst2);//image 출력

            Cv2.WaitKey(2000);

            // after saving to file, dispose the used Mats to avoid leaking native memory
            string _saveName = "C:/images/" + DateTime.Now.ToString("yyyy-MM-dd_hh_mm_ss") + ".jpeg"; // note: '/' in the format string would expand to path separators

            Cv2.ImWrite(_saveName, dst2);
            inCvImage.Dispose();
            outCvImage.Dispose();
            dst1.Dispose();

            //Check Message
            MessageBox.Show("Upload", "Checking", MessageBoxButtons.OK, MessageBoxIcon.Information);
            Close();
        }
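Delay is a helper defined elsewhere in this form; given how it is used between the visualization steps, it presumably waits while keeping the UI responsive. A sketch under that assumption:

        // requires System.Diagnostics and System.Windows.Forms
        private static void Delay(int milliseconds)
        {
            var sw = Stopwatch.StartNew();
            while (sw.ElapsedMilliseconds < milliseconds)
            {
                Application.DoEvents(); // keep pumping messages so the picture boxes repaint
            }
        }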
Example #5
        //Scan the screen with an AI model to detect people and their positions
        public DetectionResult DetectionScreen()
        {
            long toltalMillis = 0;
            long startTicks   = DateTime.Now.Ticks;

            //holds every detected object and the position of the highest-confidence detection
            DetectionResult      detectionResult  = new DetectionResult();
            List <ObjectPosRect> objectPosRects   = new List <ObjectPosRect>();
            ObjectPosRect        maxConfidencePos = new ObjectPosRect();

            maxConfidencePos.setValues(0, 0, 0, 0, 0, 0);

            //copy the image of the screen region to be analyzed
            //note: when handling graphics in C#, resources must be released manually and the objects are not thread-safe
            //System.Drawing.Bitmap bitmap = new Bitmap(this.detectionRect.w, this.detectionRect.h);
            //System.Drawing.Graphics graphics = Graphics.FromImage(bitmap);
            this.detectGraphics.CopyFromScreen(this.detectionRect.x, this.detectionRect.y, 0, 0, new System.Drawing.Size(this.detectionRect.w, this.detectionRect.h), CopyPixelOperation.SourceCopy);

            try
            {
                //deliberately no using statement here: it would implicitly dispose the Mat, and other functions outside still need to access it
                Mat frameMat = BitmapConverter.ToMat(this.detectBitmap);
                if (frameMat != null && !frameMat.Empty())
                {
                    //screen captures and video frames use different pixel formats, so convert (RGBA -> RGB)
                    Cv2.CvtColor(frameMat, frameMat, ColorConversionCodes.RGBA2RGB);

                    // Convert Mat to batch of images
                    //var frameWidth = frameMat.Cols;
                    //var frameHeight = frameMat.Rows;
                    var frameWidth  = 512;
                    var frameHeight = 512;

                    //when converting to input data, keep the original size or scale proportionally; compared to non-proportional scaling this improves recognition accuracy considerably
                    //mind the scaleFactor and mean parameters: they control input normalization and strongly affect model accuracy
                    //using (var inputBlob = CvDnn.BlobFromImage(frameMat, 0.008, new OpenCvSharp.Size(320, 320), new Scalar(104, 117, 123), true, false))
                    //using (var inputBlob = CvDnn.BlobFromImage(frameMat, 0.008, new OpenCvSharp.Size(320, 320), new Scalar(103.939, 116.779, 123.68), true, false))
                    //using (var inputBlob = CvDnn.BlobFromImage(frameMat, 0.008, new OpenCvSharp.Size(frameWidth, frameHeight), new Scalar(103.939, 116.779, 123.68), true, false))
                    //using (var inputBlob = CvDnn.BlobFromImage(frameMat, 1.0 / 127.5, new OpenCvSharp.Size(frameWidth, frameHeight), new Scalar(127.5, 127.5, 127.5), true, false))
                    using (var inputBlob = CvDnn.BlobFromImage(frameMat, 1.0 / 255, new OpenCvSharp.Size(frameWidth, frameHeight), new Scalar(123.675, 116.28, 103.53), true, false))
                    {
                        //run inference on the image with the AI model
                        detectionNet.SetInput(inputBlob);
                        var output = detectionNet.Forward();

                        //parse the detection results
                        var detectionMat = new Mat(output.Size(2), output.Size(3), MatType.CV_32F, output.Ptr(0));
                        for (int i = 0; i < detectionMat.Rows; i++)
                        {
                            float confidence = detectionMat.At <float>(i, 2);

                            if (confidence > 0.30)
                            {
                                int classid = (int)detectionMat.At <float>(i, 1);
                                //filter by class: this scenario only shows detected people, which also improves throughput
                                //if (classid < this.labelNames.Length &&  (classid == 1 || classid != 1))
                                //if (classid < this.labelNames.Length && (classid == 1))
                                if (classid < this.labelNames.Length && (classid == 0))
                                {
                                    int x1 = (int)(detectionMat.At <float>(i, 3) * frameWidth);
                                    int y1 = (int)(detectionMat.At <float>(i, 4) * frameHeight);
                                    int x2 = (int)(detectionMat.At <float>(i, 5) * frameWidth);
                                    int y2 = (int)(detectionMat.At <float>(i, 6) * frameHeight);

                                    ObjectPosRect objectPosRect = new ObjectPosRect();
                                    objectPosRect.setValues(classid, confidence, x1, y1, x2, y2);
                                    objectPosRects.Add(objectPosRect);

                                    //check whether this is the largest person detection;
                                    //to keep the logic simple, the maximum detection width is a constant
                                    //if ((objectPosRect.x2 - objectPosRect.x1) < this.detectionRect.w / 3 &&
                                    //    (this.maxPersonPos.x2 - this.maxPersonPos.x1 < objectPosRect.x2 - objectPosRect.x1))
                                    //    this.maxPersonPos.setValues(objectPosRect);

                                    //to be robust, exclude detections that are too large or too small
                                    if ((objectPosRect.x2 - objectPosRect.x1) <= 200 && (objectPosRect.x2 - objectPosRect.x1) >= 10 &&
                                        (objectPosRect.y2 - objectPosRect.y1) <= 280 && (objectPosRect.y2 - objectPosRect.y1) >= 10)
                                    {
                                        //check whether this is the game player: the player sits near the bottom of the screen,
                                        //slightly left of center (around x = 860 of 1920), and sways left and right a lot in-game,
                                        //so the x-axis tolerance has to be generous
                                        if (Math.Abs(objectPosRect.x1 + (objectPosRect.x2 - objectPosRect.x1) / 2 - this.playerCentX) <= 100 &&
                                            objectPosRect.y1 > this.detectionRect.h * 1 / 2 &&
                                            Math.Abs(this.detectionRect.h - objectPosRect.y2) <= 10)
                                        {
                                            //this is the player himself; exclude
                                            //var testi = 0;
                                        }
                                        else
                                        {
                                            //keep the position of the highest-confidence person, excluding the player at the bottom center of the screen
                                            if (objectPosRect.confidence > maxConfidencePos.confidence)
                                            {
                                                maxConfidencePos.setValues(objectPosRect);
                                            }
                                        }
                                    }

                                    long endTicks = DateTime.Now.Ticks;
                                    toltalMillis = (endTicks - startTicks) / 10000;
                                }
                            }
                        }
                    }
                    detectionResult.setValues(this.rawDetectionRect, this.detectionRect, frameMat, objectPosRects, maxConfidencePos, this.maxPersonW, toltalMillis);
                    //displaying the image is handled on another thread
                    //this.ShowPicture(frameMat, objectPosRects, maxConfidencePos, toltalMillis);
                }

                //Thread.Sleep(100);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }

            //these are class-level members created only once; they must not be released manually here
            //graphics.Dispose();
            //bitmap.Dispose();

            return(detectionResult);
        }
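detectionNet, detectBitmap, detectGraphics, and the rect fields are initialized elsewhere in the class. A hedged sketch of how the detection network might be created (the model file names are placeholders, not from the original):

        using OpenCvSharp.Dnn;

        // e.g. an SSD-style person detector; choose the reader that matches the model format
        Net detectionNet = CvDnn.ReadNetFromOnnx("person_detector.onnx");                      // hypothetical ONNX model
        //Net detectionNet = CvDnn.ReadNetFromCaffe("deploy.prototxt", "detector.caffemodel"); // Caffe alternative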
Example #6
        static void Main(string[] args)
        {
            var detectedFaceGrayImage = new Mat();
            var grayImage             = new Mat();
            var srcImage      = new Mat();
            var cascade       = new CascadeClassifier(@"..\..\Data\haarcascade_frontalface_alt.xml");
            var nestedCascade = new CascadeClassifier(@"..\..\Data\haarcascade_eye_tree_eyeglasses.xml");
            var smile         = new CascadeClassifier(@"..\..\Data\haarcascade_smile.xml");

            VideoCapture video = new VideoCapture(0);

            for (int i = 0; i < 1000; i++)
            {
                video.Read(srcImage);

                Cv2.CvtColor(srcImage, grayImage, ColorConversionCodes.BGRA2GRAY);
                Cv2.EqualizeHist(grayImage, grayImage);

                var faces = cascade.DetectMultiScale(
                    image: grayImage,
                    scaleFactor: 1.1,
                    minNeighbors: 2,
                    flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                    minSize: new Size(30, 30)
                    );

                foreach (var faceRect in faces)
                {
                    var detectedFaceImage = new Mat(srcImage, faceRect);
                    Cv2.Rectangle(srcImage, faceRect, Scalar.Red, 2);

                    var nestedObjects = nestedCascade.DetectMultiScale(
                        image: grayImage,
                        scaleFactor: 1.1,
                        minNeighbors: 2,
                        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        minSize: new Size(30, 30)
                        );

                    foreach (var nestedObject in nestedObjects)
                    {
                        Cv2.Rectangle(srcImage, nestedObject, Scalar.YellowGreen, 2);
                    }

                    var nestedSmile = smile.DetectMultiScale(
                        image: grayImage,
                        scaleFactor: 3.1,
                        minNeighbors: 2,
                        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        minSize: new Size(30, 30)
                        );

                    foreach (var nestedObject in nestedSmile)
                    {
                        Cv2.Rectangle(srcImage, nestedObject, Scalar.Green, 2);
                    }
                }



                Cv2.ImShow("tela", srcImage);
                Cv2.WaitKey(1); // do events
            }

            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
            srcImage.Dispose();
        }
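Note that nestedCascade and smile scan the whole grayImage for every face; eye and smile detection is usually restricted to each detected face rectangle instead. A hedged variant of the inner detection:

        using (var faceRoi = new Mat(grayImage, faceRect))
        {
            var eyes = nestedCascade.DetectMultiScale(faceRoi, 1.1, 2);
            foreach (var eye in eyes)
            {
                // map the ROI-relative rectangle back into full-image coordinates
                var r = new Rect(faceRect.X + eye.X, faceRect.Y + eye.Y, eye.Width, eye.Height);
                Cv2.Rectangle(srcImage, r, Scalar.YellowGreen, 2);
            }
        }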
        static void Main(string[] args)
        {
            VideoCapture Capture = new VideoCapture(rtspUrl);

            Cv2.NamedWindow("test", WindowFlags.Normal);
            Cv2.NamedWindow("org", WindowFlags.Normal);

#if VIDEO
            if (Capture.IsOpened())
            {
                Mat    img        = new Mat();
                Point  startPoint = new Point(0, 0);
                Point  endPoint   = new Point(1920, 0);
                double minFitness = double.MinValue;
                while (Capture.Read(img))
                {
                    Mat imgGray = new Mat();

                    Cv2.CvtColor(img, imgGray, ColorConversionCodes.BGR2GRAY); //KS: grayscale
                    Rect roi = new Rect(0, 0, 1920, 120);
                    Cv2.Rectangle(imgGray, roi, Scalar.White, -1);             //KS: mask out the camera's overlay text


                    Cv2.MedianBlur(imgGray, imgGray, 9);                          //KS: median blur
                    Cv2.Threshold(imgGray, imgGray, 0, 255, ThresholdTypes.Otsu); //KS: binarize (Otsu)
                    //Cv2.Threshold(imgGray, imgGray, 100, 250, ThresholdTypes.Tozero);

                    Cv2.Canny(imgGray, imgGray, 10, 50, 3); //KS: edge extraction
                    //KS: probabilistic Hough lines - threshold: accumulator votes for a line; minLineLength: drop lines shorter than this; maxLineGap: max gap between points on one line
                    LineSegmentPoint[] linePoint = Cv2.HoughLinesP(imgGray, 1.0, Cv2.PI / 180, 100, 300, 100);

                    GeneticOptimizor2 GA = new GeneticOptimizor2();
                    GA.AddLine(linePoint);
                    GA.Run(ref minFitness, ref startPoint, ref endPoint); //KS: genetic algorithm
                    Cv2.Line(img, startPoint, endPoint, Scalar.Red, 4);


                    for (int i = 0; i < linePoint.Length; i++)
                    {
                        Point p1 = linePoint[i].P1;
                        Point p2 = linePoint[i].P2;
                        Cv2.Line(img, p1, p2, Scalar.Green, 4);
                    }

                    Cv2.ImShow("org", img);
                    Cv2.ImShow("test", imgGray);
                    Cv2.WaitKey(1);

                    bool isCheck = false;
                    if (isCheck)
                    {
                        GA.CheckAnswer();
                    }
                }
            }
#else
            Mat img     = Cv2.ImRead(@"D:\C#\Code\CaptureSeaLine\bin\Debug\SeaLine.png", ImreadModes.Color);
            Mat imgGray = new Mat();
            Cv2.CvtColor(img, imgGray, ColorConversionCodes.BGR2GRAY);
            Cv2.MedianBlur(imgGray, imgGray, 9);
            Cv2.Threshold(imgGray, imgGray, 0, 255, ThresholdTypes.Otsu);
            //Cv2.Threshold(imgGray, imgGray, 100, 250, ThresholdTypes.Tozero);

            Cv2.Canny(imgGray, imgGray, 10, 50, 3);
            LineSegmentPoint[] linePoint = Cv2.HoughLinesP(imgGray, 1.0, Cv2.PI / 180, 200, 300, 100);
            for (int i = 0; i < linePoint.Length; i++)
            {
                Point p1 = linePoint[i].P1;
                Point p2 = linePoint[i].P2;
                Cv2.Line(img, p1, p2, Scalar.Green, 4);
            }
            Cv2.ImShow("test", imgGray);
            Cv2.ImShow("org", img);
            Cv2.WaitKey(0);
#endif
        }
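The KS comment on HoughLinesP maps onto the OpenCvSharp signature as follows; the video-branch call written with named arguments makes the three tuning knobs explicit:

        LineSegmentPoint[] linePoint = Cv2.HoughLinesP(imgGray,
            rho: 1.0, theta: Cv2.PI / 180,
            threshold: 100,      // accumulator votes needed to accept a line
            minLineLength: 300,  // discard segments shorter than this
            maxLineGap: 100);    // largest gap between points merged into one segment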
Example #8
        public async Task AddSample(Mat frame)
        {
            if (RemainingSeconds > 0)
            {
                Rect roi = new Rect();
                if (frame.Width > 0)
                {
                    _target = new Mat();
                    Cv2.CopyTo(frame, _target);

                    if (_useHeadDetection)
                    {
                        Mat gray = new Mat();
                        Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                        Rect[] faces = _haarCascade.DetectMultiScale(gray, _config.FaceConfig.Scale, _config.FaceConfig.MinNeighBours, HaarDetectionType.ScaleImage, new Size(_config.FaceConfig.MinSize, _config.FaceConfig.MinSize));
                        if (faces.Count() > 0)
                        {
                            var maxArea = faces.Max(x => x.Width * x.Height);
                            roi = faces.FirstOrDefault(x => x.Width * x.Height == maxArea);
                        }
                    }
                    else
                    {
                        roi = new Rect(_config.ColorSkinCalibrationConfig.TLX, _config.ColorSkinCalibrationConfig.TLY, Math.Min(_config.ColorSkinCalibrationConfig.Width, MaxWidth), Math.Min(_config.ColorSkinCalibrationConfig.Height, MaxHeight));
                    }

                    if (roi != null && roi.Width > 0)
                    {
                        _wcropped = new Mat(frame, roi);

                        var pipeline = new Pipeline()
                                       .Pipe(new BlurFilter(_config.BlurConfig))
                                       .Pipe(new BackgroundRemoveFilter(_wbackground, _config.BackGroundRemoveConfig));
                        await pipeline.RunAsync(_wcropped);

                        _wSubstractBackground = pipeline.OutputMat;



                        HSVFilter hsvFilter = new HSVFilter();
                        hsvFilter.Apply(_wSubstractBackground);
                        _whsv = hsvFilter.Output;

                        YCrCbFilter ycrcbFilter = new YCrCbFilter();
                        ycrcbFilter.Apply(_wSubstractBackground);
                        _wycrcb = ycrcbFilter.Output;



                        var sample = ColorSkinCalibrationSample.From(_wSubstractBackground, _whsv, _wycrcb);

                        _samples.Add(sample);

                        _whisto_h = GetHistogramGraph(sample.HSV.Histogram_H, _wcropped.Width, _wcropped.Height);
                        _whisto_s = GetHistogramGraph(sample.HSV.Histogram_S, _wcropped.Width, _wcropped.Height);
                        _whisto_v = GetHistogramGraph(sample.HSV.Histogram_V, _wcropped.Width, _wcropped.Height);


                        Result = new SkinPresetHSV
                        {
                            HSV = new HSV
                            {
                                H = (int)_samples.Select(x => (double)x.HSV.MeanH.X).Median(),
                                S = (int)_samples.Select(x => (double)x.HSV.MeanS.X).Median(),
                                V = (int)_samples.Select(x => (double)x.HSV.MeanV.X).Median()
                            },
                            YCRCB = new YCrCb
                            {
                                Y  = (int)_samples.Select(x => (double)x.YCRCB.MeanY.X).Median(),
                                Cr = (int)_samples.Select(x => (double)x.YCRCB.MeanCr.X).Median(),
                                Cb = (int)_samples.Select(x => (double)x.YCRCB.MeanCb.X).Median()
                            }
                        };



                        RaisePropertyChanged(() => W_CROPPED);
                        RaisePropertyChanged(() => W_BACKGROUND_SUBSTRACTED);
                        RaisePropertyChanged(() => W_HSV);
                        RaisePropertyChanged(() => W_HISTO_H);
                        RaisePropertyChanged(() => W_HISTO_S);
                        RaisePropertyChanged(() => W_HISTO_V);
                        RaisePropertyChanged(() => W_HANDTARGET);
                    }

                    RaisePropertyChanged(() => RemainingSeconds);
                }
            }
            else
            {
                Result = new SkinPresetHSV
                {
                    HSV = new HSV
                    {
                        H = (int)_samples.Select(x => (double)x.HSV.MeanH.X).Median(),
                        S = (int)_samples.Select(x => (double)x.HSV.MeanS.X).Median(),
                        V = (int)_samples.Select(x => (double)x.HSV.MeanV.X).Median()
                    },
                    YCRCB = new YCrCb
                    {
                        Y  = (int)_samples.Select(x => (double)x.YCRCB.MeanY.X).Median(),
                        Cr = (int)_samples.Select(x => (double)x.YCRCB.MeanCr.X).Median(),
                        Cb = (int)_samples.Select(x => (double)x.YCRCB.MeanCb.X).Median()
                    }
                };
                CalibrationStarted  = false;
                CanStartCalibration = false;
                W_HANDTARGET        = null;
                RaisePropertyChanged(() => W_HANDTARGET);
            }
        }
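Median is not a LINQ operator; it presumably comes from a statistics package or a project extension. A minimal sketch, assuming a custom extension method:

        // requires System, System.Collections.Generic, System.Linq
        public static class EnumerableStatistics
        {
            // median: the middle element, or the mean of the two middle elements
            public static double Median(this IEnumerable<double> source)
            {
                var sorted = source.OrderBy(x => x).ToArray();
                if (sorted.Length == 0)
                    throw new InvalidOperationException("Sequence contains no elements");
                int mid = sorted.Length / 2;
                return sorted.Length % 2 == 1 ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2.0;
            }
        }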
Example #9
        private static mmStatus DataCallbackFN(IntPtr hSession, IntPtr pMediaSample, IntPtr pUserData)
        {
            try
            {
                MM_DATA mediaSample = (MM_DATA)Marshal.PtrToStructure(pMediaSample, typeof(MM_DATA));

                // ignore uncompressed frames and corrupt compressed frames
                if (((mediaSample.ContextFlag & (uint)MM_DATA_CONTEXT.MM_DATA_CONTEXT_UNCOMPRESSED_VIDEO) == 0) ||
                    ((mediaSample.ContextFlag & (uint)MM_DATA_CONTEXT.MM_DATA_CONTEXT_UNCOMPRESSED_VIDEO_CORRUPTION) == (uint)MM_DATA_CONTEXT.MM_DATA_CONTEXT_UNCOMPRESSED_VIDEO_CORRUPTION))
                {
                    return(mmStatus.MM_STS_NONE);
                }

                Console.WriteLine($"MM_DATA - Session 0x{hSession:X} FourCC {mediaSample.FourCC}");

                _samples++;
                if (_samples % CommandLine._interval != 0)
                {
                    return(mmStatus.MM_STS_NONE);
                }

                _sampled++;
                if (_sampled > CommandLine._count)
                {
                    _play = false;
                    return(mmStatus.MM_STS_NONE);
                }

                uint   managedSize = mediaSample.Width * mediaSample.Height * 3 / 2;
                byte[] managed     = new byte[managedSize];
                int    dIndex      = 0;
                int    sIndex      = 0;

                if ((MAKEFOURCC('n', 'v', '1', '2') == mediaSample.FourCC) ||
                    (MAKEFOURCC('N', 'V', '1', '2') == mediaSample.FourCC) ||
                    (MAKEFOURCC('n', 'v', '2', '1') == mediaSample.FourCC) ||
                    (MAKEFOURCC('N', 'V', '2', '1') == mediaSample.FourCC))
                {
                    for (uint y = 0; y < mediaSample.Height; y++) // y plane
                    {
                        Marshal.Copy(mediaSample.PData[0] + sIndex, managed, dIndex, (int)mediaSample.Width);
                        dIndex += (int)mediaSample.Width;
                        sIndex += (int)mediaSample.Pitch[0];
                    }
                    sIndex = 0;
                    for (uint y = 0; y < mediaSample.Height / 2; y++) // uv plane (interleaved)
                    {
                        Marshal.Copy(mediaSample.PData[1] + sIndex, managed, dIndex, (int)mediaSample.Width);
                        dIndex += (int)mediaSample.Width;
                        sIndex += (int)mediaSample.Pitch[1];
                    }
                }
                else if ((MAKEFOURCC('y', 'v', '1', '2') == mediaSample.FourCC) ||
                         (MAKEFOURCC('Y', 'V', '1', '2') == mediaSample.FourCC))
                {
                    for (uint y = 0; y < mediaSample.Height; y++) // y plane
                    {
                        Marshal.Copy(mediaSample.PData[0] + sIndex, managed, dIndex, (int)mediaSample.Width);
                        dIndex += (int)mediaSample.Width;
                        sIndex += (int)mediaSample.Pitch[0];
                    }
                    sIndex = 0;
                    for (uint y = 0; y < mediaSample.Height / 2; y++) // v plane
                    {
                        Marshal.Copy(mediaSample.PData[2] + sIndex, managed, dIndex, (int)mediaSample.Width / 2);
                        dIndex += (int)mediaSample.Width / 2;
                        sIndex += (int)mediaSample.Pitch[2];
                    }
                    sIndex = 0;
                    for (uint y = 0; y < mediaSample.Height / 2; y++) // u plane
                    {
                        Marshal.Copy(mediaSample.PData[1] + sIndex, managed, dIndex, (int)mediaSample.Width / 2);
                        dIndex += (int)mediaSample.Width / 2;
                        sIndex += (int)mediaSample.Pitch[1];
                    }
                }
                else if ((MAKEFOURCC('i', '4', '2', '0') == mediaSample.FourCC) ||
                         (MAKEFOURCC('I', '4', '2', '0') == mediaSample.FourCC) ||
                         (MAKEFOURCC('i', 'y', 'u', 'v') == mediaSample.FourCC) ||
                         (MAKEFOURCC('I', 'Y', 'U', 'V') == mediaSample.FourCC))
                {
                    for (uint y = 0; y < mediaSample.Height; y++) // y plane
                    {
                        Marshal.Copy(mediaSample.PData[0] + sIndex, managed, dIndex, (int)mediaSample.Width);
                        dIndex += (int)mediaSample.Width;
                        sIndex += (int)mediaSample.Pitch[0];
                    }
                    sIndex = 0;
                    for (uint y = 0; y < mediaSample.Height / 2; y++) // u plane
                    {
                        Marshal.Copy(mediaSample.PData[1] + sIndex, managed, dIndex, (int)mediaSample.Width / 2);
                        dIndex += (int)mediaSample.Width / 2;
                        sIndex += (int)mediaSample.Pitch[1];
                    }
                    sIndex = 0;
                    for (uint y = 0; y < mediaSample.Height / 2; y++) // v plane
                    {
                        Marshal.Copy(mediaSample.PData[2] + sIndex, managed, dIndex, (int)mediaSample.Width / 2);
                        dIndex += (int)mediaSample.Width / 2;
                        sIndex += (int)mediaSample.Pitch[2];
                    }
                }
                else
                {
                    _sts = mmStatus.MM_STS_SRC_ERROR_INVALID_DATA;
                    Console.WriteLine($"MM_DATA - Session 0x{hSession:X} {mediaSample.FourCC} Not Processed");
                    _play = false;
                }

#if USING_OPEN_CV
                ColorConversionCodes cs;

                if ((MAKEFOURCC('n', 'v', '1', '2') == mediaSample.FourCC) ||
                    (MAKEFOURCC('N', 'V', '1', '2') == mediaSample.FourCC) ||
                    (MAKEFOURCC('n', 'v', '2', '1') == mediaSample.FourCC) ||
                    (MAKEFOURCC('N', 'V', '2', '1') == mediaSample.FourCC))
                {
                    cs = ColorConversionCodes.YUV2BGR_NV12;
                }
                else if ((MAKEFOURCC('y', 'v', '1', '2') == mediaSample.FourCC) ||
                         (MAKEFOURCC('Y', 'V', '1', '2') == mediaSample.FourCC))
                {
                    cs = ColorConversionCodes.YUV2BGR_YV12;
                }
                else if ((MAKEFOURCC('i', '4', '2', '0') == mediaSample.FourCC) ||
                         (MAKEFOURCC('I', '4', '2', '0') == mediaSample.FourCC) ||
                         (MAKEFOURCC('i', 'y', 'u', 'v') == mediaSample.FourCC) ||
                         (MAKEFOURCC('I', 'Y', 'U', 'V') == mediaSample.FourCC))
                {
                    cs = ColorConversionCodes.YUV2BGR_I420;
                }
                else
                {
                    return(mmStatus.MM_STS_NONE);
                }

                string samplePath = $"{CommandLine._path}_{_sampled}.bmp";
                Mat    picYV12    = new Mat((int)((mediaSample.Height * 3) / 2), (int)mediaSample.Width, MatType.CV_8UC1, managed);
                Mat    picBGR     = new Mat();
                Cv2.CvtColor(picYV12, picBGR, cs);
                Cv2.ImWrite(samplePath, picBGR);
#endif
            }
            catch (Exception e)
            {
                Console.WriteLine("{0} Exception caught.", e);
            }
            return(mmStatus.MM_STS_NONE);
        }
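MAKEFOURCC is not shown in this sample; it presumably mirrors the Win32 macro that packs four characters into a little-endian uint:

        private static uint MAKEFOURCC(char c0, char c1, char c2, char c3)
        {
            return (uint)(byte)c0 | ((uint)(byte)c1 << 8) | ((uint)(byte)c2 << 16) | ((uint)(byte)c3 << 24);
        }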
Example #10
        static void Main(string[] args)
        {
            Console.WriteLine("Hello World!");

            Mat src = new Mat(@"./t2.png");

            src = src.Resize(new Size(src.Width / 4, src.Height / 4));
            Cv2.ImShow("src", src);

            var binary = BinarizationMat(src);

            Cv2.ImShow("bin", binary);
            var fScreenMat = FindContoursMat(binary, src);

            fScreenMat = new Mat(fScreenMat,
                                 new Rect((int)(fScreenMat.Width * 0.025), (int)(fScreenMat.Height * 0.05),
                                          fScreenMat.Width - (int)(fScreenMat.Width * 0.05), fScreenMat.Height - (int)(fScreenMat.Height * 0.1)));
            Cv2.ImShow("Screen", fScreenMat);

            var m2 = SaturationGain(fScreenMat, 255);

            Cv2.ImShow("SaturationGain", m2);
            Cv2.CvtColor(m2, m2, ColorConversionCodes.BGR2GRAY);
            Mat b2 = m2.Threshold(100, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);

            Cv2.FindContours(b2, out var contours, out var hierarchy, RetrievalModes.Tree,
                             ContourApproximationModes.ApproxSimple);
            Cv2.ImShow("b2", b2);
            var dst = fScreenMat;

            foreach (var itemPoint in contours)
            {
                Console.WriteLine("_________________");
                var epsilon = 0.0075 * Cv2.ArcLength(itemPoint, true);
                var approx  = Cv2.ApproxPolyDP(itemPoint, epsilon, true);
                if (approx.FirstOrDefault().X == 0 || approx.FirstOrDefault().Y == 0)
                {
                    continue;
                }

                Cv2.DrawContours(dst, new IEnumerable <Point>[] { approx }, -1, Scalar.Green, 3);

                Console.WriteLine("Approx Angle:" + approx.Length);
                if (approx.Length == 3)
                {
                    Cv2.PutText(dst, "Triangle", approx.FirstOrDefault(), HersheyFonts.HersheyComplex, 0.5, Scalar.Red);
                }
                else if (approx.Length == 4)
                {
                    var rect        = Cv2.BoundingRect(approx);
                    var rotatedRect = Cv2.MinAreaRect(approx);
                    var box         = Cv2.BoxPoints(rotatedRect);
                    Console.WriteLine(rotatedRect.Angle);
                    Cv2.PutText(dst, rotatedRect.Angle.ToString("0.0"), approx.LastOrDefault(),
                                HersheyFonts.HersheyComplex, 0.5, Scalar.Yellow);
                    Cv2.Line(dst, new Point(box[2].X, box[2].Y), new Point(box[0].X, box[0].Y), Scalar.White, 2);

                    var aspectRatio = (double)rect.Width / rect.Height; // cast to double: integer division would make the 0.9-1.1 test below meaningless
                    if (aspectRatio >= 0.9 && aspectRatio <= 1.1)
                    {
                        if ((Math.Abs(rotatedRect.Angle) >= 80 && Math.Abs(rotatedRect.Angle) <= 100) ||
                            Math.Abs(rotatedRect.Angle) <= 10)
                        {
                            Cv2.PutText(dst, "Square", approx.FirstOrDefault(), HersheyFonts.HersheyComplex, 0.5,
                                        Scalar.Red);
                        }
                        else
                        {
                            Cv2.PutText(dst, "Diamond", approx.FirstOrDefault(), HersheyFonts.HersheyComplex, 0.5,
                                        Scalar.Red);
                        }
                    }
                    else
                    {
                        Cv2.PutText(dst, "Rectangle", approx.FirstOrDefault(), HersheyFonts.HersheyComplex, 0.5,
                                    Scalar.Red);
                    }
                }
                else if (approx.Length == 5)
                {
                    Cv2.PutText(dst, "Pentagon", approx.FirstOrDefault(), HersheyFonts.HersheyComplex, 0.5, Scalar.Red);
                }
                else if (approx.Length == 10)
                {
                    Cv2.PutText(dst, "Star", approx.FirstOrDefault(), HersheyFonts.HersheyComplex, 0.5, Scalar.Red);
                }
                else if (approx.Length > 10)
                {
                    Cv2.PutText(dst, "Circle", approx.FirstOrDefault(), HersheyFonts.HersheyComplex, 0.5,
                                Scalar.Red);
                }

                foreach (var item in approx)
                {
                    Console.WriteLine(item.X + " " + item.Y);
                    Cv2.Circle(dst, item.X, item.Y, 5, new Scalar(255, 0, 0), 2, LineTypes.AntiAlias);
                    Cv2.ImShow("dst", dst);
                    //Cv2.WaitKey();
                }
                //foreach (var item in itemPoint) Console.WriteLine(item.X + " " + item.Y);
                //Console.WriteLine("_________________");
            }
            Cv2.ImShow("dst", dst);
            Cv2.WaitKey();
        }
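BinarizationMat, FindContoursMat, and SaturationGain are helpers defined elsewhere in this program. A plausible minimal BinarizationMat, assuming grayscale conversion followed by Otsu thresholding:

        static Mat BinarizationMat(Mat src)
        {
            var gray = new Mat();
            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
            var bin = new Mat();
            Cv2.Threshold(gray, bin, 0, 255, ThresholdTypes.Binary | ThresholdTypes.Otsu); // Otsu picks the threshold automatically
            gray.Dispose();
            return bin;
        }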
    private void DemoIRBlobTrack()
    {
        int IRWidth  = kinectManager.IRWidth;
        int IRHeight = kinectManager.IRHeight;

        //get image and convert to threshold image
        Mat irImage = new Mat(IRHeight, IRWidth, MatType.CV_8UC4, kinectManager.IRRawData);              //rows=height, cols=width
        Mat ir8Bit  = new Mat();

        Cv2.CvtColor(irImage, ir8Bit, ColorConversionCodes.RGBA2GRAY);
        Cv2.Threshold(ir8Bit, ir8Bit, thresh: 200, maxval: 255, type: ThresholdTypes.Binary);

        //Find blobs
        SimpleBlobDetector.Params detectorParams = new SimpleBlobDetector.Params
        {
            //MinDistBetweenBlobs = 10, // 10 pixels between blobs
            //MinRepeatability = 1,

            //MinThreshold = 100,
            //MaxThreshold = 255,
            //ThresholdStep = 5,

            FilterByArea = false,
            //FilterByArea = true,
            //MinArea = 0.001f, // 10 pixels squared
            //MaxArea = 500,

            FilterByCircularity = false,
            //FilterByCircularity = true,
            //MinCircularity = 0.001f,

            FilterByConvexity = false,
            //FilterByConvexity = true,
            //MinConvexity = 0.001f,
            //MaxConvexity = 10,

            FilterByInertia = false,
            //FilterByInertia = true,
            //MinInertiaRatio = 0.001f,

            FilterByColor = false
                            //FilterByColor = true,
                            //BlobColor = 255 // to extract light blobs
        };

        SimpleBlobDetector simpleBlobDetector = SimpleBlobDetector.Create(detectorParams);

        KeyPoint[] blobs = simpleBlobDetector.Detect(ir8Bit);


        foreach (KeyPoint kp in blobs)
        {
            Vector2 blobPt = new Vector2(kp.Pt.X, kp.Pt.Y);

            //transform ir point to unity world space
            Vector2 irDimensions = new Vector2(kinectManager.IRWidth, kinectManager.IRHeight);
            irTrack.transform.localPosition = KinectCVUtilities.TransformTextureToUnity(irPlane, irDimensions, blobPt) + irOffset;


            //transform ir point to color space, then world space
            DepthSpacePoint depthPt = new DepthSpacePoint();
            depthPt.X = blobPt.x;
            depthPt.Y = blobPt.y;
            double          depth         = GetAvg(kinectManager.DepthData, (int)depthPt.X, (int)depthPt.Y, kinectManager.DepthWidth, kinectManager.DepthHeight);
            ColorSpacePoint colorMappedPt = kinectManager.Sensor.CoordinateMapper.MapDepthPointToColorSpace(depthPt, (ushort)depth);

            Vector2 colorDimensions = new Vector2(kinectManager.ColorWidth, kinectManager.ColorHeight);
            Vector2 colorPt         = new Vector2(colorMappedPt.X, colorMappedPt.Y);
            colorTrack.transform.localPosition = KinectCVUtilities.TransformTextureToUnity(colorPlane, colorDimensions, colorPt) + colorOffset;
        }


        //convert back to unity texture, add nice debug drawings
        Mat irImageKeyPoints = new Mat();

        Cv2.DrawKeypoints(ir8Bit, blobs, irImageKeyPoints, color: Scalar.FromRgb(255, 0, 0),
                          flags: DrawMatchesFlags.DrawRichKeypoints);

        //Convert back to RGBA32
        Mat irImageOut = new Mat(IRHeight, IRWidth, MatType.CV_8UC4); // rows = height, cols = width (the original arguments were swapped)

        Cv2.CvtColor(irImageKeyPoints, irImageOut, ColorConversionCodes.BGR2RGBA);      //OpenCV is weird and has it in BGR format

        //load onto texture
        byte[] rawTextureData = KinectCVUtilities.ConvertMatToBytes(irImageOut);

        if (overrideIRTexture)
        {
            kinectManager.IRTexture.LoadRawTextureData(rawTextureData);
            kinectManager.IRTexture.Apply();
        }
    }
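GetAvg is defined elsewhere; since single Kinect depth pixels are noisy and can read zero, it presumably averages the valid depth samples in a small window around the blob point. A sketch under that assumption:

        static double GetAvg(ushort[] depthData, int x, int y, int width, int height, int window = 2)
        {
            double sum = 0;
            int count = 0;
            for (int dy = -window; dy <= window; dy++)
            {
                for (int dx = -window; dx <= window; dx++)
                {
                    int px = x + dx, py = y + dy;
                    if (px < 0 || py < 0 || px >= width || py >= height) continue;
                    ushort d = depthData[py * width + px];
                    if (d == 0) continue; // zero means no depth reading at this pixel
                    sum += d;
                    count++;
                }
            }
            return count > 0 ? sum / count : 0;
        }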
Example #12
        public string CircleDetect()
        {
            Cv2.CvtColor(processImg, processImg, ColorConversionCodes.BGR2GRAY);
            Cv2.GaussianBlur(processImg, processImg, new Size(3, 3), 1);
            Cv2.Threshold(processImg, processImg, 170, 255, ThresholdTypes.Binary);
            Cv2.Erode(processImg, processImg, new Mat());
            Mat temp = Mat.Zeros(new Size(processImg.Width, processImg.Height / 2), processImg.Type()).ToMat();

            temp.PushBack(processImg);
            processImg = temp;
            processImg.PushBack(Mat.Zeros(new Size(processImg.Width, processImg.Height / 2), processImg.Type()).ToMat());
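            // Pad the image with half-height black bands above and below so HoughCircles can still fit circles
            // whose centers fall near the original top and bottom edges.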
            CircleSegment[] circles = Cv2.HoughCircles(processImg, HoughMethods.Gradient, 1, Math.Max(processImg.Cols, processImg.Rows) / 20, 70, 9, 12, 25);
            //Cv2.Erode(processImg, processImg, new Mat(), null, 2);
            //CircleSegment[] circles2 = Cv2.HoughCircles(processImg, HoughMethods.Gradient, 0.8, Math.Max(processImg.Cols, processImg.Rows) / 20, 10, 15, 1, 7);
            //Cv2.CvtColor(processImg, processImg, ColorConversionCodes.GRAY2BGR);

            Mat                  test;
            StringBuilder        passorfail = new StringBuilder();
            int                  cnt        = -1;
            List <CircleSegment> fails      = new List <CircleSegment>();

            foreach (var circle in circles.OrderBy(x => x.Center.X).Reverse().ToList())
            {
                int rad = (int)circle.Radius;

                test = processImg[new Rect((int)(circle.Center.X - rad), (int)(circle.Center.Y - rad), rad * 2, rad * 2)];



                Mat compare = new Mat(test.Size(), test.Type(), Scalar.All(0)); // zero-initialized mask; a bare new Mat(...) holds garbage that would leak through BitwiseAnd
                Cv2.Circle(compare, new Point(compare.Cols / 2, compare.Rows / 2), (int)circle.Radius, new Scalar(255), -1);

                Cv2.BitwiseAnd(test, compare, compare);
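                // Mask the ROI with an ideal filled disc: a perfect disc covers pi/4 (~78%) of its bounding
                // square, so fewer than 45% non-zero pixels after the AND flags an under-filled hole.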

                //Cv2.ImShow($"{cnt+=2}", compare);
                cnt += 2;

                if (compare.CountNonZero() < (compare.Cols * compare.Rows) * 0.45)
                {
                    //Cv2.ImShow($"{cnt}", compare);
                    fails.Add(circle);
                    passorfail.Append($"{cnt}, ");
                }



                //Cv2.ImShow($"{Math.Min(processImg.Cols, processImg.Rows)}", processImg);
                test.Dispose();
                compare.Dispose();
            }
            Cv2.CvtColor(processImg, processImg, ColorConversionCodes.GRAY2BGR);
            //foreach (var circle in fails)
            //{
            //    Cv2.Circle(processImg, (Point)circle.Center, (int)circle.Radius, new Scalar(0, 0, 255), 2);
            //    //Cv2.ImShow("Fails" + pcbCnt++, processImg);
            //}
            //foreach (var circle in circles2)
            //{
            //    //Cv2.Circle(imgData, new OpenCvSharp.Point((int)circle.Center.X, (int)circle.Center.Y), 5, new Scalar(255,255,0));
            //    Cv2.Circle(processImg, new Point((int)circle.Center.X, (int)circle.Center.Y), (int)circle.Radius, new Scalar(0, 255, 255), 2);
            //    //Cv2.ImShow($"{Math.Min(processImg.Cols, processImg.Rows)}", processImg);
            //    //Cv2.WaitKey();
            //    //Cv2.Rectangle(imgData, new Rect((int)circle.Center.X - (int)circle.Radius, (int)circle.Center.Y - (int)circle.Radius, 2 * (int)circle.Radius, 2 * (int)circle.Radius), new Scalar(255,255,0), -1);
            //}

            //Cv2.ImShow($"{Math.Min(processImg.Cols,processImg.Rows)}", processImg);

            return(passorfail.ToString());
        }
        /// <summary>
        /// Texture (material) recognition
        /// </summary>
        public static List <string> TextureFind(Mat img, Point[] points)
        {
            Mat    src = img;
            Mat    hsv = new Mat(), threshold = new Mat();
            Mat    dst = new Mat(), dst2 = new Mat();
            Scalar HsvWhiteLow  = new Scalar(0, 0, 1);                //white
            Scalar HsvWhiteHigh = new Scalar(0.1156, 0.2480, 0.9804);
            Scalar HsvGraLow    = new Scalar(0, 0, 46);               //gray
            Scalar HsvGraHigh   = new Scalar(180, 43, 220);
            Scalar HsvGrayLow   = new Scalar(0.4815, 0.0720, 0.4902); //dark gray
            Scalar HsvGrayHigh  = new Scalar(0.4583, 0.0656, 0.2392);
            Scalar HsvGlassLow  = new Scalar(171, 1, 80);             //glass gray
            Scalar HsvGlassHigh = new Scalar(171, 5, 60);
            Scalar HsvBlueLow   = new Scalar(100, 43, 46);            //blue
            Scalar HsvBlueHigh  = new Scalar(124, 255, 255);

            Cv2.CvtColor(src, hsv, ColorConversionCodes.BGR2HSV);
            Scalar[]  HsvLow    = { HsvWhiteLow, HsvGlassLow, HsvGraLow, HsvBlueLow, HsvGrayLow };
            Scalar[]  HsvHigh   = { HsvWhiteHigh, HsvGlassHigh, HsvGraHigh, HsvBlueHigh, HsvGrayHigh };
            string[]  textcolor = { "White", "Glass", "Blue", "Metal ash", "Gray" };
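            // NOTE (reader caution): OpenCV's 8-bit HSV ranges are H in [0,180) and S,V in [0,255], so the
            // fractional bounds above (e.g. 0.1156) look normalized to [0,1] and may never match anything.
            // The textcolor labels also do not line up index-for-index with HsvLow/HsvHigh (index 2 is the
            // gray range but is labeled "Blue"), which is worth double-checking against the intended mapping.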
            Point[][] contours  = { points };
            int       cc        = 0;
            Point     center    = new Point();

            HierarchyIndex[] hierarchy;

            //List<string> Texture = new List<string>();
            //List<Point> Centerpoint = new List<Point>();
            for (int color = 0; color < HsvLow.Length; color++)
            {
                Cv2.InRange(hsv, HsvLow[color], HsvHigh[color], threshold);
                Cv2.Threshold(threshold, threshold, 1, 255, ThresholdTypes.Binary);
                Cv2.CopyMakeBorder(threshold, dst, 1, 1, 1, 1, BorderTypes.Constant, 0);
                //Cv2.FindContours(dst, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);
                int count = 0;   // contour counting is disabled (FindContours above is commented out), so this branch always executes
                if (count < 1)
                {
                    Rect bound = Cv2.BoundingRect(points);

                    center = new Point(bound.X + bound.Width / 2, bound.Y + bound.Height / 2);
                    int[] vs = { center.X, center.Y };
                    byte  x  = threshold.At <byte>(vs);   // threshold is CV_8UC1, so read it as byte
                    if (x > 0)
                    {
                        //Cv2.DrawContours(src, contours, 0, Scalar.Red, 1, LineTypes.Link8);
                        //Cv2.PutText(src, textcolor[color], center, HersheyFonts.HersheyComplex,1, Scalar.Black, 1, LineTypes.Link8);
                        //Cv2.Circle(src, center, 10, Scalar.Gold);
                        cc = color;
                    }
                }
            }
            Centerpoint.Add(center);
            switch (textcolor[cc])
            {
            case "Gray":
            case "White": Texture.Add("石头"); break;

            case "Glass":
            case "Blue": Texture.Add("玻璃"); break;

            case "Metal ash": Texture.Add("金属"); break;

            default: Texture.Add("其他"); break;
            }
            //Console.WriteLine(textcolor[cc] + " region count: " + count);
            //Cv2.NamedWindow("result_txe", 0);
            //Cv2.ResizeWindow("result_txe", 500, 500);
            //Cv2.ImShow("result_txe", src);
            //
            //Window.WaitKey();
            return(Texture);
        }
        /// <summary>
        /// Image contour detection
        /// </summary>
        /// <param name="src"></param>
        public static List <Point[]> Findarea(Mat src)
        {
            //Mat img = src;
            Mat img = new Mat();

            src.CopyTo(img);
            Mat gray  = new Mat();
            Mat black = new Mat();

            Point[][]        contours;
            HierarchyIndex[] hierarchy;
            Point2f[]        point2Fs = new Point2f[] { };
            List <Point2f[]> point2 = new List <Point2f[]>();
            Point            p0 = new Point(0, 0), p1 = new Point(0, 0), p2 = new Point(0, 0), p3 = new Point(0, 0);

            ImageMethod.HistogramEqualization(img, img);
            Mat soX = new Mat(), soY = new Mat();

            Cv2.CvtColor(img, gray, ColorConversionCodes.BGR2GRAY, 0);
            //Cv2.Blur(gray, gray, new Size(10, 10));
            Cv2.GaussianBlur(gray, gray, new Size(5, 5), 0, 5);
            Mat k = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(5, 5)); //NOTE: tune this kernel size

            Cv2.MorphologyEx(gray, gray, MorphTypes.Open, k);                    //NOTE: tune the MorphTypes choice
            Cv2.Erode(gray, gray, k);
            int thresh_size = (100 / 4) * 2 + 1;                                 //adaptive threshold block size
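            // (100 / 4) * 2 + 1 = 51: AdaptiveThreshold requires an odd blockSize, and the "(n / 4) * 2 + 1"
            // pattern always produces one.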

            Cv2.AdaptiveThreshold(gray, black, 255, 0, ThresholdTypes.Binary, thresh_size, thresh_size / 3);
            //new Window("二值图", WindowMode.FreeRatio, black);
            Cv2.FindContours(black, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple, null);
            int            resultnum  = 0;
            List <Point[]> Excontours = new List <Point[]>();

            //Point[][] Excont =contours;
            for (int i = 0; i < hierarchy.Length; i++)
            {
                double area = Cv2.ContourArea(contours[i], false);
                if (area < 50)
                {
                    continue;
                }
                RotatedRect rect = Cv2.MinAreaRect(contours[i]);
                point2Fs = rect.Points();
                Point[] po = change(rect.Points());

                //point2.Add(point2Fs);
                Excontours.Add(po);

                for (int z = 0; z < point2Fs.Length; z++) //round corner coordinates to 2 decimal places
                {
                    point2Fs[z].X = (float)Math.Round(point2Fs[z].X, 2);
                    point2Fs[z].Y = (float)Math.Round(point2Fs[z].Y, 2);
                }
                point2.Add(point2Fs);
                for (int j = 0; j < 3; j++)
                {
                    p0 = new Point(point2Fs[j].X, point2Fs[j].Y);
                    p1 = new Point(point2Fs[j + 1].X, point2Fs[j + 1].Y);
                    Cv2.Line(img, p0, p1, Scalar.Red, 1, LineTypes.Link8);
                }
                p2 = new Point(point2Fs[3].X, point2Fs[3].Y);
                p3 = new Point(point2Fs[0].X, point2Fs[0].Y);
                Point TP = new Point((((p0.X + p1.X) / 2)), ((p1.Y + p2.Y) / 2));
                Cv2.Line(img, p2, p3, Scalar.Red, 1, LineTypes.Link8);
                resultnum++;
            }
            Console.WriteLine("剔除后的轮廓数:" + resultnum);
            //return Excontours;
            //return point2;
            //Console.WriteLine(js);
            //new Window("result", WindowMode.FreeRatio, img);
            //Window.WaitKey(0);
            return(Excontours);
        }
        private static string detectBarcode(string fileName, double thresh, bool debug = false, double rotation = 0)
        {
            Console.WriteLine("\nProcessing: {0}", fileName);

            // load the image and convert it to grayscale
            var image = new Mat(fileName);

            if (rotation != 0)
            {
                rotateImage(image, image, rotation, 1);
            }

            if (debug)
            {
                Cv2.ImShow("Source", image);
                Cv2.WaitKey(1); // do events
            }

            var gray     = new Mat();
            var channels = image.Channels();

            if (channels > 1)
            {
                Cv2.CvtColor(image, gray, ColorConversion.BgrToGray);
            }
            else
            {
                image.CopyTo(gray);
            }


            // compute the Scharr gradient magnitude representation of the images
            // in both the x and y direction
            var gradX = new Mat();

            Cv2.Sobel(gray, gradX, MatType.CV_32F, xorder: 1, yorder: 0, ksize: -1);
            //Cv2.Scharr(gray, gradX, MatType.CV_32F, xorder: 1, yorder: 0);

            var gradY = new Mat();

            Cv2.Sobel(gray, gradY, MatType.CV_32F, xorder: 0, yorder: 1, ksize: -1);
            //Cv2.Scharr(gray, gradY, MatType.CV_32F, xorder: 0, yorder: 1);

            // subtract the y-gradient from the x-gradient
            var gradient = new Mat();

            Cv2.Subtract(gradX, gradY, gradient);
            Cv2.ConvertScaleAbs(gradient, gradient);
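            // Barcodes have strong horizontal gradients (vertical bars) and weak vertical ones, so subtracting
            // the y-gradient from the x-gradient leaves barcode-like regions bright.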

            if (debug)
            {
                Cv2.ImShow("Gradient", gradient);
                Cv2.WaitKey(1); // do events
            }


            // blur and threshold the image
            var blurred = new Mat();

            Cv2.Blur(gradient, blurred, new Size(9, 9));

            var threshImage = new Mat();

            Cv2.Threshold(blurred, threshImage, thresh, 255, ThresholdType.Binary);

            if (debug)
            {
                Cv2.ImShow("Thresh", threshImage);
                Cv2.WaitKey(1); // do events
            }


            // construct a closing kernel and apply it to the thresholded image
            var kernel = Cv2.GetStructuringElement(StructuringElementShape.Rect, new Size(21, 7));
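            // The 21x7 kernel is much wider than tall, so the closing smears adjacent bright bars together
            // into one solid blob spanning the whole barcode.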
            var closed = new Mat();

            Cv2.MorphologyEx(threshImage, closed, MorphologyOperation.Close, kernel);

            if (debug)
            {
                Cv2.ImShow("Closed", closed);
                Cv2.WaitKey(1); // do events
            }


            // perform a series of erosions and dilations
            Cv2.Erode(closed, closed, null, iterations: 4);
            Cv2.Dilate(closed, closed, null, iterations: 4);

            if (debug)
            {
                Cv2.ImShow("Erode & Dilate", closed);
                Cv2.WaitKey(1); // do events
            }


            //find the contours in the thresholded image, then sort the contours
            //by their area, keeping only the largest one

            Point[][]       contours;
            HiearchyIndex[] hierarchyIndexes;
            Cv2.FindContours(
                closed,
                out contours,
                out hierarchyIndexes,
                mode: ContourRetrieval.CComp,
                method: ContourChain.ApproxSimple);

            if (contours.Length == 0)
            {
                throw new NotSupportedException("Couldn't find any object in the image.");
            }

            var contourIndex       = 0;
            var previousArea       = 0;
            var biggestContourRect = Cv2.BoundingRect(contours[0]);
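            // Walk the sibling chain via the hierarchy's Next links, keeping the contour whose bounding box
            // has the largest area.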

            while ((contourIndex >= 0))
            {
                var contour = contours[contourIndex];

                var boundingRect     = Cv2.BoundingRect(contour); //Find bounding rect for each contour
                var boundingRectArea = boundingRect.Width * boundingRect.Height;
                if (boundingRectArea > previousArea)
                {
                    biggestContourRect = boundingRect;
                    previousArea       = boundingRectArea;
                }

                contourIndex = hierarchyIndexes[contourIndex].Next;
            }


            /*biggestContourRect.Width += 10;
             * biggestContourRect.Height += 10;
             * biggestContourRect.Left -= 5;
             * biggestContourRect.Top -= 5;*/


            var barcode = new Mat(image, biggestContourRect); //Crop the image

            Cv2.CvtColor(barcode, barcode, ColorConversion.BgrToGray);

            Cv2.ImShow("Barcode", barcode);
            Cv2.WaitKey(1); // do events

            var barcodeClone = barcode.Clone();
            var barcodeText  = getBarcodeText(barcodeClone);

            if (string.IsNullOrWhiteSpace(barcodeText))
            {
                Console.WriteLine("Enhancing the barcode...");
                //Cv2.AdaptiveThreshold(barcode, barcode, 255,
                //AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 9, 1);
                //var th = 119;
                var th = 100;
                Cv2.Threshold(barcode, barcode, th, 255, ThresholdType.ToZero);
                Cv2.Threshold(barcode, barcode, th, 255, ThresholdType.Binary);
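                // ToZero keeps values above th and zeroes the rest; the Binary pass then maps the survivors
                // to 255. For 8-bit input the pair is equivalent to a single Binary threshold at th.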
                barcodeText = getBarcodeText(barcode);
            }

            Cv2.Rectangle(image,
                          new Point(biggestContourRect.X, biggestContourRect.Y),
                          new Point(biggestContourRect.X + biggestContourRect.Width, biggestContourRect.Y + biggestContourRect.Height),
                          new Scalar(0, 255, 0),
                          2);

            if (debug)
            {
                Cv2.ImShow("Segmented Source", image);
                Cv2.WaitKey(1); // do events
            }

            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();

            return(barcodeText);
        }
        public void Labeling_example()
        {
            var srcImage = new Mat("./TextSample.png");

            Cv2.ImShow("Source", srcImage);
            Cv2.WaitKey(1);

            var binaryImage = new Mat(srcImage.Size(), MatType.CV_8UC1);

            Cv2.CvtColor(srcImage, binaryImage, ColorConversionCodes.BGR2GRAY);   // the PNG loads as 3-channel BGR; BGRA2GRAY would throw on a 3-channel input
            Cv2.Threshold(binaryImage, binaryImage, thresh: 100, maxval: 255, type: ThresholdTypes.Binary);

            var detectorParams = new SimpleBlobDetector.Params
            {
                //MinDistBetweenBlobs = 10,
                //MinRepeatability = 1,

                //MinThreshold = 100,
                //MaxThreshold = 255,
                //ThresholdStep = 5,

                FilterByArea = false,
                //FilterByArea = true,
                //MinArea = 0.001f,
                //MaxArea = 500,

                FilterByCircularity = false,
                //FilterByCircularity = true,
                //MinCircularity = 0.001f,

                FilterByConvexity = false,
                //FilterByConvexity = true,
                //MinConvexity = 0.001f,
                //MaxConvexity = 10,

                FilterByInertia = false,
                //FilterByInertia = true,
                //MinInertiaRatio = 0.001f,

                FilterByColor = false
                                //FilterByColor = true,
                                //BlobColor = 255
            };
            var simpleBlobDetector = SimpleBlobDetector.Create(detectorParams);
            var keyPoints          = simpleBlobDetector.Detect(binaryImage);

            foreach (var keyPoint in keyPoints)
            {
                Debug.WriteLine("X: {0}, Y: {1}", keyPoint.Pt.X, keyPoint.Pt.Y);
            }

            var imageWithKeyPoints = new Mat();

            Cv2.DrawKeypoints(
                image: binaryImage,
                keypoints: keyPoints,
                outImage: imageWithKeyPoints,
                color: Scalar.FromRgb(255, 0, 0),
                flags: DrawMatchesFlags.DrawRichKeypoints);


            Cv2.ImShow("Key Points", imageWithKeyPoints);

            Cv2.DestroyAllWindows();
            srcImage.Dispose();
            imageWithKeyPoints.Dispose();
        }
Exemple #17
0
        private void dealimage(String path, String savepath)
        {
            Mat    result = Cv2.ImRead(path);
            Scalar color  = new Scalar(0, 0, 0);

            Cv2.CopyMakeBorder(result, result, 10, 10, 10, 10, BorderTypes.Constant, color);

            Mat outp = new Mat();

            Cv2.CvtColor(result, outp, ColorConversionCodes.BGR2GRAY);

            Mat thresh = new Mat();

            Cv2.Threshold(outp, thresh, 0, 255, ThresholdTypes.Binary);

            /* Cv2.ImShow("2", thresh);
             * Cv2.WaitKey(-1);*/
            OpenCvSharp.Point[][] counts;
            HierarchyIndex[]      hierarchyIndices;
            Cv2.FindContours(thresh.Clone(), out counts, out hierarchyIndices, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            double max = 0;

            OpenCvSharp.Point[] point = null;
            foreach (var count in counts)
            {
                if (max < Cv2.ContourArea(count))
                {
                    point = count;
                    max   = Cv2.ContourArea(count);
                }
            }
            Console.WriteLine(thresh.Rows);
            Console.WriteLine(thresh.Cols);
            /*int** mask = new int[][];*/
            Rect rect = Cv2.BoundingRect(point);
            Mat  mat  = Mat.Zeros(thresh.Rows, thresh.Cols, thresh.Type());

            Cv2.Rectangle(mat, rect.TopLeft, rect.BottomRight, 255, -1);
            Mat minRect = mat.Clone();
            Mat sub     = mat.Clone();

            while (Cv2.CountNonZero(sub) > 0)
            {
                Cv2.Erode(minRect, minRect, null);
                Cv2.Subtract(minRect, thresh, sub);
            }
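            // Classic inscribed-rectangle trick: keep eroding the bounding-box mask until subtracting the
            // stitched-area mask leaves nothing, i.e. the eroded rectangle fits entirely inside the valid region.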
            Cv2.FindContours(minRect.Clone(), out counts, out hierarchyIndices, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            max = 0;
            foreach (var count in counts)
            {
                if (max < Cv2.ContourArea(count))
                {
                    point = count;
                    max   = Cv2.ContourArea(count);
                }
            }
            rect     = Cv2.BoundingRect(point);
            result   = new Mat(result, rect);
            savepath = savepath + "/" + "result.jpg";
            Cv2.ImWrite(savepath, result);
            try
            {
                pictureBox1.Image = Image.FromFile(savepath);
            }
            catch (Exception)
            {
                // ignore preview failures; the stitched result has already been written to disk
            }
            MessageBox.Show("拼接成功");
        }
Exemple #18
0
    /// <summary>
    /// Detector
    /// </summary>
    /// <param name="inputTexture">Input Unity texture</param>
    /// <param name="texParams">Texture parameters (flipped, rotated etc.)</param>
    /// <param name="detect">Flag signalling whether we need detection on this frame</param>
    public virtual void ProcessTexture(T texture, OpenCvSharp.Unity.TextureConversionParams texParams, bool detect = true)
    {
        // convert Unity texture to OpenCv::Mat
        ImportTexture(texture, texParams);

        // detect
        if (detect)
        {
            double invF = 1.0 / appliedFactor;
            DataStabilizer.ThresholdFactor = invF;
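            // appliedFactor is presumably the downscale applied by ImportTexture; invF maps detection
            // coordinates back into the original texture's space (see faceRectScaled below).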

            // convert to grayscale and normalize
            Mat gray = new Mat();
            Cv2.CvtColor(processingImage, gray, ColorConversionCodes.BGR2GRAY);

            // fix shadows
            Cv2.EqualizeHist(gray, gray);

            /*Mat normalized = new Mat();
             * CLAHE clahe = CLAHE.Create();
             * clahe.TilesGridSize = new Size(8, 8);
             * clahe.Apply(gray, normalized);
             * gray = normalized;*/

            // detect matching regions (faces bounding)
            Rect[] rawFaces = cascadeFaces.DetectMultiScale(gray, 1.2, 6);
            if (Faces.Count != rawFaces.Length)
            {
                Faces.Clear();
            }

            // now per each detected face draw a marker and detect eyes inside the face rect
            int facesCount = 0;
            for (int i = 0; i < rawFaces.Length; ++i)
            {
                Rect faceRect       = rawFaces[i];
                Rect faceRectScaled = faceRect * invF;
                using (Mat grayFace = new Mat(gray, faceRect))
                {
                    // another trick: confirm the face with eye detector, will cut some false positives
                    if (cutFalsePositivesWithEyesSearch && null != cascadeEyes)
                    {
                        Rect[] eyes = cascadeEyes.DetectMultiScale(grayFace);
                        if (eyes.Length == 0 || eyes.Length > 2)
                        {
                            continue;
                        }
                    }

                    // get face object
                    DetectedFace face = null;
                    if (Faces.Count < i + 1)
                    {
                        face = new DetectedFace(DataStabilizer, faceRectScaled);
                        Faces.Add(face);
                    }
                    else
                    {
                        face = Faces[i];
                        face.SetRegion(faceRectScaled);
                    }

                    // shape
                    facesCount++;
                    if (null != shapeFaces)
                    {
                        Point[] marks = shapeFaces.DetectLandmarks(gray, faceRect);

                        // we have 68-point predictor
                        if (marks.Length == 68)
                        {
                            // transform landmarks to the original image space
                            List <Point> converted = new List <Point>();
                            foreach (Point pt in marks)
                            {
                                converted.Add(pt * invF);
                            }

                            // save and parse landmarks
                            face.SetLandmarks(converted.ToArray());
                        }
                    }
                }
            }

            // log
            //UnityEngine.Debug.Log(String.Format("Found {0} faces", Faces.Count));
        }
    }
Exemple #19
0
        Mat EqualizarHistogramaHSV()
        {
            Mat imagemHSV = new Mat();

            //convert the BGR image to HSV
            Cv2.CvtColor(image, imagemHSV, ColorConversionCodes.BGR2HSV);
            Mat[] planosHSV;
            //split the HSV image into its 3 planes
            Cv2.Split(imagemHSV, out planosHSV);
            //split(imagemHSV, planosHSV);
            //take plane index 2 -- that is V (value), not H as the original comment claimed
            Mat H = planosHSV[2];

            int[] histograma = new int[256];
            for (int i = 0; i < 256; i++)
            {
                histograma[i] = 0;
            }

            int nivel = 0;

            for (int x = 0; x < H.Rows; x++)
            {
                for (int y = 0; y < H.Cols; y++)
                {
                    nivel              = H.At <byte>(x, y);
                    histograma[nivel] += 1;
                }
            }
            //histogram accumulated over the selected plane only

            var area = H.Rows * H.Cols;

            decimal[] p = histograma.Select(x => ((decimal)x) / area).ToArray();

            decimal[] pa = new decimal[256];
            for (int i = 0; i < 256; i++)
            {
                decimal amount = 0;
                for (int j = 0; j <= i; j++)
                {
                    amount += p[j];
                }
                pa[i] = amount;
            }
            int[] s = new int[256];
            for (int i = 0; i < 256; i++)
            {
                s[i] = (int)Math.Round((255 * pa[i]));
            }
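            // s[] is the standard equalization lookup table: s_i = round(255 * CDF(i)), with pa holding the
            // cumulative distribution of the selected plane.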

            for (int x = 0; x < H.Rows; x++)
            {
                for (int y = 0; y < H.Cols; y++)
                {
                    byte pixel = H.At <byte>(x, y);
                    H.Set <byte>(x, y, (byte)s[pixel]);
                }
            }

            //merge the 3 planes (H,S,V) back together
            Cv2.Merge(planosHSV, imagemHSV);
            Mat imagemSaida = new Mat();

            //convert the HSV image back to BGR
            Cv2.CvtColor(imagemHSV, imagemSaida, ColorConversionCodes.HSV2BGR);
            return(imagemSaida);
        }
        private void OpenImage_Click(object sender, EventArgs e)
        {
            OpenFileDialog Openfile = new OpenFileDialog();

            if (Openfile.ShowDialog() == DialogResult.OK)
            {
                Mat input     = new Mat(Openfile.FileName, ImreadModes.Color);
                Mat grayScale = new Mat();

                Tools.soften(ref input);
                OpenCvSharp.Point[][] contours;
                HierarchyIndex[]      hierarchie_indexes;


                Cv2.CvtColor(input, grayScale, ColorConversionCodes.BGR2GRAY); //input is 3-channel BGR; Canny below removes the soft transitions left by the blur
                Cv2.Canny(grayScale, grayScale, 50, 150, 3);
                Cv2.FindContours(grayScale, out contours, out hierarchie_indexes, mode: RetrievalModes.External, method: ContourApproximationModes.ApproxSimple);
                Cv2.DrawContours(input, contours, -1, color: Color.blue, 2, lineType: LineTypes.Link8, hierarchy: hierarchie_indexes, maxLevel: int.MaxValue);

                var handContour = Tools.FindBiggestContour(ref contours); // pick the biggest contour in the image -- the palm
                var hullPoints  = Cv2.ConvexHull(handContour);



                List <List <OpenCvSharp.Point> > all_pts          = new List <List <OpenCvSharp.Point> >();
                List <OpenCvSharp.Point>         exclusive_points = new List <OpenCvSharp.Point>();


                for (int it = 0; it < hullPoints.Length; it++)
                {
                    exclusive_points.Add(hullPoints[it]);
                }

                all_pts.Add(exclusive_points);


                //Cv2.Polylines(input, all_pts, true, color: new Scalar(0, 12, 255)); // outline the outer contour (convex hull) of the whole palm

                List <OpenCvSharp.Point> cnt = new List <OpenCvSharp.Point>();
                for (int it = 0; it < handContour.Length; it++)
                {
                    cnt.Add(handContour[it]);
                }

                // Find the palm center as the midpoint of the hull's bounding rectangle

                Rect bounding_rectangle = Cv2.BoundingRect(hullPoints);

                OpenCvSharp.Point center_of_palm = new OpenCvSharp.Point(
                    (bounding_rectangle.TopLeft.X + bounding_rectangle.BottomRight.X) / 2,
                    (bounding_rectangle.TopLeft.Y + bounding_rectangle.BottomRight.Y) / 2
                    );


                //Cv2.Circle(input, center: center_of_palm, radius: 6, color: new Scalar(17, 255, 21), lineType: LineTypes.Link4, thickness: 8);


                //for (int i = 0; i < hullPoints.Length; i++)
                //{

                //    Cv2.Circle(input, center: hullPoints[i], radius: 6, color: Color.unfiltered);
                //}

                Tools.SimplifyNeighbors(ref exclusive_points, ref center_of_palm);
                Tools.RemoveWristPoints(ref exclusive_points, input.Cols, input.Rows); // drop hull points touching the image border -- in this setup, the wrist


                for (int i = 0; i < exclusive_points.Count; i++)
                {
                    Cv2.Circle(input, center: exclusive_points[i], radius: 2, color: Color.red, thickness: 6);
                }

                Cv2.Resize(input, input, dsize: new OpenCvSharp.Size(640, 480));
                Viewport.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(input);
            }
        }
Exemple #21
0
        private void button1_Click(object sender, EventArgs e)
        {
            int camindex  = comboBox2.SelectedIndex;
            var capture   = new VideoCapture(camindex);
            int fps       = 30;
            int sleepTime = (int)Math.Round((decimal)1000 / fps);

            using (var window = new Window("capture"))
            {
                Mat img = new Mat();
                Mat dst = new Mat();
                while (enable == 1)
                {
                    capture.Read(img);
                    //if (int.Parse(textBox1.Text) == null) textBox1.Text = "0";
                    if (img.Empty())
                    {
                        break;
                    }

                    Cv2.CvtColor(img, dst, ColorConversionCodes.BGR2GRAY);
                    Cv2.Resize(dst, dst, new OpenCvSharp.Size(255, 255));
                    Cv2.AdaptiveThreshold(dst, dst, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 11, int.Parse(textBox1.Text));
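                    // textBox1 supplies the constant C subtracted from each pixel's local mean; larger values
                    // push more of the image to white. (int.Parse will throw if the box is empty.)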
                    //dst2 = cv2.adaptiveThreshold(gray,255,cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY,11,float(args[1]))

                    int height = dst.Height;
                    int width  = dst.Width;
                    unsafe
                    {
                        int           kokuten = 0;
                        StringBuilder LK      = new StringBuilder(""); //run-length segments from odd-numbered scan lines ("kisuu" = odd)
                        StringBuilder LG      = new StringBuilder(""); //run-length segments from even-numbered scan lines ("guusuu" = even)



                        byte *b = dst.DataPointer;
                        for (int i = 0; i < height; i++)
                        {
                            int oldx = 0;
                            int flug = 0;
                            for (int j = 0; j < width; j++)
                            {
                                byte valueAt = *b;   // read the current pixel *before* advancing; the original advanced first, skipping pixel 0 and reading one byte past the end
                                b++;
                                if (valueAt == 0)
                                {
                                    flug += 1;
                                    if (flug == 1)
                                    {
                                        oldx = j;
                                    }
                                }
                                else
                                {
                                    if (flug != 0)
                                    {
                                        kokuten++;
                                        if (i % 2 == 0)
                                        {
                                            //snprintf(oneelement, sizeof(oneelement), "%d %d %d\n", oldx, x - 1, y);
                                            //strcat(LK, oneelement);
                                            LK.AppendFormat("{0} {1} {2}\n", oldx, j - 1, i);
                                        }
                                        else
                                        {
                                            //snprintf(oneelement, sizeof(oneelement), "%d %d %d\n", oldx, x - 1, y);
                                            //strcat(LG, oneelement);
                                            LG.AppendFormat("{0} {1} {2}\n", oldx, j - 1, i);
                                        }
                                        flug = 0;
                                    }
                                }
                            }
                            if (flug != 0)
                            {    //last pixel
                                kokuten++;
                                if (i % 2 == 0)
                                {
                                    //char oneelement[64] = "";
                                    //snprintf(oneelement, sizeof(oneelement), "%d %d %d\n", oldx, 255, y);
                                    //strcat(LK, oneelement);
                                    LK.AppendFormat("{0} {1} {2}\n", oldx, 255, i);
                                }
                                else
                                {
                                    //char oneelement[64] = "";
                                    //snprintf(oneelement, sizeof(oneelement), "%d %d %d\n", oldx, 255, y);
                                    //strcat(LG, oneelement);
                                    LG.AppendFormat("{0} {1} {2}\n", oldx, 255, i);
                                }
                                flug = 0;
                            }
                        }

                        LK.Append(LG);
                        queue.Enqueue(LK);
                    }



                    window.ShowImage(dst);
                    Cv2.WaitKey(sleepTime);
                }
                Cv2.DestroyWindow("capture");
            }
        }
Exemple #22
0
        private void MakeImageVintage(string name, Mat img)
        {
            int dataSize = (int)(img.Total() * img.Channels());

            byte[] imgData        = new byte[dataSize];
            byte[] vintageImgData = new byte[dataSize];
            Mat    vintageImg     = new Mat();

            img.ConvertTo(vintageImg, -1, 1.25, -35); //Contrast and Brightness adjustments
            var mat3    = new Mat <Vec3b>(vintageImg);
            var indexer = mat3.GetIndexer();
            //Cv2.CvtColor(vintageImg, vintageImg, ColorConversionCodes.RGB2HSV); //Convert to HSV
            Mat hsvImg = new Mat();

            Cv2.CvtColor(vintageImg, hsvImg, ColorConversionCodes.BGR2HSV);
            ProgressBar.Value++;
            Mat[] hsv = Cv2.Split(hsvImg);
            hsv[0] -= 5;  //Hue shift
            hsv[1] += 20; //Saturation increase
            hsv[2] -= 10; //Value (brightness) decrease
            Cv2.Merge(hsv, hsvImg);
            Cv2.CvtColor(hsvImg, vintageImg, ColorConversionCodes.HSV2BGR);
            ProgressBar.Value++;
            magentaOverlay = new Mat(img.Rows, img.Cols, img.Type(), new Scalar(144, 0, 255)); //BGR
            double temp;

            for (int y = 0; y < vintageImg.Height; y++)
            {
                for (int x = 0; x < vintageImg.Width; x++)
                {
                    Vec3b pix = vintageImg.At <Vec3b>(y, x);//BGR
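                    // Per-channel tone mapping (BGR order): lift and compress blue, run green through a cubic
                    // curve, and boost red by 12.5% -- together producing the warm vintage cast.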

                    temp = pix.Item0 * 0.7 + 32;
                    if (temp >= 255)
                    {
                        temp = 255;
                    }
                    pix.Item0 = (byte)temp;

                    temp = 0.00001 * Math.Pow(pix.Item1, 3) - 0.006 * Math.Pow(pix.Item1, 2) + 1.9 * pix.Item1;
                    if (temp >= 255)
                    {
                        temp = 255;
                    }
                    pix.Item1 = (byte)temp;

                    temp = 1.125 * pix.Item2;
                    if (temp >= 255)
                    {
                        temp = 255;
                    }
                    pix.Item2 = (byte)temp;

                    vintageImg.At <Vec3b>(y, x) = pix;
                }
            }
            Cv2.AddWeighted(magentaOverlay, 0.12, vintageImg, 0.9, 0, vintageImg);
            vintageImg.ConvertTo(vintageImg, -1, 1.1, -10);
            ProgressBar.Value++;
            //Cv2.ImShow("Preview", vintageImg);
            Cv2.ImWrite(folderBrowserDialog.SelectedPath + "/" + name, vintageImg);
        }
Exemple #23
0
        /// <summary>
        ///     Get the bounds of the block containing text
        /// </summary>
        /// <param name="bmp"></param>
        /// <returns></returns>
        private Rectangle GetBoundsTextBlock(Bitmap bmp, bool cropTextBorder = false)
        {
            var firstY  = 0;
            var secondY = 0;
            var firstX  = 0;
            var secondX = 0;

            var cropW      = (int)(bmp.Width * 0.1);
            var cropH      = (int)(bmp.Height * 0.1);
            var cropBounds = new Rectangle(0 + cropW, 0 + cropH,
                                           bmp.Width - cropW * 2, bmp.Height - cropH * 2);

            var cropBmp = bmp.Clone(cropBounds, bmp.PixelFormat);

            using (var g = Graphics.FromImage(bmp))
            {
                g.Clear(System.Drawing.Color.White);
                g.CompositingMode = CompositingMode.SourceOver;
                g.DrawImage(cropBmp, cropW, cropH);
            }

            Mat mat;

            mat = bmp.ToMat();
            Cv2.CvtColor(mat, mat, ColorConversionCodes.RGB2GRAY);
            mat = mat.Blur(new Size(5, 5));

            mat = mat.Threshold(75, 255, ThresholdTypes.Binary);

            bmp = mat.ToBitmap();

            var bounds      = new Rectangle(0, 0, bmp.Width, bmp.Height);
            var newCoordSys = new NewScreenCoordinateSystem(bounds);

            var topCord   = newCoordSys.GetAllTopCoordinate().Reverse();
            var botCord   = newCoordSys.GetAllBottomCoordinate().Reverse();
            var leftCord  = newCoordSys.GetAllLeftCoordinate().Reverse();
            var rightCord = newCoordSys.GetAllRightCoordinate().Reverse();

            foreach (var item in topCord)
            {
                var color = bmp.GetPixel(item.X, item.Y);
                if (color.ToArgb() != System.Drawing.Color.White.ToArgb())
                {
                    if (cropTextBorder == false)
                    {
                        firstY = item.Y + 10;
                    }
                    else
                    {
                        firstY = item.Y;
                    }
                    break;
                }
            }

            foreach (var item in botCord)
            {
                var color = bmp.GetPixel(item.X, item.Y);
                if (color.ToArgb() != System.Drawing.Color.White.ToArgb())
                {
                    if (cropTextBorder == false)
                    {
                        secondY = item.Y - 10;
                    }
                    else
                    {
                        secondY = item.Y;
                    }
                    break;
                }
            }

            foreach (var item in leftCord)
            {
                var color = bmp.GetPixel(item.X, item.Y);
                if (color.ToArgb() != System.Drawing.Color.White.ToArgb())
                {
                    if (cropTextBorder == false)
                    {
                        firstX = item.X + 10;
                    }
                    else
                    {
                        firstX = item.X;
                    }
                    break;
                }
            }

            foreach (var item in rightCord)
            {
                var color = bmp.GetPixel(item.X, item.Y);
                if (color.ToArgb() != System.Drawing.Color.White.ToArgb())
                {
                    if (cropTextBorder == false)
                    {
                        secondX = item.X - 10;
                    }
                    else
                    {
                        secondX = item.X;
                    }
                    break;
                }
            }

            var imgBounds = new Rectangle(firstX, firstY, secondX - firstX, secondY - firstY);

            return(imgBounds);
        }
Exemple #24
0
        public void FindContours(string sLeftPictureFile, string sRightPictureFile)
        {
            Mat tokuLeft  = new Mat();
            Mat tokuRight = new Mat();
            Mat output    = new Mat();

            AKAZE akaze = AKAZE.Create();

            KeyPoint[] keyPointsLeft;
            KeyPoint[] keyPointsRight;

            Mat descriptorLeft  = new Mat();
            Mat descriptorRight = new Mat();

            DescriptorMatcher matcher; //matching strategy

            DMatch[] matches;          //match results between the two sets of feature vectors

            //load the left image (the original comment claimed grayscale + smoothing, but the code loads in color and does not smooth)
            Mat Lsrc = new Mat(sLeftPictureFile, ImreadModes.Color);

            //load the right image
            Mat Rsrc = new Mat(sRightPictureFile, ImreadModes.Color);

            //detect keypoints and compute their descriptors
            akaze.DetectAndCompute(Lsrc, null, out keyPointsLeft, descriptorLeft);
            akaze.DetectAndCompute(Rsrc, null, out keyPointsRight, descriptorRight);


            //draw image 1's keypoints onto tokuLeft
            Cv2.DrawKeypoints(Lsrc, keyPointsLeft, tokuLeft);
            Image imageLeftToku = BitmapConverter.ToBitmap(tokuLeft);

            pictureBox3.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox3.Image    = imageLeftToku;
            tokuLeft.SaveImage("result/LeftToku.jpg");



            //draw image 2's keypoints onto tokuRight
            Cv2.DrawKeypoints(Rsrc, keyPointsRight, tokuRight);
            Image imageRightToku = BitmapConverter.ToBitmap(tokuRight);

            pictureBox4.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox4.Image    = imageRightToku;
            tokuRight.SaveImage("result/RightToku.jpg");

            //brute-force matching
            matcher = DescriptorMatcher.Create("BruteForce");
            matches = matcher.Match(descriptorLeft, descriptorRight);

            Cv2.DrawMatches(Lsrc, keyPointsLeft, Rsrc, keyPointsRight, matches, output);
            output.SaveImage(@"result\output.jpg");

            int size         = matches.Count();
            var getPtsSrc    = new Vec2f[size];
            var getPtsTarget = new Vec2f[size];

            int count = 0;

            foreach (var item in matches)
            {
                var ptSrc    = keyPointsLeft[item.QueryIdx].Pt;
                var ptTarget = keyPointsRight[item.TrainIdx].Pt;
                getPtsSrc[count][0]    = ptSrc.X;
                getPtsSrc[count][1]    = ptSrc.Y;
                getPtsTarget[count][0] = ptTarget.X;
                getPtsTarget[count][1] = ptTarget.Y;
                count++;
            }

            // Estimate the transform hom that maps Src onto Target; robust estimation via RANSAC.
            var hom = Cv2.FindHomography(
                InputArray.Create(getPtsSrc),
                InputArray.Create(getPtsTarget),
                HomographyMethods.Ransac);

            // Apply the perspective transform hom to Src.
            Mat WarpedSrcMat = new Mat();

            Cv2.WarpPerspective(
                Lsrc, WarpedSrcMat, hom,
                new OpenCvSharp.Size(Rsrc.Width, Rsrc.Height));

            WarpedSrcMat.SaveImage(@"result\Warap.jpg");

            //show the warped left image
            Image imageLeftSyaei = BitmapConverter.ToBitmap(WarpedSrcMat);

            pictureBox5.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox5.Image    = imageLeftSyaei;


            //show the right image
            Image imageRightSyaei = BitmapConverter.ToBitmap(Rsrc);

            pictureBox6.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox6.Image    = imageRightSyaei;


            Mat LmatFloat = new Mat();

            WarpedSrcMat.ConvertTo(LmatFloat, MatType.CV_16SC3);
            Mat[] LmatPlanes = LmatFloat.Split();

            Mat RmatFloat = new Mat();

            Rsrc.ConvertTo(RmatFloat, MatType.CV_16SC3);
            Mat[] RmatPlanes = RmatFloat.Split();

            Mat diff0 = new Mat();
            Mat diff1 = new Mat();
            Mat diff2 = new Mat();


            Cv2.Absdiff(LmatPlanes[0], RmatPlanes[0], diff0);
            Cv2.Absdiff(LmatPlanes[1], RmatPlanes[1], diff1);
            Cv2.Absdiff(LmatPlanes[2], RmatPlanes[2], diff2);
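            // Per-channel absolute difference between the warped left image and the right image; the median
            // blurs below suppress one-off pixel noise before the channels are OR'd into a single change mask.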

            Cv2.MedianBlur(diff0, diff0, 5);
            Cv2.MedianBlur(diff1, diff1, 5);
            Cv2.MedianBlur(diff2, diff2, 5);

            diff0.SaveImage("result/diff0.jpg");
            diff1.SaveImage("result/diff1.jpg");
            diff2.SaveImage("result/diff2.jpg");

            Mat wiseMat = new Mat();

            Cv2.BitwiseOr(diff0, diff1, wiseMat);
            Cv2.BitwiseOr(wiseMat, diff2, wiseMat);

            wiseMat.SaveImage("result/wiseMat.jpg");

            Mat openingMat = new Mat();

            Cv2.MorphologyEx(wiseMat, openingMat, MorphTypes.Open, new Mat());

            Mat dilationMat = new Mat();

            Cv2.Dilate(openingMat, dilationMat, new Mat());
            Cv2.Threshold(dilationMat, dilationMat, 100, 255, ThresholdTypes.Binary);
            dilationMat.SaveImage(@"result\dilationMat.jpg");

            Mat LaddMat = new Mat();
            Mat RaddMat = new Mat();

            Console.WriteLine(dilationMat.GetType());
            Console.WriteLine(Rsrc.GetType());

            // dilationMat is grayscale, so convert it to the same color space as the blend targets
            Mat dilationScaleMat = new Mat();
            Mat dilationColorMat = new Mat();

            Cv2.ConvertScaleAbs(dilationMat, dilationScaleMat);
            Cv2.CvtColor(dilationScaleMat, dilationColorMat, ColorConversionCodes.GRAY2RGB);

            Cv2.AddWeighted(WarpedSrcMat, 0.3, dilationColorMat, 0.7, 0, LaddMat);
            Cv2.AddWeighted(Rsrc, 0.3, dilationColorMat, 0.7, 0, RaddMat);

            Image LaddImage = BitmapConverter.ToBitmap(LaddMat);

            pictureBox7.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox7.Image    = LaddImage;

            Image RaddImage = BitmapConverter.ToBitmap(RaddMat);

            pictureBox8.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox8.Image    = RaddImage;

            RaddMat.SaveImage(@"result\Result.jpg");

            MessageBox.Show("Done!");
        }
Exemple #25
0
        private static void DetectBallsForArticle()
        {
            var g_src   = new Mat("0.png", LoadMode.GrayScale);
            var src     = new Mat("0.png");
            var hsv_src = new Mat();

            Cv2.CvtColor(src, hsv_src, ColorConversion.RgbToHsv);

            var background     = new Mat("background.png");
            var g_background   = new Mat("background.png", LoadMode.GrayScale);
            var hsv_background = new Mat();

            Cv2.CvtColor(background, hsv_background, ColorConversion.RgbToHsv);
            var canny = new Mat();
            var dst2  = new Mat();

            Cv2.Canny(g_src, canny, 50, 200);
            Cv2.Threshold(src, dst2, 50, 200, OpenCvSharp.ThresholdType.Binary);
            //Cv2.Subtract(g_src, g_background, dst2);
            //Cv2.Absdiff(g_src, g_background, dst2);

            //Cv2.Subtract(src, background, dst2);
            Cv2.Absdiff(src, background, dst2);

            //dst2.ImWrite("diff.bmp");

            Mat[] dst2_channels;
            Cv2.Split(dst2, out dst2_channels);

            Mat[] background_channels;
            Cv2.Split(background, out background_channels);

            Mat[] hsv_background_channels;
            Cv2.Split(hsv_background, out hsv_background_channels);

            Mat[] hsv_src_channels;
            Cv2.Split(hsv_src, out hsv_src_channels);

            var div_0 = new Mat();

            //Cv2.Divide(dst2_channels[1], background_channels[1], div_0, scale:50);
            Cv2.Divide(background_channels[1], dst2_channels[1], div_0, scale: 40);

            Mat dst2_01  = new Mat();
            Mat dst2_12  = new Mat();
            Mat dst2_012 = new Mat();

            Cv2.Absdiff(dst2_channels[0], dst2_channels[1], dst2_01);
            Cv2.Absdiff(dst2_channels[1], dst2_channels[2], dst2_12);
            Cv2.Add(dst2_01, dst2_12, dst2_012);

            var hsv_diff = Enumerable.Range(0, 3).Select(i => new Mat()).ToArray();

            for (var i = 0; i < 3; ++i)
            {
                Cv2.Absdiff(hsv_src_channels[i], hsv_background_channels[i], hsv_diff[i]);
            }

            //Cv2.Compare(dst2_channels[2], t_dst, t_dst);

            var dst3 = new Mat();

            Cv2.Threshold(dst2_012, dst3, 60, 255, ThresholdType.Binary);
            //OpenCvSharp.CPlusPlus.Cv2.CvtColor(dst2, dst3, OpenCvSharp.ColorConversion.RgbToGray);

            //var circles = OpenCvSharp.CPlusPlus.Cv2.HoughCircles(dst3, OpenCvSharp.HoughCirclesMethod.Gradient, 1, 10, minRadius:10, maxRadius: 80);
            //foreach (var circle in circles)
            //  Console.WriteLine(circle);

            //Console.WriteLine(hsv_diff[0]);

            //hsv_diff[1].ImWrite("hsv_diff_s.bmp");
            DetectBallView(hsv_diff[1], hsv_diff[0]);
            return;   // NOTE: the debug windows below are unreachable; remove this return to inspect the intermediate channels

            //using (new Window("src image", src))
            //using (new Window("dst image", background))
            ////using (new Window("canny", canny))
            //using (new Window("dst2 image", dst2))
            //using (new Window("diff0", dst2_channels[1]))
            //using (new Window("bg0", background_channels[1]))
            //using (new Window("dst3 image", div_0))
            using (new Window("src h", hsv_src_channels[0]))
                using (new Window("bg h", hsv_background_channels[0]))
                    using (new Window("d h", hsv_diff[0]))
                        using (new Window("src s", hsv_src_channels[1]))
                            using (new Window("bg s", hsv_background_channels[1]))
                                using (new Window("d s", hsv_diff[1]))
                                    using (new Window("src v", hsv_src_channels[2]))
                                        using (new Window("bg v", hsv_background_channels[2]))
                                            using (new Window("d v", hsv_diff[2]))
                                            {
                                                Cv2.WaitKey();
                                            }
        }
        static void Main(string[] args)
        {
            Mat flow, cflow, gray, prevgray, img_bgr;

            Point[][]        contours;
            HierarchyIndex[] hierarchy;
            prevgray = new Mat();

            VideoCapture cap = new VideoCapture();

            cap.Open(0);
            int sleepTime = (int)Math.Round(1000 / cap.Fps);

            using (Window window = new Window("capture"))
                using (Mat frame = new Mat()) // Frame image buffer
                {
                    while (true)
                    {
                        cap.Read(frame);
                        if (frame.Empty())
                        {
                            break;
                        }
                        gray    = new Mat();
                        flow    = new Mat();
                        cflow   = new Mat();
                        img_bgr = new Mat();
                        Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                        if (prevgray.Empty())
                        {
                            prevgray = gray;
                        }
                        else
                        {
                            Cv2.CalcOpticalFlowFarneback(prevgray, gray, flow, 0.5, 5, 16, 3, 5, 1.2, OpticalFlowFlags.FarnebackGaussian);
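                            // Farneback parameters: pyramid scale 0.5, 5 levels, window 16, 3 iterations,
                            // poly_n 5, poly_sigma 1.2 -- a fairly standard dense optical flow configuration.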
                            Cv2.CvtColor(prevgray, cflow, ColorConversionCodes.GRAY2BGR);
                            drawOptFlowMap(ref flow, ref cflow, 1.5, 16, new Scalar(0, 0, 255));
                            drawHsv(flow, out img_bgr);
                            Mat gray_bgr = new Mat();
                            gray_bgr = Mat.Zeros(frame.Rows, frame.Cols, MatType.CV_8UC1);
                            Cv2.CvtColor(img_bgr, gray_bgr, ColorConversionCodes.BGR2GRAY);
                            Cv2.Normalize(gray_bgr, gray_bgr, 0, 255, NormTypes.MinMax, MatType.CV_8UC1);
                            Cv2.Blur(gray_bgr, gray_bgr, new Size(3, 3));

                            // Detect edges using Threshold
                            Mat img_thresh = new Mat();
                            img_thresh = Mat.Zeros(frame.Rows, frame.Cols, MatType.CV_8UC1);
                            Cv2.Threshold(gray_bgr, img_thresh, 155, 255, ThresholdTypes.BinaryInv);
                            Cv2.FindContours(img_thresh, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);

                            if (contours.Length == 0)
                            {
                                throw new NotSupportedException("Couldn't find any object in the image.");
                            }

                            for (int i = 0; i < contours.Length; i++)
                            {
                                Rect box = Cv2.BoundingRect(contours[i]);
                                if (box.Width > 50 && box.Height > 50 && box.Width < 900 && box.Height < 680)
                                {
                                    Cv2.Rectangle(frame,
                                                  box.TopLeft, box.BottomRight,
                                                  new Scalar(0, 255, 0), 4);
                                }
                            }
                            window.Image = frame;
                            Char c = (Char)Cv2.WaitKey(1);
                            if (c == 27)
                            {
                                break;
                            }
                            Swap <Mat>(ref gray, ref prevgray);
                        }
                    }
                }
        }
Exemple #27
0
        static Mat DetectBallView(Mat s, Mat h)
        {
            //Console.WriteLine(mat);
            //Console.WriteLine("{0}:{1}", mat.Width, mat.Height);
            //Console.WriteLine("{0}, {1}, {2}", mat.Step(0), mat.Step(), mat.Step(1));
            //return;
            const int ballWidth = 36;

            var balls = DetectBalls(s, h, ballWidth);


            var detectM = QuickDetectBalls_Field(s.Threshold(20, 255, ThresholdType.Binary), ballWidth);
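            // QuickDetectBalls_Field is a project helper (not shown); judging by the loop below, it appears to
            // return a grid of non-zero pixel counts per (ballWidth / 3)-sized cell of the thresholded image.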

            if (true)
            {
                var debugMat = new Mat();
                Cv2.CvtColor(s, debugMat, ColorConversion.GrayToRgb);

                const int ballWidth_3 = ballWidth / 3;

                var detectCount    = 0;
                var allDetectCount = 0;
                for (var y = 0; y < detectM.GetLength(1); ++y)
                {
                    for (var x = 0; x < detectM.GetLength(0); ++x)
                    {
                        if (detectM[x, y] > 0.8 * ballWidth_3 * ballWidth_3)
                        {
                            debugMat.Circle(x * ballWidth_3 + ballWidth_3 / 2, y * ballWidth_3 + ballWidth_3 / 2, 2, Color.LightGreen.ToScalar(), -1);
                            detectCount++;
                        }
                        allDetectCount++;
                    }
                }

                foreach (var ball in balls)
                {
                    debugMat.Circle(ball.Point.X, ball.Point.Y, ballWidth / 2, Color.Orange.ToScalar(), thickness: 1);
                    debugMat.PutText(ball.H.ToString(), new OpenCvSharp.CPlusPlus.Point(ball.Point.X - ballWidth / 4, ball.Point.Y), FontFace.HersheySimplex, 0.5, Color.Red.ToScalar());
                }

                if (true) // quick visual sanity check of RotatePointAroundCenter
                {
                    var p      = new Point(400, 150);
                    var center = new Point(400, 300);

                    var resPoint  = RotatePointAroundCenter(p, center, 90);
                    var resPoint2 = RotatePointAroundCenter(p, center, 45);
                    var resPoint3 = RotatePointAroundCenter(p, center, 135);
                    debugMat.Circle(resPoint.X, resPoint.Y, 3, Color.LightBlue.ToScalar(), -1);
                    debugMat.Circle(resPoint2.X, resPoint2.Y, 3, Color.LightBlue.ToScalar(), -1);
                    debugMat.Circle(resPoint3.X, resPoint3.Y, 3, Color.LightBlue.ToScalar(), -1);
                    debugMat.Circle(center.X, center.Y, 3, Color.LightBlue.ToScalar(), -1);
                    debugMat.Circle(p.X, p.Y, 3, Color.LightBlue.ToScalar(), -1);
                }

                Console.WriteLine("detect count: {0}%", 100 * detectCount / allDetectCount);
                debugMat.ImWrite("q.bmp");
                return debugMat;
            }
        }
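
DetectBallView also calls RotatePointAroundCenter, which is not shown in this excerpt. The debug block above is consistent with a plain 2D rotation about a center point; a minimal sketch (assumed implementation, angle in degrees, using System.Math):

        static Point RotatePointAroundCenter(Point p, Point center, double angleDegrees)
        {
            double rad = angleDegrees * Math.PI / 180.0;
            double cos = Math.Cos(rad);
            double sin = Math.Sin(rad);
            double dx  = p.X - center.X;
            double dy  = p.Y - center.Y;

            // Rotate the offset vector, then translate back to the center
            return new Point(
                (int)Math.Round(center.X + dx * cos - dy * sin),
                (int)Math.Round(center.Y + dx * sin + dy * cos));
        }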
Exemple #28
0
    // Simple example of Canny edge detection, plus Haar-cascade face and eye detection
    void ProcessImage(Mat _image)
    {
        Cv2.Flip(_image, _image, FlipMode.X);
        Cv2.Canny(_image, cannyImage, 100, 100);
        var grayImage = new Mat();

        Cv2.CvtColor(_image, grayImage, ColorConversionCodes.BGR2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);

        var face_cascade = new CascadeClassifier();

        face_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_frontalface_default.xml");
        var eye_cascade = new CascadeClassifier();

        eye_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_eye_tree_eyeglasses.xml");
        //var right_ear_cascade = new CascadeClassifier();
        //right_ear_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_mcs_leftear.xml");
        //var left_ear_cascade = new CascadeClassifier();
        //left_ear_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_mcs_rightear.xml");
        //var mouth_cascade = new CascadeClassifier();
        //mouth_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_mcs_mouth.xml");
        //Debug.Log(" ");

        var faces = face_cascade.DetectMultiScale(
            image: grayImage,
            scaleFactor: 1.3,
            minNeighbors: 5,
            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            minSize: new Size(100, 100)
            );

        Bounds  meshRendererBounds = GetComponentInChildren<MeshRenderer>().bounds;
        Vector3 meshRendererCenter = meshRendererBounds.center;
        Vector3 maxBound           = meshRendererBounds.max;
        Vector3 minBound           = meshRendererBounds.min;

        OpenCvSharp.Rect rect      = new OpenCvSharp.Rect((int)meshRendererCenter.x + 350, (int)meshRendererCenter.y + 50, 600, 600);
        var global_rectangle_color = Scalar.FromRgb(0, 0, 255);

        Cv2.Rectangle(_image, rect, global_rectangle_color, 3);
        //Console.WriteLine("Detected faces: {0}", faces.Length);
        //Debug.Log(faces.Length);

        //var rnd = new System.Random();

        var face_count = 0;
        var eye_count  = 0;

        //var ear_count = 0;
        //var mouth_count = 0;
        foreach (var faceRect in faces)
        {
            var detectedFaceImage = new Mat(_image, faceRect);
            //Cv2.ImShow(string.Format("Face {0}", face_count), detectedFaceImage);
            //Cv2.WaitKey(1); // do events

            var facec_rectangle_color = Scalar.FromRgb(255, 0, 0);
            Cv2.Rectangle(_image, faceRect, facec_rectangle_color, 3);


            var detectedFaceGrayImage = new Mat();
            Cv2.CvtColor(detectedFaceImage, detectedFaceGrayImage, ColorConversionCodes.BGR2GRAY); // _image is 3-channel BGR here (BGR2GRAY succeeded above), so BGRA2GRAY would throw


            var eyes = eye_cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.3,
                minNeighbors: 5,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(50, 50)
                );
            foreach (var eyeRect in eyes)
            {
                var detectedEyeImage = new Mat(_image, eyeRect);
                //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
                //Cv2.WaitKey(1); // do events

                var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
                Cv2.Rectangle(_image, eyeRect, eye_rectangle_color, 3);


                var detectedEyeGrayImage = new Mat();
                Cv2.CvtColor(detectedEyeImage, detectedEyeGrayImage, ColorConversionCodes.BGR2GRAY); // same fix as above: the source is 3-channel

                eye_count++;
            }


            // The commented-out ear and mouth detectors (left ear, right ear, mouth) followed
            // exactly the same pattern as the eye loop above: DetectMultiScale on grayImage with
            // identical parameters, Cv2.Rectangle around each hit, and an ear_count / mouth_count
            // increment using the cascades commented out near the top of this method.

            face_count++;
        }
        //Debug.Log(face_count);
        if (face_count == 1 && eye_count == 2 && !waitSoundEffect)
        {
            //Debug.Log(faces[0]);
            //Debug.Log(meshRendererCenter.x);
            //Debug.Log((int)meshRendererCenter.y + 50);
            Point origin = faces[0].Location;
            float width  = faces[0].Width;
            float height = faces[0].Height;
            // Check that the detected face rect lies entirely inside the photo zone
            if (origin.X > (int)meshRendererCenter.x + 350 &&
                origin.X + width < (int)meshRendererCenter.x + 350 + 600 &&
                origin.Y > (int)meshRendererCenter.y + 50 &&
                origin.Y + height < (int)meshRendererCenter.y + 50 + 600 && // 50 matches the rect's Y offset above; "5" was a typo
                width > 400 &&
                height > 400)
            {
                Debug.Log("Take photo !");
                TakePhoto();
                soundEffects.MakePhotoSound();
                waitSoundEffect = true;
            }
        }



        //Cv2.ImShow("Haar Detection", _image);
        //Cv2.WaitKey(1); // do events
    }
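
Note that the eye search above runs over the whole frame (grayImage), so eyes can be detected outside any face. A common refinement is to search only inside each face rectangle and shift the hits back to frame coordinates; a hedged sketch using the same OpenCvSharp API (the helper name and the ROI restriction are assumptions, not part of the original):

    // Hypothetical refinement: detect eyes only inside the face ROI,
    // then shift the results back into full-frame coordinates before drawing.
    void DrawEyesInFace(Mat _image, Mat grayImage, OpenCvSharp.Rect faceRect, CascadeClassifier eye_cascade)
    {
        // Restrict the search to the face region
        var faceRoi = new Mat(grayImage, faceRect);
        var eyes = eye_cascade.DetectMultiScale(faceRoi, 1.3, 5,
            HaarDetectionType.ScaleImage, new Size(20, 20));

        foreach (var e in eyes)
        {
            // DetectMultiScale returns ROI-relative rects, so offset by the face's origin
            var shifted = new OpenCvSharp.Rect(faceRect.X + e.X, faceRect.Y + e.Y, e.Width, e.Height);
            Cv2.Rectangle(_image, shifted, Scalar.FromRgb(0, 255, 0), 2);
        }
    }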
Exemple #29
0
        private async void DoCalibrationButton_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
        {
            var objList             = new List <Point3f>();
            var objPoints           = new List <Point3f[]>();
            var imgPoints           = new List <Point2f[]>();
            var chessboardSize      = new Size(7, 5);
            var terminationCriteria = new TermCriteria(CriteriaType.Eps | CriteriaType.MaxIter, 30, 0.001);

            for (int y = 0; y < chessboardSize.Height; y++)
            {
                for (int x = 0; x < chessboardSize.Width; x++)
                {
                    var point = new Point3f
                    {
                        X = x,
                        Y = y,
                        Z = 0
                    };

                    objList.Add(point);
                }
            }

            foreach (var ci in calibrateImages)
            {
                var       img     = new Mat(ci.Height, ci.Width, MatType.CV_8UC4, ci.Buffer);
                Mat       grayImg = new Mat();
                Point2f[] corners;

                Cv2.CvtColor(img, grayImg, ColorConversionCodes.RGBA2GRAY);

                var result = Cv2.FindChessboardCorners(grayImg, chessboardSize, out corners, ChessboardFlags.None);

                if (result)
                {
                    var winSize        = new Size(11, 11);
                    var zeroZone       = new Size(-1, -1);
                    var refinedCorners = Cv2.CornerSubPix(grayImg, corners, winSize, zeroZone, terminationCriteria);

                    objPoints.Add(objList.ToArray());
                    imgPoints.Add(refinedCorners); // store the sub-pixel corners rather than the raw ones

                    Cv2.DrawChessboardCorners(img, chessboardSize, refinedCorners, result);
                    Cv2.ImShow("img", img);
                    Cv2.WaitKey(500);
                }
            }

            if (objPoints.Count > 0)
            {
                var cameraMat    = new double[3, 3];
                var distCoeffVec = new double[14];
                var rVecs        = new Vec3d[0];
                var tVecs        = new Vec3d[0];

                var calResult = Cv2.CalibrateCamera(objPoints, imgPoints, new Size(frameWidth, frameHeight), cameraMat, distCoeffVec, out rVecs, out tVecs);
            }

            calibrateImages.Clear();

            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                ImageCount.Text = "Calibration Image Count: " + calibrateImages.Count;
            });
        }
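
CalibrateCamera above fills cameraMat and distCoeffVec but the example never applies them. One plausible use, sketched under the assumption that frame is a Mat grabbed from the same camera (the helper name is hypothetical; the arrays are wrapped in Mats by hand because CalibrateCamera filled plain .NET arrays):

        // Hedged sketch, not part of the original: undistort a frame with the computed intrinsics
        static Mat UndistortFrame(Mat frame, double[,] cameraMat, double[] distCoeffVec)
        {
            // Copy the 3x3 camera matrix into a CV_64F Mat
            var K = new Mat(3, 3, MatType.CV_64FC1);
            for (int r = 0; r < 3; r++)
                for (int c = 0; c < 3; c++)
                    K.Set(r, c, cameraMat[r, c]);

            // Copy the distortion coefficients into a 1xN Mat
            var D = new Mat(1, distCoeffVec.Length, MatType.CV_64FC1);
            for (int i = 0; i < distCoeffVec.Length; i++)
                D.Set(0, i, distCoeffVec[i]);

            var undistorted = new Mat();
            Cv2.Undistort(frame, undistorted, K, D);
            return undistorted;
        }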
Exemple #30
0
        static void Main1(string[] args)
        {
            Process[] processes  = Process.GetProcesses();
            Process   wzqProcess = null;

            foreach (var item in processes)
            {
                if (item.MainWindowTitle == "五子棋")
                {
                    Console.WriteLine(item.ProcessName);
                    Console.WriteLine(item.Id);
                    //window title
                    Console.WriteLine(item.MainWindowTitle);
                    Console.WriteLine(item.MainModule.FileName);
                    Console.WriteLine(item.MainModule.FileVersionInfo.FileVersion);
                    Console.WriteLine(item.MainModule.FileVersionInfo.FileDescription);
                    Console.WriteLine(item.MainModule.FileVersionInfo.Comments);
                    Console.WriteLine(item.MainModule.FileVersionInfo.CompanyName);
                    Console.WriteLine(item.MainModule.FileVersionInfo.FileName);
                    //product name
                    Console.WriteLine(item.MainModule.FileVersionInfo.ProductName);
                    Console.WriteLine(item.MainModule.FileVersionInfo.ProductVersion);
                    Console.WriteLine(item.StartTime);
                    Console.WriteLine(item.MainWindowHandle);
                    wzqProcess = item;
                    break;
                }
            }
            Bitmap bitmap = CaptureImage.Captuer(wzqProcess);

            if (bitmap == null)
            {
                return;
            }

            //bitmap.Save("a.bmp");
            //Process.Start("mspaint", "a.bmp");
            //board's top-left corner in the capture: 227, 129
            //bottom-right corner: 721, 621
            int width  = 721 - 227;
            int height = 621 - 129;
            int step   = width * 15 / 14 / 15; // one grid cell: the capture spans 14 cells (15 lines)

            Bitmap   wzqBoardImage = new Bitmap(width * 15 / 14, height * 15 / 14);
            Graphics g             = Graphics.FromImage(wzqBoardImage);

            //
            // Summary:
            //     Draws the specified portion of the specified System.Drawing.Image at the
            //     specified location and with the specified size.
            //
            // Parameters:
            //   image:
            //     The System.Drawing.Image to draw.
            //
            //   destRect:
            //     Rectangle structure that specifies the location and size of the drawn image.
            //     The image is scaled to fit the rectangle.
            //
            //   srcRect:
            //     Rectangle structure that specifies the portion of the image object to draw.
            //
            //   srcUnit:
            //     Member of the System.Drawing.GraphicsUnit enumeration that specifies the
            //     units of measure used by the srcRect parameter.
            g.DrawImage(bitmap,
                        new Rectangle(0, 0, wzqBoardImage.Width, wzqBoardImage.Height),
                        new Rectangle(227 - step / 2, 129 - step / 2, wzqBoardImage.Width, wzqBoardImage.Height),
                        GraphicsUnit.Pixel);
            g.Dispose();

            //Convert the Bitmap to an OpenCV Mat
            Mat boardMat = BitmapConverter.ToMat(wzqBoardImage);

            //Hough circle detection is sensitive to noise, so apply a median (or Gaussian) blur first; this can be skipped if the image is clean
            Mat blurBoardMat = new Mat();

            Cv2.MedianBlur(boardMat, blurBoardMat, 9);

            //Convert to grayscale
            Mat grayBoardMat = new Mat();

            Cv2.CvtColor(blurBoardMat, grayBoardMat, ColorConversionCodes.BGR2GRAY);

            //Hough circle detection: use the Hough transform to find the stones in the grayscale image
            CircleSegment[] circleSegments = Cv2.HoughCircles(grayBoardMat, HoughMethods.Gradient, 1, step * 0.4, 70, 30, (int)(step * 0.3), (int)(step * 0.5)); //dp, minDist, Canny threshold, accumulator threshold, min/max radius tied to the cell size

            foreach (var circleSegment in circleSegments)
            {
                Cv2.Circle(boardMat, (int)circleSegment.Center.X, (int)circleSegment.Center.Y, (int)circleSegment.Radius, Scalar.Red, 1, LineTypes.AntiAlias);
            }



            //Determine stone positions by visiting every board intersection
            int rows = 15;
            List <Tuple <int, int, int> > chessPointList = new List <Tuple <int, int, int> >();
            //Intensity range used to classify stone color (bright pixels = white stone)
            Scalar scalarLower = new Scalar(128, 128, 128);
            Scalar scalarUpper = new Scalar(255, 255, 255);

            //rows
            for (int i = 0; i < rows; i++)
            {
                //columns
                for (int j = 0; j < rows; j++)
                {
                    //Expected stone center at this intersection
                    Point2f point = new Point2f(j * step + 0.5f * step, i * step + 0.5f * step);
                    foreach (var circleSegment in circleSegments)
                    {
                        //A detected circle close enough to the intersection means a stone is present
                        if (circleSegment.Center.DistanceTo(point) < 0.5 * step)
                        {
                            //Check the stone's color: crop the square inscribed in the detected
                            //circle (centered on the stone) and measure its brightness.
                            //r^2 = a^2 + a^2  -->  a = sqrt(r^2 / 2), where a is half the square's side

                            double len       = Math.Sqrt(circleSegment.Radius * circleSegment.Radius / 2);
                            Rect   rect      = new Rect((int)(circleSegment.Center.X - len), (int)(circleSegment.Center.Y - len), (int)(len * 2), (int)(len * 2));
                            Mat    squareMat = new Mat(grayBoardMat, rect);

                            //Measure the fraction of bright pixels inside the square
                            Mat calculatedMat = new Mat();
                            Cv2.InRange(squareMat, scalarLower, scalarUpper, calculatedMat);
                            float result = 100f * Cv2.CountNonZero(calculatedMat) / (calculatedMat.Width * calculatedMat.Height);

                            chessPointList.Add(new Tuple <int, int, int>(i + 1, j + 1, result < 50 ? 0 : 1)); //0 = black stone, 1 = white stone
                            break;
                        }
                    }
                }
            }

            foreach (var item in chessPointList)
            {
                Console.WriteLine($"{item.Item1},{item.Item2},{item.Item3}");
            }
            Cv2.ImShow("boardMat", boardMat);
            Cv2.WaitKey();
        }
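
The collected (row, column, color) tuples are easiest to verify as a text rendering of the board. A small sketch under the same assumptions as the code above (15x15 grid, color 0 = black, 1 = white; the helper name is hypothetical):

        static void PrintBoard(List <Tuple <int, int, int> > chessPointList, int rows = 15)
        {
            // Start with an empty board of '.' cells
            var board = new char[rows, rows];
            for (int i = 0; i < rows; i++)
                for (int j = 0; j < rows; j++)
                    board[i, j] = '.';

            // chessPointList stores 1-based (row, column, color) tuples
            foreach (var p in chessPointList)
                board[p.Item1 - 1, p.Item2 - 1] = p.Item3 == 0 ? 'B' : 'W';

            for (int i = 0; i < rows; i++)
            {
                for (int j = 0; j < rows; j++)
                    Console.Write(board[i, j]);
                Console.WriteLine();
            }
        }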