public override void Actualizar(IEsCable nodo)
    {
        if (nodo == null)
        {
            return;
        }

        var matEntrante = nodo.MatOut();

        if (matEntrante == null)
        {
            return;
        }

        // if (generarNuevoMat)
        // {
        //     if (mat == null) mat = new Mat();
        // }
        // else
        {
            if (mat != matEntrante)
            {
                mat = matEntrante;
            }
        }
        Cv2.AdaptiveThreshold(mat, mat, valorMaximo, tipoDeAdaptativo, tipoDeUmbral, BlockSize, umbral);
        PropagarActualizacion();
    }
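Cv2.AdaptiveThreshold rejects block sizes that are even or smaller than 3, and the call above passes BlockSize through unchecked. A minimal guard, assuming BlockSize stays a user-configurable field as in this snippet (the Procesar example further down does the same), might look like:

        // Sketch only: clamp BlockSize to an odd value >= 3 before calling
        // Cv2.AdaptiveThreshold, which throws on even or too-small block sizes.
        if (BlockSize < 3)
        {
            BlockSize = 3;
        }
        else if (BlockSize % 2 == 0)
        {
            BlockSize++;
        }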
Example 2
        /// <summary>
        /// AdaptiveThreshold - more accurate than plain Otsu thresholding
        /// </summary>
        /// <param name="src">Source bitmap</param>
        /// <param name="dst">Binarized output bitmap</param>
        /// <returns>true: Pass, false: Fail</returns>
        public static bool AdaptiveThreshold(BitmapSource src, out BitmapSource dst)
        {
            using (Mat mat = BitmapSourceConverter.ToMat(src))
                using (Mat matbuf = new Mat())
                {
                    //Cv2.CvtColor
                    //(
                    //    mat,
                    //    matbuf,
                    //    ColorConversionCodes.BGR2GRAY
                    //);

                    Cv2.AdaptiveThreshold
                    (
                        mat,
                        matbuf,
                        255,
                        AdaptiveThresholdTypes.GaussianC,
                        ThresholdTypes.Binary,
                        9,
                        128
                    );

                    dst = matbuf.ToBitmapSource();
                }

            return(true);
        }
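Cv2.AdaptiveThreshold only accepts a single-channel 8-bit source, so if BitmapSourceConverter.ToMat yields a multi-channel Mat, the grayscale conversion that is commented out above is still needed first. A minimal sketch of that variant, assuming a 3-channel BGR input:

                    // Sketch: convert to grayscale first, because AdaptiveThreshold
                    // requires a single-channel 8-bit image.
                    using (Mat gray = new Mat())
                    {
                        Cv2.CvtColor(mat, gray, ColorConversionCodes.BGR2GRAY);
                        Cv2.AdaptiveThreshold(gray, matbuf, 255,
                            AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 9, 128);
                    }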
Example 3
        public string SolveThePuzzle(string filename, string key, string endpoint)
        {
            try
            {
                Mat imgSource = Cv2.ImRead(filename);
                Mat img       = new Mat();
                Cv2.CvtColor(imgSource, img, ColorConversionCodes.BGR2GRAY);
                Cv2.Blur(img, img, new Size(3, 3));
                Cv2.AdaptiveThreshold(img, img, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 15, 15);
                SegmentPuzzle(img, out img, out Rect puzzleRect);

                SplitToDigits(img, out List <Mat> digits, out List <int> puzzle, new Size(50, 50));
                Mat        digitsAll   = new Mat();
                List <Mat> blkNonEmpty = new List <Mat>();
                for (int k = 0; k != puzzle.Count; ++k)
                {
                    if (puzzle[k] != 0)
                    {
                        blkNonEmpty.Add(digits[k]);
                    }
                }
                Cv2.HConcat(blkNonEmpty.ToArray(), digitsAll);
                recognizeDigits(digitsAll, key, endpoint);

                using (new Window("Image", WindowMode.Normal, digitsAll))
                {
                    Cv2.WaitKey();
                }
                return("solved");
            }
            catch (Exception e)
            {
                return(e.Message);
            }
        }
        public dynamic PreProcessImage(ref Mat image, Mat sourceImage)
        {
            var copy = new Mat();

            try
            {
                Cv2.BilateralFilter(image, copy, 9, 75, 75);
                Cv2.AdaptiveThreshold(copy, copy, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 115, 4);
                Cv2.MedianBlur(copy, copy, 11);
                Cv2.CopyMakeBorder(copy, copy, 5, 5, 5, 5, BorderTypes.Constant, Scalar.Black);

                // Dispose the throwaway Mat that is only needed to obtain the Otsu threshold value.
                double otsu;
                using (var otsuDst = new Mat())
                    otsu = Cv2.Threshold(copy, otsuDst, 0, 255, ThresholdTypes.Binary | ThresholdTypes.Otsu);
                Cv2.Canny(copy, copy, otsu, otsu * 2, 3, true);
            }
            catch
            {
                copy.Dispose();
                throw;
            }

            image.Dispose();
            image = copy;

            return(null);
        }
        /// <summary>
        /// Angle correction (deskew)
        /// </summary>
        /// <param name="Image原图">Source image</param>
        /// <returns>Deskewed copy of the source image</returns>
        public Mat The_Angle_correct(Mat Image原图)
        {
            Mat Image旋转纠正后原图 = new Mat();

            using (Mat Image灰度 = new Mat())
                using (Mat Image自适应阈值化 = new Mat())
                //using (Mat Image旋转纠正后原图 = new Mat())
                {
                    Cv2.CvtColor(Image原图, Image灰度, ColorConversionCodes.BGR2GRAY);
                    Cv2.AdaptiveThreshold(~Image灰度, Image自适应阈值化, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 15, -2);
                    // Find contours
                    OpenCvSharp.Point[][] contours查找的轮廓 = Find_the_outline(Image自适应阈值化);

                    for (int i = 0; i < contours查找的轮廓.Length; i++)
                    {
                        RotatedRect rotaterect倾斜 = Cv2.MinAreaRect(contours查找的轮廓[i]);
                        if (rotaterect倾斜.Size.Height * rotaterect倾斜.Size.Width > 1000000)
                        {
                            Rect  rect轮廓 = Cv2.BoundingRect(contours查找的轮廓[i]);
                            float angle;
                            Image旋转纠正后原图.SetTo(255);
                            Point2f center = rotaterect倾斜.Center; // center point
                            if (rotaterect倾斜.Size.Height > rotaterect倾斜.Size.Width)
                            {
                                angle = rotaterect倾斜.Angle;
                            }
                            else
                            {
                                angle = 90.0f + rotaterect倾斜.Angle;

                                //Point2f center =(Point2f)90.0- rotaterect倾斜.Center;
                            }
                            using (Mat M2 = Cv2.GetRotationMatrix2D(center, angle, 1))
                            {
                                //Cv2.WarpAffine(Image直接阈值化, Image旋转后灰度图, M2, Image直接阈值化.Size(), InterpolationFlags.Linear, 0, new Scalar(0)); // affine transform
                                Cv2.WarpAffine(Image原图, Image旋转纠正后原图, M2, Image自适应阈值化.Size(), InterpolationFlags.Linear, 0, new Scalar(0));
                                // affine transform
                                //Image原图.Rectangle(rect轮廓, Scalar.Red);
                                //Image旋转后原图.Rectangle(rect轮廓, Scalar.Gray);
                                //var window旋转后灰度图 = new Window("旋转后灰度图");
                                //window旋转后灰度图.Image = Image旋转后灰度图;
                                //var window旋转后原图 = new Window("旋转后原图");
                                //window旋转后原图.Image = Image旋转后原图;
                            }
                        }
                    }

                    return(Image旋转纠正后原图);
                    // Bottom-Right


                    //using (new Window("Image原图", WindowMode.AutoSize, Image原图))
                    //using (new Window("Image灰度", WindowMode.AutoSize, Image灰度))
                    //using (new Window("Image自适应阈值化", WindowMode.AutoSize, Image自适应阈值化))
                    //using (new Window("Image旋转后原图", WindowMode.AutoSize, Image旋转后原图))
                    //{
                    //    Window.WaitKey(0);
                    //}
                }
        }
Example 6
        static List <Point[][]> AdaptiveThreshold_Based_Extract_Defect(Mat Src, List <OpenCvSharp.Point[]> contours_final)
        {
            //=========prepare adaptive threshold input
            Mat Adaptive_Src = Mat.Zeros(Src.Size(), MatType.CV_8UC1);

            // Use adaptive threshold to filter out the defects
            Cv2.GaussianBlur(Src, Adaptive_Src, new OpenCvSharp.Size(3, 3), 0, 0);

            /*//=====================================1008test== mask inner and outer to black===================================
             * OpenCvSharp.Point[][] test = new Point[1][];
             * test[0] = contours_final[1];
             * Cv2.DrawContours(Adaptive_Src, test, -1, 0, -1);
             * Cv2.DrawContours(Adaptive_Src, test, -1, 0, 10);
             * Adaptive_Src.SaveImage("adaptive_prepocessing.jpg");
             * //==================================================================================
             */

            Cv2.AdaptiveThreshold(Adaptive_Src, Adaptive_Src, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 45, 105 / 10);
            Adaptive_Src.SaveImage("adaptive.jpg");

            // Invert black and white (NOT operation)
            Mat Src_255 = new Mat(Adaptive_Src.Size(), MatType.CV_8UC1, new Scalar(255));

            Cv2.Subtract(Src_255, Adaptive_Src, Adaptive_Src);


            // denoise
            OpenCvSharp.Point[][] temp = new Point[1][];
            Point[][]             contours;
            HierarchyIndex[]      hierarchly;
            Cv2.FindContours(Adaptive_Src, out contours, out hierarchly, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);


            foreach (OpenCvSharp.Point[] contour_now in contours)
            {
                if (Cv2.ContourArea(contour_now) < 100)
                {
                    //Console.WriteLine("Arc Length: " + (Cv2.ArcLength(contour_now, true) + " Area: " + Cv2.ContourArea(contour_now))+" Length/Area:" +(Cv2.ArcLength(contour_now, true) / Cv2.ContourArea(contour_now)));
                    OpenCvSharp.Point[] approx = Cv2.ApproxPolyDP(contour_now, 0.000, true);
                    temp[0] = approx;
                    Cv2.DrawContours(Adaptive_Src, temp, -1, 0, -1);
                }
            }

            //Mat kernel = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(13, 7));
            //Adaptive_Src = Adaptive_Src.MorphologyEx(MorphTypes.Close, kernel);

            //========================= mask out the borders =======================================

            temp[0] = contours_final[0];
            Cv2.DrawContours(Adaptive_Src, temp, -1, 0, 30);
            temp[0] = contours_final[1];
            Cv2.DrawContours(Adaptive_Src, temp, -1, 0, 45);

            Adaptive_Src.SaveImage("a.jpg");
            // The defect image is ready; extract the defects with Find_Defect_Contour_and_Extract
            return(Find_Defect_Contour_and_Extract(Src, Adaptive_Src, contours_final));
        }
Example 7
        /// <summary>
        /// Image contour detection
        /// </summary>
        /// <param name="src">Source image</param>
        public static List <Point2f[]> Findarea(Mat src)
        {
            Mat img   = src;
            Mat gray  = new Mat();
            Mat black = new Mat();

            Point[][]        contours;
            HierarchyIndex[] hierarchy;
            Point2f[]        point2Fs = new Point2f[] { };
            List <Point2f[]> point2 = new List <Point2f[]>();
            Point            p0 = new Point(0, 0), p1 = new Point(0, 0), p2 = new Point(0, 0), p3 = new Point(0, 0);
            Mat soX = new Mat(), soY = new Mat();

            Cv2.CvtColor(img, gray, ColorConversionCodes.BGR2GRAY, 0);
            Cv2.Blur(gray, gray, new Size(10, 10));
            int thresh_size = (100 / 4) * 2 + 1; // adaptive threshold block size (must be odd)

            Cv2.AdaptiveThreshold(gray, black, 255, 0, ThresholdTypes.Binary, thresh_size, thresh_size / 3);
            new Window("二值图", WindowMode.FreeRatio, black);
            Cv2.FindContours(black, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple, null);
            int resultnum = 0;

            Point[][] Excontours = contours;
            for (int i = 0; i < hierarchy.Length; i++)
            {
                if (contours[i].Length < 100)
                {
                    continue;
                }
                RotatedRect rect = Cv2.MinAreaRect(contours[i]);
                point2Fs = rect.Points();
                Point[] po = change(rect.Points());
                //point2.Add(point2Fs);
                Excontours[resultnum] = po;
                for (int z = 0; z < point2Fs.Length; z++) // round to 2 decimal places
                {
                    point2Fs[z].X = (float)Math.Round(point2Fs[z].X, 2);
                    point2Fs[z].Y = (float)Math.Round(point2Fs[z].Y, 2);
                }
                point2.Add(point2Fs);
                for (int j = 0; j < 3; j++)
                {
                    p0 = new Point(point2Fs[j].X, point2Fs[j].Y);
                    p1 = new Point(point2Fs[j + 1].X, point2Fs[j + 1].Y);
                    Cv2.Line(img, p0, p1, Scalar.Red, 1, LineTypes.Link8);
                }
                p2 = new Point(point2Fs[3].X, point2Fs[3].Y);
                p3 = new Point(point2Fs[0].X, point2Fs[0].Y);
                Point TP = new Point((((p0.X + p1.X) / 2)), ((p1.Y + p2.Y) / 2));
                Cv2.Line(img, p2, p3, Scalar.Red, 1, LineTypes.Link8);
                resultnum++;
            }
            Console.WriteLine("剔除后的轮廓数:" + resultnum);
            return(point2);
            //Console.WriteLine(js);
            //new Window("result", WindowMode.FreeRatio, img);
            //Window.WaitKey(0);
        }
Example 8
        void Run(string file)
        {
            var input = File.ReadAllBytes(file);

            using (var mat = Mat.FromImageData(input))
                using (var grey = mat.CvtColor(ColorConversionCodes.BGR2GRAY))
                    using (var thresholded = new Mat())
                    {
                        Cv2.AdaptiveThreshold(grey, thresholded, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.BinaryInv, 15, 7);
                        thresholded.SaveImage(GetModifiedName(file, "thresholded"));

                        /* close the gaps between the artifacts by using dilation */
                        int dilateX = 15, dilateY = 3;
                        using (var dilateKernel = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(dilateX, dilateY)))
                            using (var morphed = new Mat())
                            {
                                Cv2.MorphologyEx(thresholded, morphed, MorphTypes.Dilate, dilateKernel);
                                morphed.SaveImage(GetModifiedName(file, "morphed"));

                                /* find external contours */
                                Mat[] contours; var hierarchy = new Mat();
                                Cv2.FindContours(morphed, out contours, hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);

                                /* draw contours */
                                using (var contourImage = new Mat())
                                {
                                    Cv2.CvtColor(grey, contourImage, ColorConversionCodes.GRAY2BGR);
                                    Cv2.DrawContours(contourImage, contours, -1, new Scalar(0, 0, 255), 1);
                                    contourImage.SaveImage(GetModifiedName(file, "contour"));
                                }

                                var imageHeight = mat.Height;
                                var yThreshold  = 2;

                                var items = contours
                                            .Select(c => new { Contour = c, BoundingBox = c.BoundingRect() })
                                            .ToArray();

                                /* filter all contours that are near the top border or bottom border */
                                var filteredTopBottom = items
                                                        .Where(i => i.BoundingBox.Top <= yThreshold ||
                                                               (imageHeight - i.BoundingBox.Bottom) <= yThreshold)
                                                        .ToArray();

                                var filteredContours = filteredTopBottom.Select(i => i.Contour).ToArray();

                                DrawContours(mat.Width, imageHeight, GetModifiedName(file, "filtered-top-bottom-contours"),
                                             filteredContours);

                                using (var fixedImage = grey.Clone())
                                {
                                    Cv2.DrawContours(fixedImage, filteredContours, -1, new Scalar(255, 255, 255), Cv2.FILLED);
                                    fixedImage.SaveImage(GetModifiedName(file, "fixed"));
                                }
                            }
                    }
        }
        private Mat MakeThreshold()
        {
            var gray      = new Mat();
            var grayThres = new Mat();

            Cv2.CvtColor(MaskedFrame(), gray, ColorConversionCodes.BGR2GRAY);
            Cv2.AdaptiveThreshold(gray, grayThres, 150, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 5, 5);

            return(grayThres);
        }
Example 10
        // TODO: Optional<Mat>
        public HistChessboardModel Do(IVideo video)
        {
            HistChessboardModel model = null;
            Mat frame;
            Mat gray        = new Mat();
            Mat thresh      = new Mat();
            var corners     = new Point2f[4];
            var patternSize = new Size(7, 3);
            var threshWin   = new Window("Adaptive Threshold");

            // TODO: each iteration, try different block sizes for the adaptive threshold (height / 4, height / 2, etc)
            do
            {
                frame = video.GetNextFrame();
                if (frame != null && frame.Width + frame.Height > 0)
                {
                    Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                    //Cv2.MedianBlur(gray, gray, 5); // Disallows chessboard to be found, because of how it opens/closes corners
                    Cv2.AdaptiveThreshold(gray, thresh,
                                          maxValue: 255.0,
                                          adaptiveMethod: AdaptiveThresholdTypes.GaussianC,
                                          thresholdType: ThresholdTypes.Binary,
                                          blockSize: (gray.Height / 4) | 1,
                                          c: 0.0);
                    threshWin.ShowImage(thresh);

                    var found = Cv2.FindChessboardCorners(thresh, patternSize, out corners,
                                                          ChessboardFlags.None); //, ChessboardFlags.AdaptiveThresh | ChessboardFlags.NormalizeImage);

                    //frame.CopyTo(output);
                    //Cv2.DrawChessboardCorners(output, patternSize, corners, found);
                    //if (!found) Console.Out.WriteLine("Chessboard not found :( ");

                    if (found)
                    {
                        var       boardPoints = new Point2d[21];
                        Point2d[] foundPoints = OCVUtil.Point2fTo2d(corners);
                        for (int c = 0; c < 7; c++)
                        {
                            for (int r = 0; r < 3; r++)
                            {
                                boardPoints[r * 7 + c] = new Point2d((c + 1.0), (r + 3.0));
                            }
                        }

                        var boardToImageTransform = Cv2.FindHomography(boardPoints, foundPoints);
                        var imageToBoardTransform =
                            boardToImageTransform.Inv(); //Cv2.FindHomography(foundPoints, boardPoints);
                        model = new HistChessboardModel(boardToImageTransform, imageToBoardTransform, frame);
                    }
                }
            } while (frame != null && model == null);

            return(model);
        }
Example 11
        public Mat GetConvertImage(Mat input)
        {
            Mat result = new Mat();

            Cv2.CvtColor(input, result, ColorConversionCodes.BGR2GRAY);
            Cv2.AdaptiveThreshold(result, result, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, _blockSize, _c);

            Cv2.CvtColor(result, result, ColorConversionCodes.GRAY2BGR);

            return(result);
        }
Example 12
    void Update()
    {
        // Performance measuring purposes only, avoid reading data in Update()
        mat = Cv2.ImRead(CvUtil.GetStreamingAssetsPath("lena.png"), ImreadModes.GrayScale);

        // Timer to switch between different thresholds
        timeElapsed += Time.deltaTime;
        if (timeElapsed > 1.5f)
        {
            timeElapsed = 0;
            mode++;
            if (mode > 4)
            {
                mode = 0;
            }
        }

        Cv2.Threshold(mat, binaryInvMat, 0, 255, ThresholdTypes.BinaryInv | ThresholdTypes.Otsu);
        Cv2.Threshold(mat, toZeroMat, 0, 255, ThresholdTypes.Tozero | ThresholdTypes.Otsu);
        Cv2.Threshold(mat, toZeroInvMat, 0, 255, ThresholdTypes.TozeroInv | ThresholdTypes.Otsu);
        Cv2.AdaptiveThreshold(mat, gaussianMat, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 7, 8);
        Cv2.Subtract(gaussianMat, toZeroMat, subtractMat);

        switch (mode)
        {
        case 0:
            mat = subtractMat;
            break;

        case 1:
            mat = binaryInvMat;
            break;

        case 2:
            mat = toZeroMat;
            break;

        case 3:
            mat = gaussianMat;
            break;

        case 4:
            mat = toZeroInvMat;
            break;

        default:
            break;
        }


        Cv2.CvtColor(mat, matRGBA, ColorConversionCodes.GRAY2RGBA);
        CvConvert.MatToTexture2D(matRGBA, ref tex);
        rawImage.texture = tex;
    }
Example 13
        public IplImage Binary(IplImage src, int threshold) // Binarization method
        {
            bin = new IplImage(src.Size, BitDepth.U8, 1);
            Cv.CvtColor(src, bin, ColorConversion.RgbToGray);

            Mat m_bin = new Mat(bin);

            Cv2.AdaptiveThreshold(m_bin, m_bin, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 21, 10);
            bin = m_bin.ToIplImage();

            return(m_bin.ToIplImage());
        }
Example 14
        public static SoftwareBitmap SketchEffect(SoftwareBitmap Input)
        {
            using (Mat inputMat = Input.SoftwareBitmapToMat())
                using (Mat outputMat = new Mat(inputMat.Rows, inputMat.Cols, MatType.CV_8UC4))
                {
                    Cv2.CvtColor(inputMat, outputMat, ColorConversionCodes.BGRA2GRAY);
                    Cv2.AdaptiveThreshold(outputMat, outputMat, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 11, 2);
                    Cv2.CvtColor(outputMat, outputMat, ColorConversionCodes.GRAY2BGRA);

                    return(outputMat.MatToSoftwareBitmap());
                }
        }
Example 15
        public static List <Rectangle> DetectCarplates(Bitmap image)
        {
            var result = new List <Rectangle>();

            var original = BitmapConverter.ToMat(image);
            var gray     = image.ToGrayscaleMat();
            var src      = new Mat();

            gray.CopyTo(src);
            var threshImage = new Mat();

            Cv2.AdaptiveThreshold(gray, threshImage, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 9, 9);
            SaveImage(threshImage, "threshhold");

            Point[][]        contours;
            HierarchyIndex[] hierarchyIndexes;
            Cv2.FindContours(threshImage, out contours, out hierarchyIndexes, RetrievalModes.CComp, ContourApproximationModes.ApproxSimple);

            if (contours.Length == 0)
            {
                return(new List <Rectangle>());
            }

            var sorted = contours
                         .OrderByDescending(x => Cv2.ContourArea(x))
                         .Take(100)
                         .Where(x =>
            {
                var rect = Cv2.BoundingRect(x);
                return(rect.IsHorizontalCarplateBlock() && GetMeanColor(gray, rect)[0] > 135);
            })
                         .ToList();

            foreach (var contour in sorted)
            {
                var boundingRect = Cv2.BoundingRect(contour);

                result.Add(boundingRect.ToRectangle());
                var meanColor = GetMeanColor(gray, boundingRect);

                Cv2.Rectangle(original,
                              new Point(boundingRect.X, boundingRect.Y),
                              new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                              new Scalar(0, 0, 255), 2);

                Cv2.PutText(original, meanColor[0].ToString(), new Point(boundingRect.X + 10, boundingRect.Y + 10), HersheyFonts.HersheyPlain, 0.75, Scalar.Red);
            }

            SaveImage(original, "detected");

            return(result);
        }
Example 16
        static Mat MakeAdaptiveThresholding(Mat image)
        {
            Mat tmp = new Mat();
            Mat ret = new Mat();

            Mat srcGray = new Mat();

            Cv2.CvtColor(image, srcGray, ColorConversionCodes.BGR2GRAY);

            Cv2.AdaptiveThreshold(srcGray, tmp, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 5, 3);
            Cv2.MedianBlur(tmp, ret, 3);
            return(ret);
        }
Example 17
        static void Main(string[] args)
        {
            Mat src    = Cv2.ImRead("swan.jpg");
            Mat gray   = new Mat(src.Size(), MatType.CV_8UC1);
            Mat binary = new Mat(src.Size(), MatType.CV_8UC1);

            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
            Cv2.AdaptiveThreshold(gray, binary, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 25, 5);

            Cv2.ImShow("binary", binary);
            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
        }
Example 18
        private void button2_Click(object sender, EventArgs e)
        {
            using (Mat src = new Mat("F:\\Microsoft Visual Studio\\project\\yoloaforge\\yoloaforge\\a.jpg", ImreadModes.AnyColor | ImreadModes.AnyDepth))
            {
                // Convert to a grayscale image
                Mat dst = new Mat();
                Cv2.CvtColor(src, dst, ColorConversionCodes.BGR2GRAY);

                // Convert to a binary image

                /*
                 * API AdaptiveThreshold:
                 * Parameters: 1: input grayscale image (the '~' operator inverts the background)
                 *             2: output binary image
                 *             3: maximum value used for the binarization
                 *             4: adaptive method (enum; currently only two algorithms)
                 *             5: threshold type (enum; Binary is used here)
                 *             6: block size
                 *             7: constant C (may be positive, zero, or negative)
                 */
                Mat binImage = new Mat();
                Cv2.AdaptiveThreshold(~dst, binImage, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 15, -2);

                int xSize = dst.Cols / 16; // width
                int ySize = dst.Rows / 16; // height

                // Define structuring elements: new Size(xSize, 1) is a horizontal line element, new Size(1, ySize) a vertical line element
                InputArray kernelX = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(xSize, 1), new OpenCvSharp.Point(-1, -1));
                InputArray kernelY = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(1, ySize), new OpenCvSharp.Point(-1, -1));

                Mat result = new Mat();
                //// Erode
                //Cv2.Erode(binImage, result, kernelY);
                //// Dilate
                //Cv2.Dilate(result, result, kernelY);

                // Use an opening operation instead of separate erode and dilate steps
                Cv2.MorphologyEx(binImage, result, MorphTypes.Open, kernelY);
                Cv2.Blur(result, result, new OpenCvSharp.Size(3, 3), new OpenCvSharp.Point(-1, -1)); // Smooth the image with a normalized box filter
                Cv2.BitwiseNot(result, result); // Make the background white (invert the background values)


                using (new Window("result", WindowMode.Normal, result))
                    using (new Window("binImage", WindowMode.Normal, binImage))
                        using (new Window("dst", WindowMode.Normal, dst))
                            using (new Window("SRC", WindowMode.Normal, src))
                            {
                                Cv2.WaitKey(0);
                            }
            }
        }
Example 19
        public static BitmapSource AdaptiveThreshold(BitmapSource src)
        {
            using (Mat mat = BitmapSourceConverter.ToMat(src))
                using (Mat matbuf = new Mat())
                    using (Mat matbuf2 = new Mat())
                    {
                        // Convert to grayscale
                        Cv2.CvtColor
                        (
                            mat,
                            matbuf,
                            ColorConversionCodes.BayerBG2GRAY
                        );

                        // Stronger filter processing (left commented out)
                        //Cv2.BilateralFilter
                        //(
                        //    matbuf,
                        //    mat,
                        //    7,
                        //    35,
                        //    5
                        //);

                        Cv2.FastNlMeansDenoising
                        (
                            matbuf,
                            mat
                        );

                        Cv2.AdaptiveThreshold
                        (
                            mat,
                            matbuf,
                            255,
                            AdaptiveThresholdTypes.GaussianC,
                            ThresholdTypes.Binary,
                            9,
                            5
                        );

                        //Cv2.FastNlMeansDenoising
                        //(
                        //    matbuf,
                        //    mat

                        //);

                        return(matbuf.ToBitmapSource());
                    }
        }
Example 20
        private static Bitmap PrepareImage(Bitmap image)
        {
            //Mat src = new Mat(@"D:\tesseract4\docs\tables\balans_1kv_2013_21.jpg", ImreadModes.GrayScale);
            //var src = Cv2.ImRead(@"D:\tesseract4\docs\tables\balans_1kv_2013_21.jpg");
            var gray = image.ToGrayscaleMat();

            var bw = new Mat();

            Cv2.AdaptiveThreshold(~gray, bw, 256, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 25, -2);

            var horizontal = bw.Clone();
            var vertical   = bw.Clone();
            var scale      = 15;

            var horizontalSize      = horizontal.Cols / scale;
            var verticalSize        = vertical.Rows / scale;
            var horizontalStructure = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(horizontalSize, 1));
            var verticalStructure   = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(1, verticalSize));

            Cv2.Erode(horizontal, horizontal, horizontalStructure, new OpenCvSharp.Point(-1, -1));
            Cv2.Dilate(horizontal, horizontal, horizontalStructure, new OpenCvSharp.Point(-1, -1));

            Cv2.Erode(vertical, vertical, verticalStructure, new OpenCvSharp.Point(-1, -1));
            Cv2.Dilate(vertical, vertical, verticalStructure, new OpenCvSharp.Point(-1, -1));

            //SaveImage(vertical, "vertical");

            //Cv2.Canny(src, dst, 50, 200);

            //using (new Window(horizontal))
            //{
            //  Cv2.WaitKey();
            //}
            var mask = horizontal + vertical;
            //SaveImage(mask, "mask");
            var newMask = new Mat();

            Cv2.AdaptiveThreshold(~mask, newMask, 256, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 7, -2);
            //SaveImage(newMask, "newMask");

            newMask = mask + newMask;

            SaveImage(newMask, "hyperMask");

            //var withOutTable = gray + newMask;

            //SaveImage(withOutTable, "withOutTable");

            return(BitmapConverter.ToBitmap(mask));
        }
Example 21
        public static Mat Threshold(this Mat image)
        {
            var thresh = new Mat();

            Cv2.AdaptiveThreshold(
                src: image,
                dst: thresh,
                maxValue: 255.0,
                adaptiveMethod: AdaptiveThresholdTypes.GaussianC,
                thresholdType: ThresholdTypes.BinaryInv,
                blockSize: ADAPTIVE_THRESH_BLOCK_SIZE,
                c: ADAPTIVE_THRESH_WEIGHT);
            return(thresh);
        }
Example 22
 public Mat Procesar(Mat entra, Mat sale = null)
 {
     if (sale == null)
     {
         sale = entra;
     }
     if (blockSize < 3)
     {
         blockSize = 3;               // cannot be less than 3
     }
     else if (blockSize % 2 == 0)
     {
         blockSize++;                       // cannot be an even number
     }
     Cv2.AdaptiveThreshold(entra, sale, maxValue, adaptiveMethod, thresholdType, blockSize, C);
     return(sale);
 }
Example 23
        private void adaptive_Click(object sender, System.Windows.RoutedEventArgs e)
        {
            if (load == true)
            {
                Mat src = new Mat(fileName, 0);
                Mat dst = new Mat();

                Cv2.AdaptiveThreshold(src, dst, adaptive_max.Value, AdaptiveThresholdTypes.MeanC,
                                      ThresholdTypes.Binary, (int)adaptive_blocksize.Value, 3);

                showWindow(dst, "adaptive");
            }
            else
            {
                textLoad.Foreground = System.Windows.Media.Brushes.OrangeRed;
            }
        }
Example 24
        public bool FindSudoku(Mat src)
        {
            Cv2.CvtColor(src, gray, ColorConversionCodes.BGRA2GRAY);
            Cv2.AdaptiveThreshold(gray, gray, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 101, 5);
            Cv2.BitwiseNot(gray, gray);

            Cv2.FindContours(gray, out contours, out hierarchyIndex, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            if (!findContourOfPuzzle())
            {
                return(false);
            }

            unwarpSudokuSquare();

            return(true);
            //Cv2.DrawContours(src, contour_list, largestContourIndex, Scalar.Green,30);
        }
Example 25
        public int Otsu(Mat Source, OutputArray _Dst, OtsuType Type)
        {
            Mat Dst       = _Dst.GetMat();
            int Threshold = 0;

            using (Mat Src = new Mat())
            {
                Source.CopyTo(Src);
                switch ((int)Type)
                {
                case 0:
                    Threshold = (int)Cv2.Threshold(Src, Dst, 0, 255, ThresholdTypes.Otsu);
                    break;

                case 1:
                    Threshold = BaseOtsuCompute(Src, (int)Type);
                    break;

                case 2:
                    Threshold = BaseOtsuCompute(Src, (int)Type);
                    break;

                case 3:
                    Threshold = BaseOtsuCompute(Src, (int)Type);
                    break;

                case 4:
                    if (adaptiveTypes == 0)
                    {
                        Cv2.AdaptiveThreshold(Src, Dst, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, adaptBlockSize, adapt_c);
                    }
                    else if (adaptiveTypes == 1)
                    {
                        Cv2.AdaptiveThreshold(Src, Dst, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, adaptBlockSize, adapt_c);
                    }

                    break;

                default:
                    break;
                }
            }

            return(Threshold);
        }
Example 26
        private bool DigitRecog(out int[] SDK8, bool dispB = false)
        {
            Mat _frame = NuPz_Win.frame00.CvtColor(ColorConversionCodes.BGR2GRAY);

            Cv2.AdaptiveThreshold(_frame, frameWB, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 51, 5);  //c=10

            if (sdkFRec == null)
            {
                sdkFRec = new sdkFrameRecgV3(fName: fName);
            }
            SDK8 = sdkFRec.sdkFrameRecg_SolverV3(imgWhiteB: frameWB, thrVal: 128, DispB: false);
            if (SDK8 == null)
            {
                return(false);
            }
            for (int k = 0; k < 81; k++)
            {
                SDK64[k] = SDK64[k] << 4 | (uint)(SDK8[k] & 0xF);
            }

            bool succB = false;

            SDK8 = DigitRecog_MajorityVote(ref succB);

            if (dispB)
            {
                string st  = "";
                int    eCC = 0;
                foreach (var p in SDK8)
                {
                    if (p == 0 || p > 10)
                    {
                        st += "#"; eCC++;
                    }
                    else
                    {
                        st += (p <= 9)? p.ToString(): ".";
                    }
                }
                WriteLine("DigitRecog:" + st);
            }
            return(succB);
        }
Example 27
        public bool FindSudoku(Mat gray)
        {
            this.gray = gray;
            Cv2.AdaptiveThreshold(gray, gray, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 101, 5);
            Cv2.BitwiseNot(gray, gray);

            Cv2.FindContours(gray, out contours, out hierarchyIndex, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
            if (!findContourOfPuzzle())
            {
                return(false);
            }

            for (int i = 0; i < 4; i++)
            {
                corners[i] = new SKPoint(likelyCandidate[i].X, likelyCandidate[i].Y);
            }

            unwarpSudokuSquare();
            return(true);
        }
Example 28
        static void Main(string[] args)
        {
            // Used to check memory leak
            //for (int i = 0; i < 1000; i++)
            using (var state = new ThreadLocal <FormExtractionHandle>(NativeFormExtraction.CreateFormExtraction))
            {
                GC.Collect();
                List <string> pathFiles = GetSamplesAndCleanUpResults();

                // For testing:
                pathFiles = pathFiles.Where(m => m.Contains("form9")).ToList();

                int numThread      = 1;            // Environment.ProcessorCount;
                var showDebugImage = true;         // If true, you may want to use: numThread = 1.

                Parallel.ForEach(pathFiles, new ParallelOptions {
                    MaxDegreeOfParallelism = numThread
                }, pathFile =>
                {
                    FormExtractionHandle handle = state.Value;

                    NativeFormExtraction.SetOptions(handle, 800, 25, 15, 5, 20000, 50000, showDebugImage);

                    var resizeWidth = 800;
                    var orig        = new Mat(pathFile);
                    var image       = new Mat(pathFile, ImreadModes.GrayScale);

                    Cv2.AdaptiveThreshold(image, image, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 9, 4);

                    // Resize image if too large.
                    if (image.Width > resizeWidth)
                    {
                        var height = resizeWidth * image.Height / image.Width;
                        Cv2.Resize(image, image, new Size(resizeWidth, height));
                    }

                    Cv2.BitwiseNot(image, image);
                    Cv2.Dilate(image, image, Cv2.GetStructuringElement(MorphShapes.Cross, new Size(2, 2)));

                    MatOfByte mat             = new MatOfByte(image);
                    MatIndexer <byte> indexer = mat.GetIndexer();

                    var row      = image.Height;
                    var col      = image.Width;
                    Mat newImage = new Mat(row, col, MatType.CV_8UC3);
                    newImage.SetTo(Scalar.Black);

                    // We must determine if it "may" be an interesting blob.
                    Stopwatch watch = new Stopwatch();
                    watch.Start();

                    int[] imgData = new int[row * col];
                    for (int y = 0; y < row; y++)
                    {
                        for (int x = 0; x < col; x++)
                        {
                            imgData[y + x * row] = indexer[y, x];
                        }
                    }

                    var result = NativeFormExtraction.RunFormExtraction(handle, imgData, row, col);
                    if (result != 0)
                    {
                        throw new Exception("Unknown error occured with the function: RunFormExtraction");
                    }
                    watch.Stop();
                    Console.WriteLine("Duration: " + watch.Elapsed);

                    if (showDebugImage)
                    {
                        var debugImg = NativeFormExtraction.GetDebugImage(handle, row * col);

                        var img = CreateImage(debugImg, row, col, hasColor: true);
                        Cv2.BitwiseOr(newImage, img, newImage);

                        Cv2.BitwiseNot(image, image);
                        int width  = 400;
                        var height = width * image.Height / image.Width;
                        Cv2.Resize(orig, orig, new Size(width, height));
                        Cv2.Resize(image, image, new Size(width, height));
                        Cv2.Resize(newImage, newImage, new Size(width, height));

                        using (new Window("orig", orig))
                            using (new Window("pre", image))
                                using (new Window("post", newImage))
                                {
                                    Cv2.WaitKey();
                                    Cv2.DestroyAllWindows();
                                }
                    }

                    // Dispose.
                    orig.Dispose();
                    image.Dispose();
                    newImage.Dispose();
                    mat.Dispose();
                });
            }

            Console.WriteLine("End");
            Console.ReadLine();
        }
 // Wrapper for adaptive threshold segmentation
 private Mat AdaptiveThreshold(Mat image, Mat grayImage, Mat binImage, AdaptiveThresholdTypes adaptiveThresholdTypes, ThresholdTypes thresholdTypes)
 {
     Cv2.CvtColor(image, grayImage, ColorConversionCodes.BGR2GRAY); // color space conversion
     Cv2.AdaptiveThreshold(grayImage, binImage, 255, adaptiveThresholdTypes, thresholdTypes, 7, 1);
     return(binImage);
 }
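A hypothetical call to the wrapper above, from inside the declaring class (the file name is an assumption; the caller owns and disposes the Mats):

     // Usage sketch only: "input.jpg" is a placeholder path.
     using (Mat src = Cv2.ImRead("input.jpg"))
     using (Mat gray = new Mat())
     using (Mat bin = new Mat())
     {
         // The returned Mat is the same instance that was passed in as binImage.
         Mat result = AdaptiveThreshold(src, gray, bin, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary);
     }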
Example 30
        public static string FindContours(Mat img)
        {
            Mat src = img;
            Mat gray = new Mat(), dst = new Mat(), hsvImage = new Mat();

            Point[][]        contours;
            HierarchyIndex[] hierarchys;
            Point2f[]        point2Fs = new Point2f[] { };
            List <Point2f[]> point2 = new List <Point2f[]>();
            Point            p0 = new Point(0, 0), p1 = new Point(0, 0), p2 = new Point(0, 0), p3 = new Point(0, 0);

            //ImageMethod.HistogramEqualization(src, src);
            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY, 0);

            Mat x = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(6, 6)); //!!! tune the size

            Cv2.MorphologyEx(gray, gray, MorphTypes.Open, x);                       //!!! tune the MorphTypes
            Cv2.Erode(gray, gray, x);
            //Cv2.Dilate(gray,gray,x);
            int thresh_size = (100 / 4) * 2 + 1; // adaptive threshold block size (must be odd)

            Cv2.AdaptiveThreshold(gray, gray, 255, 0, ThresholdTypes.Binary, thresh_size, thresh_size / 3);
            new Window("gray", WindowMode.FreeRatio, gray);

            Cv2.FindContours(gray, out contours, out hierarchys, RetrievalModes.External, ContourApproximationModes.ApproxSimple, null); //!! tune both modes
            int resultnum = 0;

            Point[][] Excontours = contours;
            for (int i = 0; i < hierarchys.Length; i++)
            {
                double area = Cv2.ContourArea(contours[i], false);
                if (area < 50)
                {
                    continue;
                }
                RotatedRect rect = Cv2.MinAreaRect(contours[i]);
                point2Fs = rect.Points();
                Point[] po = change(rect.Points());
                //point2.Add(point2Fs);
                Excontours[resultnum] = po;
                for (int z = 0; z < point2Fs.Length; z++) // round to 2 decimal places
                {
                    point2Fs[z].X = (float)Math.Round(point2Fs[z].X, 2);
                    point2Fs[z].Y = (float)Math.Round(point2Fs[z].Y, 2);
                }
                point2.Add(point2Fs);
                for (int j = 0; j < 3; j++)
                {
                    p0 = new Point(point2Fs[j].X, point2Fs[j].Y);
                    p1 = new Point(point2Fs[j + 1].X, point2Fs[j + 1].Y);
                    Cv2.Line(src, p0, p1, Scalar.Red, 3, LineTypes.Link8);
                }
                p2 = new Point(point2Fs[3].X, point2Fs[3].Y);
                p3 = new Point(point2Fs[0].X, point2Fs[0].Y);
                Point TP = new Point((((p0.X + p1.X) / 2)), ((p1.Y + p2.Y) / 2));
                Cv2.Line(src, p2, p3, Scalar.Red, 3, LineTypes.Link8);
                resultnum++;
            }
            Console.WriteLine("剔除后的轮廓数:" + Excontours.Length);
            string json = JsonConvert.SerializeObject(Excontours);
            //Console.WriteLine(json);
            string path = @"C:\toolkipweb\miniProgram\opencvtest\opencv\test.jpg";

            Cv2.ImWrite(path, src);
            path = "https://www.toolkip.com/miniProgram/opencvtest/opencv/test.jpg";
            new Window("result", WindowMode.FreeRatio, src);
            Window.WaitKey();
            return(path + "--" + json);
        }