Example #1
    // Update is called once per frame
    void Update()
    {
        cap.Read(frame);


        if (!frame.Empty())
        {
            //assume this part of the frame contains only background
            smoothed_img = frame.Blur(new Size(5, 5));

            frame_hsv = frame.CvtColor(ColorConversionCodes.BGR2HSV);
            Scalar lb = new Scalar(0, 0, 50);
            Scalar ub = new Scalar(180, 70, 180);
            // assumed missing step: threshold the HSV frame with the bounds above to get the background mask
            thresh = frame_hsv.InRange(lb, ub);

            Mat disc = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(7, 7));

            Cv2.MorphologyEx(thresh, thresh, MorphTypes.Close, disc, null, 3);


            contours = Cv2.FindContoursAsMat(thresh, RetrievalModes.List, ContourApproximationModes.ApproxSimple);


            mask = new Mat(thresh.Size(), thresh.Type(), Scalar.All(0));
            // assumed missing step: fill the detected contours into the single-channel mask before it is merged below
            Cv2.DrawContours(mask, contours, -1, Scalar.All(255), -1);

            Cv2.Merge(new Mat[] { mask, mask, mask }, mask);
            Cv2.BitwiseAnd(mask, frame, mask);

            //Cv2.Merge(new Mat[]{frame_backproj,frame_backproj,frame_backproj},frame_backproj);

            tex.LoadImage(smoothed_img.ToBytes(".png", new int[] { 0 }));
        }
    }
Example #2
        public Mat Detect(Mat input)
        {
            var canny  = input.Blur(new Size(3, 3)).Canny(Threshold, Threshold * Ratio);
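            // The Canny edge map is used as a copy mask below: only edge pixels keep their original colour.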
            var output = new Mat(canny.Size(), canny.Type(), Scalar.All(0));

            input.CopyTo(output, canny);
            return(output);
        }
Example #3
        /// <summary>
        /// Try to match (part of) a large green circle on the screen.
        /// </summary>
        public CircleSegment FindCorona()
        {
            // see the Experiments for how this works
            Bitmap cropped = CompassSensor.Crop(screen.bitmap,
                                                screen.bitmap.Width * 1 / 3,
                                                screen.bitmap.Height * 1 / 3,
                                                screen.bitmap.Width * 2 / 3,
                                                screen.bitmap.Height * 2 / 3);
            Mat screenwhole = BitmapConverter.ToMat(cropped);

            Point2f ShipPointerOffset = new Point2f(0, 0);

            try
            {
                ShipPointerOffset = FindShipPointer(IsolateYellow(screenwhole));
            }
            catch (Exception)
            {
                // If we can't find the ship pointer (it's hard to see it against the sun) then use the middle of the screen.
            }

            // erase the vivid areas, otherwise the blur subtraction turns yellow near red to green
            Mat brightHSV     = screenwhole.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat darkAreasMask = brightHSV.InRange(InputArray.Create(new int[] { 0, 0, 0 }), InputArray.Create(new int[] { 180, 255, 180 }));
            Mat darkAreas     = new Mat();

            screenwhole.CopyTo(darkAreas, darkAreasMask);

            Mat screenblur        = darkAreas - darkAreas.Blur(new OpenCvSharp.Size(10, 10));
            Mat sourceHSV         = screenblur.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat mask              = sourceHSV.InRange(InputArray.Create(new int[] { 35, 204, 20 }), InputArray.Create(new int[] { 90, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Mat sourceGrey = sourceHSVFiltered.Split()[2].InRange(32, 256);

            LineSegmentPoint[] result = sourceGrey.HoughLinesP(1, 3.1415 / 180, 5, 10, 2);
            List <Point2d>     points = new List <Point2d>();

            foreach (var line in result)
            {
                points.Add(line.P1);
                points.Add(line.P2);
            }
            if (points.Count < 8)
            {
                throw new ArgumentException("Not enough points in corona circle");
            }
            CircleSegment c = ComputeCircle(points);

            sourceGrey.Line(c.Center, ShipPointerOffset, 255);
            c.Center -= ShipPointerOffset; // adjust for camera movement by taking ship pointer offset
            sourceGrey.Circle(c.Center, (int)c.Radius, 255);
            debugWindow.Image = BitmapConverter.ToBitmap(sourceGrey);
            return(c);
        }
Example #4
        public Mat Process(Mat frame)
        {
            mFrame = frame;

            Size blurWinSize = new Size(mBlurWinWidth, mBlurWinHeight);

            mFrame = mFrame.Blur(blurWinSize); // Blur returns a new Mat, so assign the result back

            var reshaped = mFrame.Reshape(cn: 3, rows: mFrame.Rows * mFrame.Cols);
            var samples  = new Mat();

            reshaped.ConvertTo(samples, MatType.CV_32FC3);

            var bestLabels = new Mat();
            var centers    = new Mat();

            Cv2.Kmeans(samples,
                       mClustNum,
                       bestLabels,
                       new TermCriteria(type: CriteriaType.Eps | CriteriaType.MaxIter, maxCount: 10, epsilon: 1.0),
                       3,
                       KMeansFlags.PpCenters,
                       centers);

            // This is not an optimal solution, but it works in this case.
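            // The loop below recolours every pixel with the BGR value of its k-means cluster centre,
            // rebuilding the frame row by row from the flat label list (a posterised image).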
            Mat clusteredImage = new Mat(mFrame.Rows, mFrame.Cols, mFrame.Type());

            for (var size = 0; size < mFrame.Cols * mFrame.Rows; size++)
            {
                var clusterIndex = bestLabels.At <int>(0, size);
                var newPixel     = new Vec3b
                {
                    Item0 = (byte)(centers.At <float>(clusterIndex, 0)), // B
                    Item1 = (byte)(centers.At <float>(clusterIndex, 1)), // G
                    Item2 = (byte)(centers.At <float>(clusterIndex, 2))  // R
                };
                clusteredImage.Set(size / mFrame.Cols, size % mFrame.Cols, newPixel);
            }

            mFrame = clusteredImage;

            Mat gray   = mFrame.CvtColor(ColorConversionCodes.BGR2GRAY);
            Mat thresh = gray.Threshold(140, 255, ThresholdTypes.Otsu);

            Mat erode  = thresh.Erode(new Mat());
            Mat dilate = erode.Dilate(new Mat());

            Mat morph = new Mat();

            Cv2.BitwiseXor(erode, dilate, morph);

            return(morph);
        }
Example #5
        public static void MatchCorona()
        {
            Bitmap screen      = new Bitmap("Screenshot_0028.bmp");
            Bitmap cropped     = CompassSensor.Crop(screen, screen.Width * 1 / 3, screen.Height * 1 / 3, screen.Width * 2 / 3, screen.Height * 2 / 3);
            Mat    screenwhole = BitmapConverter.ToMat(cropped);

            // erase the vivid areas, otherwise the blur subtraction turns yellow near red to green
            Mat brightHSV     = screenwhole.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat darkAreasMask = brightHSV.InRange(InputArray.Create(new int[] { 0, 0, 0 }), InputArray.Create(new int[] { 180, 255, 180 }));
            Mat darkAreas     = new Mat();

            screenwhole.CopyTo(darkAreas, darkAreasMask);

            Mat    screenblur = darkAreas - darkAreas.Blur(new OpenCvSharp.Size(10, 10));
            Window w3         = new Window(screenblur);

            //screenblur.SaveImage("sharplines.png");
            Mat sourceHSV = screenblur.CvtColor(ColorConversionCodes.BGR2HSV);

            /* Paint.Net uses HSV [0..360], [0..100], [0..100].
             * OpenCV uses H: 0 - 180, S: 0 - 255, V: 0 - 255
             * Paint.NET colors:
             * 73   100 18     brightest part of green edge
             * 72   98  9      very dark green
             * suggested range [70..180], [80..100], [8..100] (paint.net)
             * suggested range [35..90], [204..255], [20..255] (openCV)
             * */
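            // Scale conversion between the two (Paint.NET H in degrees, S/V in percent):
            // H_cv = H / 2, S_cv = S * 255 / 100, V_cv = V * 255 / 100, so the Paint.NET range
            // [70..180], [80..100], [8..100] maps to the OpenCV range [35..90], [204..255], [20..255] used below.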
            Mat mask = sourceHSV.InRange(InputArray.Create(new int[] { 35, 204, 20 }), InputArray.Create(new int[] { 90, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Window w5         = new Window("yellowfilter", sourceHSVFiltered.CvtColor(ColorConversionCodes.HSV2BGR));
            Mat    sourceGrey = sourceHSVFiltered.Split()[2].InRange(32, 256); // Value channel is pretty good as a greyscale conversion
            Window w6         = new Window("yellowFilterValue", sourceGrey);

            LineSegmentPoint[] result = sourceGrey.HoughLinesP(1, 3.1415 / 180, 5, 10, 2);
            List <Point2d>     points = new List <Point2d>();

            foreach (var line in result)
            {
                points.Add(line.P1);
                points.Add(line.P2);
                darkAreas.Line(line.P1, line.P2, new Scalar(255, 0, 255));
            }
            CircleSegment c = CruiseSensor.ComputeCircle(points);

            darkAreas.Circle(c.Center, (int)c.Radius, new Scalar(255, 255, 0));
            Window w9 = new Window("final", darkAreas);
        }
Example #6
        public Mat ChangeMeanBlur(Mat imageSource, int value)
        {
            if (imageSource == null)
            {
                return(null);
            }

            if (value == 1)
            {
                return(imageSource);
            }

            Size size = new Size(value, value);

            return(imageSource.Blur(size));
        }
Example #7
        public static Mat BinarizationMat(Mat src)
        {
            Mat gray   = new Mat();
            Mat binary = new Mat();

            Cv2.CvtColor(src, gray, ColorConversionCodes.RGB2GRAY);
            gray = gray.Blur(new Size(3, 3));
            gray = gray.GaussianBlur(new Size(5, 5), 0);
            gray = gray.BoxFilter(-1, new Size(10, 10), normalize: true);
            Cv2.Threshold(gray, binary, 100, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
            var element = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));

            binary = binary.Erode(element);
            binary = binary.MorphologyEx(MorphTypes.Close, element);
            //Cv2.ImShow("bin " + DateTime.Now, binary);
            return(binary);
        }
Example #8
        public static unsafe Mat NormalizeTextImage2(
            Mat input,
            Size size)
        {
            using var mean = input.Blur(size);
            using var otsu = mean.
                             Threshold(100, 255, ThresholdTypes.BinaryInv | ThresholdTypes.Otsu);
            using var ones   = Mat.Ones(size.Width, size.Height, MatType.CV_8SC1);
            using var dilate = otsu.Dilate(ones);
            var mask = dilate;

            using var maskInv = ~mask;
            using InputArray maskInvInputArray           = maskInv;
            using var backgroundMean                     = new Mat();
            using OutputArray backgroundMeanOutputArray  = backgroundMean;
            using var backgroundStdev                    = new Mat();
            using OutputArray backgroundStdevOutputArray = backgroundStdev;

            input.MeanStdDev(
                backgroundMeanOutputArray,
                backgroundStdevOutputArray,
                maskInvInputArray);
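            // Mean and stdev below are computed only over the non-text region (the inverted mask);
            // the background level is then set three standard deviations below that mean, so
            // genuine ink stays darker than it.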

            var meanValue  = backgroundMean.Get <double>(0);
            var stdevValue = backgroundStdev.Get <double>(0);
            var background = (byte)Math.Max(meanValue - stdevValue * 3, 0);

            var output = new Mat(input.Size(), MatType.CV_8UC1, background);

            input.CopyTo(output, mask);

            output = output.Threshold(background, background, ThresholdTypes.Trunc);

            //var clahe = Cv2.CreateCLAHE();

            //clahe.Apply(output, output);


            //using(new Window("Output", WindowMode.Normal | WindowMode.KeepRatio, output))
            //{
            //  Cv2.WaitKey();
            //}

            return(output);
        }
Example #9
        public override void CreateFilteredMat()
        {
            if (UseCastableDetection())
            {
                CasteableDetection = new Mat(this.CaptureSource, this.CastableDetectionArea);
                CasteableDetection = CasteableDetection.Resize(new Size(1, 1), 0, 0, InterpolationFlags.Cubic);
                ImageFilterHelper.reduceColor(CasteableDetection, 64);
            }

            FilteredMat = CaptureSource;
            if (this.IsCastable() || !UseCastableDetection())
            {
                FilteredMat = new Mat(this.CaptureSource, new Rect(this.BorderCut, this.BorderCut, this.CaptureSource.Width - this.BorderCut * 2, this.CaptureSource.Height - this.BorderCut * 2));
                FilteredMat = FilteredMat.Blur(new Size(5, 5), new Point(-1, -1));
                ImageFilterHelper.killDarkPixel(FilteredMat, 60);
                // ImageFilterHelper.KillGrayPixel(ref image, 60);
                ImageFilterHelper.saturation(FilteredMat, 0, 2, 50);
            }
        }
Example #10
        async Task <Bitmap> CVBasic()
        {
            var kernel = Math.Pow(2, Slider0.Value * 10);

            SliderLabel0.Text = kernel.ToString();
            var    t1     = Slider1.Value * 256;
            var    t2     = Slider2.Value * 256;
            Bitmap bitmap = await GetBitmap();

            await Task.Run(() =>
            {
                Mat mat = bitmap.ToMat();
                mat     = ~mat.CvtColor(ColorConversionCodes.RGBA2GRAY);
                mat     = mat.Blur(new OpenCvSharp.Size(kernel, kernel));
                mat     = mat.Threshold(t1, t2, ThresholdTypes.Otsu);
                bitmap  = mat.ToBitmap();
            });

            return(bitmap);
        }
Example #11
        public static unsafe Mat RemoveWatermark(Mat input, byte background, Size size)
        {
            var output = new Mat(input.Size(), MatType.CV_8UC1);
            var index  = input.GetUnsafeGenericIndexer <byte>();

            using var mean = input.Blur(size);
            var meanIndex = mean.GetUnsafeGenericIndexer <byte>();
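            // For each output pixel: keep the input value only where both the pixel and its local
            // (blurred) mean are darker than the given background level; anything lighter, i.e. the
            // watermark, is flattened to the background value.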

            output.ForEachAsByte((value, position) =>
            {
                var y       = position[0];
                var x       = position[1];
                var invalue = index[y, x];
                var mean    = meanIndex[y, x];

                *value = (mean < background) && (invalue < background) ?
                         invalue : background;
            });

            return(output);
        }
Example #12
    // FaceTracking
    void DoTracking()
    {
        //while (running)
        //{
        try
        {
            if (kinect.GetDepthRaw())
            {
                //lock (this)
                //{
                src = DoDepthBuffer(kinect.usersDepthMap, KinectWrapper.GetDepthWidth(), KinectWrapper.GetDepthHeight());
                roi = src.Clone(new OpenCvSharp.CPlusPlus.Rect(roiX, roiY, roiW, roiH));
                roi.ConvertTo(roi, OpenCvSharp.CPlusPlus.MatType.CV_8U, 255.0 / 32000.0);
                Cv2.Subtract(new Mat(roiH, roiW, MatType.CV_8UC1, new Scalar(255)), roi, roi);
                double threshMax = 255.0 - ((255.0 / 32000.0) * ((ushort)srcThreshMax << 3));
                double threshMin = 255.0 - ((255.0 / 32000.0) * ((ushort)srcThreshMin << 3));
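                // The user thresholds are shifted left by 3 (the raw depth map presumably packs a
                // player index in its low 3 bits), scaled by 255/32000 like the ConvertTo above and
                // inverted, so they land on the same 8-bit scale as roi.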
                roi = roi.Threshold(threshMin, 255.0, ThresholdType.ToZeroInv);
                roi = roi.Threshold(threshMax, 255.0, ThresholdType.ToZero);
                // Flip up/down dimension and right/left dimension
                if (!FlipUpDownAxis && FlipLeftRightAxis)
                    roi.Flip(FlipMode.XY);
                else if (!FlipUpDownAxis)
                    roi.Flip(FlipMode.X);
                else if (FlipLeftRightAxis)
                    roi.Flip(FlipMode.Y);

                //Apply elliptical mask
                Mat ellipseMask = new Mat(roi.Rows, roi.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8U, new Scalar(0.0));
                Cv2.Ellipse(ellipseMask, new Point(ellipseMaskCenterX, ellipseMaskCenterY), new Size(axisMaskX, axisMaskY), maskAngle, maskStartAngle, maskEndAngle, new Scalar(255.0), -1);
                Cv2.BitwiseAnd(roi, ellipseMask, roi);
                //Remove noise by morphologyEx
                Mat kernel = Cv2.GetStructuringElement(StructuringElementShape.Ellipse, new Size(3, 3));
                Cv2.MorphologyEx(roi, roi, MorphologyOperation.Open, kernel);
                Cv2.MorphologyEx(roi, roi, MorphologyOperation.Close, kernel);
                //Subtract background
                if (first)
                {
                    bg = roi.Clone();
                    //bg = bg.Blur(new Size(smoothBlur, smoothBlur));
                    first = false;
                }
                fg = bg.Clone();
                //roi = roi.Blur(new Size(smoothBlur, smoothBlur));
                Mat subMask = roi.Clone();
                subMask = subMask.Threshold(smThresh, 255.0, ThresholdType.ToZero);
                //Cv2.ImShow("sm",subMask);
                bg.CopyTo(roi, subMask);
                OpenCvSharp.Cv.AbsDiff(roi.ToCvMat(), bg.ToCvMat(), fg.ToCvMat());
                //Threshold foreground image
                fgthresh = fg.Threshold(threshold, 255.0, ThresholdType.Binary);
                fgthresh = fgthresh.Blur(new Size(smoothBlur, smoothBlur));
                //Detect Blobs
                Mat roiToImg = new Mat(roi.Cols, roi.Rows, MatType.CV_8UC3);
                Mat threshToImg = fgthresh.Clone();
                Cv2.Merge(new Mat[] { roi, roi, roi }, roiToImg);
                IplImage showImg = roiToImg.ToIplImage();
                IplImage fgthreshImg = threshToImg.ToIplImage();
                OpenCvSharp.Blob.CvBlobLib.Label(fgthreshImg, blobs);
                OpenCvSharp.Blob.CvBlobLib.FilterByArea(blobs, blobMinArea, blobMaxArea);
                OpenCvSharp.Blob.CvBlobLib.RenderBlobs(blobs, fgthreshImg, showImg, RenderBlobsMode.Color | RenderBlobsMode.Centroid);
                UpdateTracks(blobs, tracks, blobMinDistance, blobMaxLife);
                //OpenCvSharp.Blob.CvBlobLib.RenderTracks(tracks, fgthreshImg, showImg, RenderTracksMode.BoundingBox | RenderTracksMode.Id);
                RenderTracks(showImg);
                //Cv.ShowImage("thres", fgthreshImg);
                Cv.ShowImage("showBlob", showImg);
                //Check Blob Actions
                //Debug.Log(tracks.Count);

                //}
            }

        }
        catch (System.Exception e)
        {
            //throw e;
            Debug.Log(e.Message + " " + e.StackTrace);
        }
        //}
    }
Example #13
        public unsafe static FeatureInfo Features(
            TesseractEngine engine,
            Mat input,
            Size size,
            byte background)
        {
            using var scope = new Scope();

            var mask = input.
                       Threshold(100, 255, ThresholdTypes.BinaryInv | ThresholdTypes.Otsu).
                       Temp(scope).
                       Dilate(Mat.Ones(size.Width, size.Height, MatType.CV_8SC1).Temp(scope)).
                       In(scope);

            mask.FindContours(
                out var points,
                out var hierarchy,
                RetrievalModes.CComp,
                ContourApproximationModes.ApproxSimple);

            var nodes = points.
                        Select((contour, index) =>
            {
                if (hierarchy[index].Parent >= 0)
                {
                    return(null);
                }

                var rect = Cv2.BoundingRect(contour);

                if ((rect.Width < size.Width) && (rect.Height < size.Height))
                {
                    return(null);
                }

                using var roi = new Mat(rect.Size, MatType.CV_8UC1, 0);

                roi.DrawContours(
                    points,
                    index,
                    Scalar.White,
                    -1,
                    LineTypes.Link4,
                    null,
                    int.MaxValue,
                    -rect.TopLeft);

                using var inputView   = input[rect];
                using var maskedInput =
                          new Mat(rect.Size, MatType.CV_8UC1, background);

                inputView.CopyTo(maskedInput, roi);

                using var fineMask = maskedInput.
                                     Threshold(100, 255, ThresholdTypes.BinaryInv | ThresholdTypes.Otsu).
                                     Dilate(new Mat());

                fineMask.FindContours(
                    out var finePoints,
                    out var fineHierarchy,
                    RetrievalModes.CComp,
                    ContourApproximationModes.ApproxSimple);

                using var fineMask2 = new Mat(rect.Size, MatType.CV_8UC1, 0);

                fineMask2.DrawContours(finePoints, -1, Scalar.White, -1);

                using var bluredFineMask2 = fineMask2.Blur(size);

                using var maxedFineMask2 = new Mat(rect.Size, MatType.CV_8UC1);
                var ix = bluredFineMask2.GetUnsafeGenericIndexer <byte>();
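                // Keep only pixels that are local maxima of the blurred mask within a size-sized
                // window (a simple non-maximum suppression), thinning the mask for HoughLinesP below.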

                maxedFineMask2.ForEachAsByte((value, position) =>
                {
                    var y    = position[0];
                    var x    = position[1];
                    var minx = Math.Max(x - (size.Width >> 1), 0);
                    var maxx = Math.Min(x + ((size.Width + 1) >> 1), rect.Width);
                    var miny = Math.Max(y - (size.Height >> 1), 0);
                    var maxy = Math.Min(y + ((size.Height + 1) >> 1), rect.Height);
                    var max  = 0;

                    for (var iny = miny; iny < maxy; ++iny)
                    {
                        for (var inx = minx; inx < maxx; ++inx)
                        {
                            var invalue = ix[iny, inx];

                            if (max < invalue)
                            {
                                max = invalue;
                            }
                        }
                    }

                    var v = ix[y, x];

                    *value = v >= max && max > 0 ? (byte)255 : (byte)0;
                });

                //using(new Window("fineMask2 " + index, WindowMode.AutoSize, maxedFineMask2))
                //{
                //  Cv2.WaitKey();
                //}

                using var smooth    = Smooth(maskedInput, background, .1);
                using var sharpened = Sharpen(smooth, size);

                var lines = maxedFineMask2.HoughLinesP(
                    1,
                    Math.PI / 180,
                    100,
                    Math.Max(rect.Width, rect.Height) / 2,
                    size.Width);

                //if (lines.Length == 0)
                //{
                //  return null;
                //}

                var angle = 0;

                //(int)(lines.
                //  Average(l => Math.Atan2(l.P2.Y - l.P1.Y, l.P2.X - l.P1.Y)) /
                //    Math.PI * 180);

                //angle -= angle % 4;

                var ocrView = sharpened;

                //if(angle != 0)
                //{
                //  using var rotation = Cv2.GetRotationMatrix2D(new Point2f(), angle, 1);

                //  var cos = Math.Abs(rotation.At<double>(0, 0));
                //  var sin = Math.Abs(rotation.At<double>(0, 1));
                //  var width = (int)(rect.Height * sin + rect.Width * cos);
                //  var height = (int)(rect.Height * cos + rect.Width * sin);

                //  rotation.At<float>(0, 2) = (width - rect.Width) / 2;
                //  rotation.At<float>(1, 2) = (height - rect.Height) / 2;

                //  ocrView = sharpened.WarpAffine(
                //    rotation,
                //    new Size(width, height),
                //    InterpolationFlags.Area,
                //    BorderTypes.Constant,
                //    new Scalar(background));
                //}

                using var ocrViewResource = ocrView;
                var node = OCR(engine, ocrView, 225);

                //if(index == 44)
                //{
                //  index = 44;
                //}

                if (!(node?.Children?.Count > 0))
                {
                    return(null);
                }

                node.Contours = finePoints;
                node.Angle    = angle;
                node.Left     = rect.Left;
                node.Top      = rect.Top;
                node.Width    = rect.Width;
                node.Height   = rect.Height;

                return(node);
            }).
                        ToArray();

            return(new FeatureInfo
            {
                Contours = points,
                Hierarchy = hierarchy,
                Nodes = nodes
            });
        }
Example #14
        public Window1()
        {
            InitializeComponent();

            Mat src = new Mat(@"./desktop.jpg");

            Cv2.ImShow("src", src);
            //src = src.Resize(new Size(src.Width / 2, src.Height / 2));
            for (var y = 0; y < src.Height; y++)
            {
                for (var x = 0; x < src.Width; x++)
                {
                    var color = src.Get <Vec3b>(y, x);
                    //if (color.Item2 < 175)
                    if (color.Item2 < 225)
                    {
                        color.Item0 = 255;
                        color.Item1 = 0;
                        color.Item2 = 0;
                    }
                    src.Set(y, x, color);
                }
            }
            Cv2.ImShow("fade", src);
            Mat gray   = new Mat();
            Mat binary = new Mat();

            Cv2.CvtColor(src, gray, ColorConversionCodes.RGB2GRAY);
            gray = gray.GaussianBlur(new Size(5, 5), 0);
            gray = gray.Blur(new Size(5, 5));
            gray = gray.BoxFilter(-1, new Size(10, 10), normalize: true);
            Cv2.ImShow("gray", gray);
            Cv2.Threshold(gray, binary, 100, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
            var element = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));

            binary = binary.Erode(element);
            binary = binary.MorphologyEx(MorphTypes.Close, element);
            Cv2.ImShow("bin", binary);
            //var line = binary.Canny(100, 200);
            //Cv2.ImShow("line", line);
            //Cv2.WaitKey();
            //arrays to receive the contours
            Point[][]        contours;
            HierarchyIndex[] hierarchy;
            Cv2.FindContours(binary, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxTC89KCOS);
            //array to receive the minimum-area bounding rectangles
            MessageBox.Show(contours.Length.ToString());
            RotatedRect[] rotateRect    = new RotatedRect[contours.Length];
            Point[][]     contours_poly = new Point[contours.Length][];
            for (int i = 0; i < contours.Length; i++)
            {
                contours_poly[i] = Cv2.ApproxPolyDP(contours[i], 30, true); //polygon approximation; edges shorter than 30 are filtered out

                rotateRect[i] = Cv2.MinAreaRect(contours_poly[i]);          //minimum-area bounding rectangle for each contour

                Point2f[] pot = new Point2f[4];                             //new point array to receive the rectangle corners

                //for (int i = 0; i < rotateRect.Length; i++)
                //{
                var angle = rotateRect[i].Angle; //rectangle angle
                pot = rotateRect[i].Points();    //the 4 corners of the rectangle
                var line1 = Math.Sqrt((pot[0].X - pot[1].X) * (pot[0].X - pot[1].X) + (pot[0].Y - pot[1].Y) * (pot[0].Y - pot[1].Y));
                var line2 = Math.Sqrt((pot[0].X - pot[3].X) * (pot[0].X - pot[3].X) + (pot[0].Y - pot[3].Y) * (pot[0].Y - pot[3].Y));
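                // line1 and line2 are the lengths of two adjacent sides of the rotated rectangle,
                // so their product approximates its area.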
                if (line1 * line2 < 1000)//filter: skip rectangles that are too small
                {
                    continue;
                }
                if (line1 > line2)//adjust according to the actual orientation
                {
                    angle += 90;
                }

                Mat Roi = new Mat(src.Size(), MatType.CV_8UC3);
                Roi.SetTo(0);//all black
                //Cv2.DrawContours(binary, contours, -1, Scalar.White, -1);//outline the contour regions in the binary image and fill them white
                Cv2.DrawContours(binary, contours, -1, Scalar.White, 1);
                Cv2.ImShow("bin", binary);
                src.CopyTo(Roi, binary);//cut the source image into Roi through the mask
                Cv2.ImShow("Roi", Roi);
                Mat afterRotato = new Mat(src.Size(), MatType.CV_8UC3);
                afterRotato.SetTo(0);
                Point2f center = rotateRect[i].Center;
                Mat     M      = Cv2.GetRotationMatrix2D(center, angle, 1);                                   //compute the rotation matrix
                Cv2.WarpAffine(Roi, afterRotato, M, Roi.Size(), InterpolationFlags.Linear, BorderTypes.Wrap); //get the rotated image with everything else filtered out
                Cv2.ImShow("rotated", afterRotato);



                Mat bin2 = new Mat();
                Cv2.ImShow("after", afterRotato);
                Cv2.CvtColor(afterRotato, bin2, ColorConversionCodes.RGB2GRAY);
                Cv2.Threshold(bin2, bin2, 50, 255, ThresholdTypes.Binary | ThresholdTypes.Otsu);
                Point[][]        con;
                HierarchyIndex[] temp;//receives the contour info after correction
                Cv2.FindContours(bin2, out con, out temp, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
                for (int j = 0; j < con.Length; j++)
                {
                    Rect rect = Cv2.BoundingRect(con[j]); //the upright bounding rect is enough; no further rotation needed after correction
                    if (rect.Height * rect.Width < 8000)  //filter out noise
                    {
                        continue;
                    }
                    Mat dstImg = new Mat(afterRotato, rect);
                    dstImg = dstImg.CvtColor(ColorConversionCodes.RGB2GRAY);
                    dstImg = dstImg.Threshold(50, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
                    Cv2.BitwiseNot(dstImg, dstImg, new Mat());
                    dstImg = new Mat(dstImg, new Rect(100, 100, dstImg.Width - 200, dstImg.Height - 200));
                    Cv2.ImShow("dst", dstImg);
                    dstImg.SaveImage("dst1.jpg");

                    var engine     = new TesseractEngine("./tessdata", "eng", EngineMode.Default);
                    var resProcess = engine.Process(Pix.LoadTiffFromMemory(dstImg.ToBytes(".tiff")));
                    MessageBox.Show(resProcess.GetText());

                    ////string name = "dst" + i;//mainly to see how many results show up when debugging
                    //dstImg = dstImg.CvtColor(ColorConversionCodes.RGB2GRAY);
                    //dstImg = dstImg.Threshold(10, 255, ThresholdTypes.Otsu);
                    //Cv2.ImShow("chan", dstImg.Canny(100, 200));

                    //dstImg.FindContours(out var con1, out var hie1, RetrievalModes.External,
                    //    ContourApproximationModes.ApproxNone);
                    //dstImg.DrawContours(con1, -1, Scalar.Green, 3);
                    //Cv2.ImShow("dst2", dstImg);
                }
            }
            Cv2.WaitKey();
            Console.ReadLine();
        }