Code Example #1
        /// <summary>
        /// Draws all given rectangles on the image in green.
        /// </summary>
        /// <param name="mat"></param>
        /// <param name="bounds"></param>
        /// <returns>Mat with drawn bounding boxes</returns>
        public Mat DrawBoundingBoxes(Mat mat, List<Rect> bounds)
        {
            foreach (Rect bound in bounds)
            {
                Imgproc.Rectangle(mat, new Point(bound.X, bound.Y), new Point(bound.X + bound.Width, bound.Y + bound.Height), new Scalar(0, 255, 0));
            }

            return mat;
        }
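
A hypothetical call site for DrawBoundingBoxes, sketched under the assumption that the same OpenCV Android C# binding (Mat, Rect, Point, Imgproc) is in scope; frameRgba stands for an RGBA Mat obtained elsewhere, for example from a camera frame:

        // Usage sketch (placeholder values): draw two boxes on an existing frame.
        List<Rect> bounds = new List<Rect>
        {
            new Rect(10, 10, 80, 40),    // x, y, width, height
            new Rect(120, 60, 50, 50)
        };
        Mat annotated = DrawBoundingBoxes(frameRgba, bounds);
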
Code Example #2
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            mRgbaT = inputFrame.Rgba();
            //mRgbaT = mRgba.T();
            //Core.Flip(mRgba.T(), mRgbaT, 1);
            //Imgproc.Resize(mRgbaT, mRgbaT, mRgba.Size());

            mGrayT = inputFrame.Gray();
            //mGrayT = mGray.T();
            //Core.Flip(mGray.T(), mGrayT, 1);
            //Imgproc.Resize(mGrayT, mGrayT, mGray.Size());
            //mRgba = mRgbaT;
            //mGray = mGrayT;

            if (mAbsoluteFaceSize == 0)
            {
                int height = mGrayT.Rows();
                if (Java.Lang.Math.Round(height * mRelativeFaceSize) > 0)
                {
                    mAbsoluteFaceSize = Java.Lang.Math.Round(height * mRelativeFaceSize);
                }
                mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
            }

            MatOfRect faces = new MatOfRect();

            if (mDetectorType == JAVA_DETECTOR)
            {
                if (mJavaDetector != null)
                {
                    mJavaDetector.DetectMultiScale(mGrayT, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                                   new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
                }
            }
            else if (mDetectorType == NATIVE_DETECTOR)
            {
                if (mNativeDetector != null)
                {
                    mNativeDetector.detect(mGrayT, faces);
                }
            }
            else
            {
                Log.Error(ActivityTags.FaceDetect, "Detection method is not selected!");
            }

            Rect[] facesArray = faces.ToArray();
            for (int i = 0; i < facesArray.Length; i++)
            {
                Imgproc.Rectangle(mRgbaT, facesArray[i].Tl(), facesArray[i].Br(), FACE_RECT_COLOR, 3);
            }

            return mRgbaT;
        }
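
OnCameraFrame is invoked by the camera view once per frame. A rough wiring sketch, assuming the C# binding mirrors the Java CameraBridgeViewBase API (the view id and field name are placeholders; verify the exact method names against your binding):

            // Hypothetical wiring, typically done once the OpenCV runtime has loaded:
            // the hosting Activity implements CameraBridgeViewBase.ICvCameraViewListener2,
            // whose OnCameraFrame override is the method shown above.
            mOpenCvCameraView = FindViewById<CameraBridgeViewBase>(Resource.Id.surface_view);  // placeholder view id
            mOpenCvCameraView.SetCvCameraViewListener(this);
            mOpenCvCameraView.EnableView();
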
Code Example #3
        private Mat get_template(CascadeClassifier clssfr, Rect area, int size)
        {
            Mat       template     = new Mat();
            Mat       mROI         = mGray.Submat(area);
            MatOfRect eyes         = new MatOfRect();
            Point     iris         = new Point();
            Rect      eye_template = new Rect();

            clssfr.DetectMultiScale(mROI, eyes, 1.15, 2, Objdetect.CascadeFindBiggestObject | Objdetect.CascadeScaleImage,
                                    new Size(30, 30), new Size());

            Rect[] eyesArray = eyes.ToArray();
            // Note: the loop body returns unconditionally, so at most one eye is processed
            // (CascadeFindBiggestObject already limits detection to the biggest hit).
            for (int i = 0; i < eyesArray.Length;)
            {
                Rect e = eyesArray[i];
                e.X = area.X + e.X;
                e.Y = area.Y + e.Y;
                Rect eye_only_rectangle = new Rect((int)e.Tl().X,
                                                   (int)(e.Tl().Y + e.Height * 0.4), (int)e.Width,
                                                   (int)(e.Height * 0.6));
                mROI = mGray.Submat(eye_only_rectangle);
                Mat vyrez = mRgba.Submat(eye_only_rectangle);

                Core.MinMaxLocResult mmG = Core.MinMaxLoc(mROI);

                Imgproc.Circle(vyrez, mmG.MinLoc, 2, new Scalar(255, 255, 255, 255), 2);
                iris.X = mmG.MinLoc.X + eye_only_rectangle.X;
                iris.Y = mmG.MinLoc.Y + eye_only_rectangle.Y;

                eye_template = new Rect((int)iris.X - size / 2, (int)iris.Y - size / 2, size, size);

                Imgproc.Rectangle(mRgba, eye_template.Tl(), eye_template.Br(), new Scalar(255, 0, 0, 255), 2);
                template = (mGray.Submat(eye_template)).Clone();
                return template;
            }
            return template;
        }
Code Example #4
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            mRgba = inputFrame.Rgba();
            mGray = inputFrame.Gray();

            if (mAbsoluteFaceSize == 0)
            {
                int height = mGray.Rows();
                if (Math.Round(height * mRelativeFaceSize) > 0)
                {
                    mAbsoluteFaceSize = (int)Math.Round(height * mRelativeFaceSize);
                }
            }

            if (mZoomWindow == null || mZoomWindow2 == null)
            {
                CreateAuxiliaryMats();
            }

            MatOfRect faces = new MatOfRect();

            if (FACE != null)
            {
                FACE.DetectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                      new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
            }

            Rect[] facesArray = faces.ToArray();
            for (int i = 0; i < facesArray.Length; i++)
            {
                Imgproc.Rectangle(mRgba, facesArray[i].Tl(), facesArray[i].Br(), FACE_RECT_COLOR, 3);
                xCenter = (facesArray[i].X + facesArray[i].Width + facesArray[i].X) / 2;
                yCenter = (facesArray[i].Y + facesArray[i].Y + facesArray[i].Height) / 2;
                Point center = new Point(xCenter, yCenter);

                Imgproc.Circle(mRgba, center, 10, new Scalar(255, 0, 0, 255), 3);

                /* --- NOT NEEDED
                 * Imgproc.putText(mRgba, "[" + center.X + "," + center.Y + "]",
                 *      new Point(center.X + 20, center.Y + 20),
                 *      Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255));
                 */

                Rect r = facesArray[i];

                // compute the eye area
                Rect eyearea = new Rect(r.X + r.Width / 8,
                                        (int)(r.Y + (r.Height / 4.5)), r.Width - 2 * r.Width / 8, (int)(r.Height / 3.0));

                // split it
                Rect eyearea_right = new Rect(r.X + r.Width / 16, (int)(r.Y + (r.Height / 4.5)),
                                              (r.Width - 2 * r.Width / 16) / 2, (int)(r.Height / 3.0));

                Rect eyearea_left = new Rect(r.X + r.Width / 16 + (r.Width - 2 * r.Width / 16) / 2,
                                             (int)(r.Y + (r.Height / 4.5)), (r.Width - 2 * r.Width / 16) / 2, (int)(r.Height / 3.0));

                // draw the area - mGray is working grayscale mat, if you want to see area in rgb preview,
                // change mGray to mRgba
                Imgproc.Rectangle(mRgba, eyearea_left.Tl(), eyearea_left.Br(), new Scalar(255, 0, 0, 255), 2);
                Imgproc.Rectangle(mRgba, eyearea_right.Tl(), eyearea_right.Br(), new Scalar(255, 0, 0, 255), 2);

                if (learn_frames < 5)
                {
                    teplateR = get_template(EYE, eyearea_right, 24);
                    teplateL = get_template(EYE, eyearea_left, 24);
                    learn_frames++;
                }
                else
                {
                    // Learning finished, use the new templates for template matching
                    match_eye(eyearea_right, teplateR, method);
                    match_eye(eyearea_left, teplateL, method);
                }

                // cut eye areas and put them to zoom windows
                Imgproc.Resize(mRgba.Submat(eyearea_left), mZoomWindow2, mZoomWindow2.Size());
                Imgproc.Resize(mRgba.Submat(eyearea_right), mZoomWindow, mZoomWindow.Size());
            }

            /*
             * mRgba = inputFrame.Rgba();
             * Log.Info(TAG, "called OnCameraFrame");
             *
             * if (mIsColorSelected)
             * {
             *  mDetector.Process(mRgba);
             *  List<MatOfPoint> contours = mDetector.Contours;
             *  Log.Error(TAG, "Contours count: " + contours.Count);
             *  Imgproc.DrawContours(mRgba, contours, -1, CONTOUR_COLOR);
             *
             *  Mat colorLabel = mRgba.Submat(4, 68, 4, 68);
             *  colorLabel.SetTo(mBlobColorRgba);
             *
             *  Mat spectrumLabel = mRgba.Submat(4, 4 + mSpectrum.Rows(), 70, 70 + mSpectrum.Cols());
             *  mSpectrum.CopyTo(spectrumLabel);
             * }
             */

            return mRgba;
        }
Code Example #5
        private void match_eye(Rect area, Mat mTemplate, int type)
        {
            Point matchLoc;
            Mat   mROI        = mGray.Submat(area);
            int   result_cols = mROI.Cols() - mTemplate.Cols() + 1;
            int   result_rows = mROI.Rows() - mTemplate.Rows() + 1;

            // Check for bad template size
            if (mTemplate.Cols() == 0 || mTemplate.Rows() == 0)
            {
                return;
            }

            // Note: MatchTemplate reallocates the result Mat to the required CV_32F size,
            // so the initial dimensions and type here are only placeholders.
            Mat mResult = new Mat(result_rows, result_cols, CvType.Cv8u);

            switch (type)
            {
            case TM_SQDIFF:
                Imgproc.MatchTemplate(mROI, mTemplate, mResult, Imgproc.TmSqdiff);
                break;

            case TM_SQDIFF_NORMED:
                Imgproc.MatchTemplate(mROI, mTemplate, mResult, Imgproc.TmSqdiffNormed);
                break;

            case TM_CCOEFF:
                Imgproc.MatchTemplate(mROI, mTemplate, mResult, Imgproc.TmCcoeff);
                break;

            case TM_CCOEFF_NORMED:
                Imgproc.MatchTemplate(mROI, mTemplate, mResult, Imgproc.TmCcoeffNormed);
                break;

            case TM_CCORR:
                Imgproc.MatchTemplate(mROI, mTemplate, mResult, Imgproc.TmCcorr);
                break;

            case TM_CCORR_NORMED:
                Imgproc.MatchTemplate(mROI, mTemplate, mResult, Imgproc.TmCcorrNormed);
                break;
            }

            Core.MinMaxLocResult mmres = Core.MinMaxLoc(mResult);

            // The matching methods differ: for the SQDIFF variants the best match is the minimum, otherwise the maximum.
            if (type == TM_SQDIFF || type == TM_SQDIFF_NORMED)
            {
                matchLoc = mmres.MinLoc;
            }
            else
            {
                matchLoc = mmres.MaxLoc;
            }

            Point matchLoc_tx = new Point(matchLoc.X + area.X, matchLoc.Y + area.Y);
            Point matchLoc_ty = new Point(matchLoc.X + mTemplate.Cols() + area.X,
                                          matchLoc.Y + mTemplate.Rows() + area.Y);

            Imgproc.Rectangle(mRgba, matchLoc_tx, matchLoc_ty, new Scalar(255, 255, 0, 255));
            Rect rec = new Rect(matchLoc_tx, matchLoc_ty);
        }
Code Example #6
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            Mat  rgba     = inputFrame.Rgba();
            Size sizeRgba = rgba.Size();

            Mat rgbaInnerWindow;

            int rows = (int)sizeRgba.Height;
            int cols = (int)sizeRgba.Width;

            int left = cols / 8;
            int top  = rows / 8;

            int width  = cols * 3 / 4;
            int height = rows * 3 / 4;

            switch (ImageManipulationsActivity.viewMode)
            {
            case ImageManipulationsActivity.VIEW_MODE_RGBA:
                break;

            case ImageManipulationsActivity.VIEW_MODE_HIST:
                Mat hist     = new Mat();
                int thikness = (int)(sizeRgba.Width / (mHistSizeNum + 10) / 5);
                if (thikness > 5)
                {
                    thikness = 5;
                }
                int offset = (int)((sizeRgba.Width - (5 * mHistSizeNum + 4 * 10) * thikness) / 2);
                // RGB
                for (int c = 0; c < 3; c++)
                {
                    Imgproc.CalcHist(Arrays.AsList(rgba).Cast <Mat>().ToList(), mChannels[c], mMat0, hist, mHistSize, mRanges);
                    Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                    hist.Get(0, 0, mBuff);
                    for (int h = 0; h < mHistSizeNum; h++)
                    {
                        mP1.X = mP2.X = offset + (c * (mHistSizeNum + 10) + h) * thikness;
                        mP1.Y = sizeRgba.Height - 1;
                        mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                        Imgproc.Line(rgba, mP1, mP2, mColorsRGB[c], thikness);
                    }
                }
                // Value and Hue
                Imgproc.CvtColor(rgba, mIntermediateMat, Imgproc.ColorRgb2hsvFull);
                // Value
                Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[2], mMat0, hist, mHistSize, mRanges);
                Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                hist.Get(0, 0, mBuff);
                for (int h = 0; h < mHistSizeNum; h++)
                {
                    mP1.X = mP2.X = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
                    mP1.Y = sizeRgba.Height - 1;
                    mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                    Imgproc.Line(rgba, mP1, mP2, mWhilte, thikness);
                }
                // Hue
                Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[0], mMat0, hist, mHistSize, mRanges);
                Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                hist.Get(0, 0, mBuff);
                for (int h = 0; h < mHistSizeNum; h++)
                {
                    mP1.X = mP2.X = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
                    mP1.Y = sizeRgba.Height - 1;
                    mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                    Imgproc.Line(rgba, mP1, mP2, mColorsHue[h], thikness);
                }
                break;

            case ImageManipulationsActivity.VIEW_MODE_CANNY:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
                Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_SOBEL:
                Mat gray            = inputFrame.Gray();
                Mat grayInnerWindow = gray.Submat(top, top + height, left, left + width);
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Sobel(grayInnerWindow, mIntermediateMat, CvType.Cv8u, 1, 1);
                Core.ConvertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
                Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
                grayInnerWindow.Release();
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_SEPIA:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Core.Transform(rgbaInnerWindow, rgbaInnerWindow, mSepiaKernel);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_ZOOM:
                Mat zoomCorner  = rgba.Submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);
                Mat mZoomWindow = rgba.Submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
                Imgproc.Resize(mZoomWindow, zoomCorner, zoomCorner.Size());
                Size wsize = mZoomWindow.Size();
                Imgproc.Rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.Width - 2, wsize.Height - 2), new Scalar(255, 0, 0, 255), 2);
                zoomCorner.Release();
                mZoomWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Resize(rgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.InterNearest);
                Imgproc.Resize(mIntermediateMat, rgbaInnerWindow, rgbaInnerWindow.Size(), 0.0, 0.0, Imgproc.InterNearest);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
                /*
                 * Imgproc.cvtColor(rgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
                 * Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
                 * Imgproc.cvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
                 */
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
                rgbaInnerWindow.SetTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
                Core.ConvertScaleAbs(rgbaInnerWindow, mIntermediateMat, 1.0 / 16, 0);
                Core.ConvertScaleAbs(mIntermediateMat, rgbaInnerWindow, 16, 0);
                rgbaInnerWindow.Release();
                break;
            }

            return rgba;
        }
Code Example #7
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            //mRgba = inputFrame.Rgba();

            Bitmap bitmap = BitmapFactory.DecodeResource(Resources, Resource.Drawable.test2);
            Mat    mat    = new Mat();

            Android.Utils.BitmapToMat(bitmap, mat);
            Imgproc.Resize(mat, mRgba, mRgba.Size());
            mat.Release();
            bitmap.Recycle();
            bitmap.Dispose();

            // Start

            // Sign detection
            Mat binSign;

            mSignDetector.Update(mRgba, out binSign);

            // Filter image based on color to find markings
            Mat bin = mLaneMarkFilter.FilterMarkings(mRgba);

            // Generate bird eye view
            float marginX = 0.42f;
            float marginY = 0.65f;

            Mat a, b, birdsEyeView;

            mTransformer.GetBirdEye(bin, mRgba, marginX, marginY, out a, out b, out birdsEyeView);

            // Scale to mini bird view and draw to origin
            Mat birdEyeMiniView = new Mat(birdsEyeView.Size(), CvType.Cv8uc4);// new Mat(birdsEyeView.Height() / 2, birdsEyeView.Width() / 2, mRgba.Type(), new Scalar(0, 255, 0, 255));

            Imgproc.CvtColor(birdsEyeView, birdEyeMiniView, Imgproc.ColorGray2bgra);
            Imgproc.Resize(birdEyeMiniView, birdEyeMiniView, new Size(birdsEyeView.Cols() / 2, birdsEyeView.Rows() / 2));
            birdEyeMiniView.CopyTo(mRgba.RowRange(0, birdsEyeView.Rows() / 2).ColRange(0, birdsEyeView.Cols() / 2));

            List <Core.Rect> rects = mSignDetector.GetSignRects();

            SignDetector.SignType[] types = mSignDetector.GetSignTypes();
            int iRect = 0;

            foreach (var rect in rects)
            {
                if (types[iRect] != SignDetector.SignType.None)
                {
                    Imgproc.Rectangle(mRgba, new Core.Point(rect.X, rect.Y), new Core.Point(rect.X + rect.Width, rect.Y + rect.Height), new Scalar(255, 0, 0, 255), 3);
                }
                iRect++;
            }
            //Imgproc.Resize(binSign, binSign, new Size(mRgba.Cols() / 2, mRgba.Rows() / 2));
            //Mat binSignMini = new Mat(binSign.Size(), CvType.Cv8uc4);
            //Imgproc.CvtColor(binSign, binSignMini, Imgproc.ColorGray2bgra);
            //binSignMini.CopyTo(mRgba.RowRange(0, mRgba.Rows() / 2).ColRange(mRgba.Cols() / 2, mRgba.Cols()));

            // End

            // Release
            birdsEyeView.Release();
            birdEyeMiniView.Release();
            a.Release();
            b.Release();
            bin.Release();

            return mRgba;
        }
Code Example #8
 public Mat DrawRectangle(Mat mat, Rect rect, Scalar colour)
 {
     // Draw the rectangle with a 2 px margin, using the supplied colour.
     Imgproc.Rectangle(mat, new Point(rect.X - 2, rect.Y - 2), new Point(rect.X + rect.Width + 2, rect.Y + rect.Height + 2), colour);
     return mat;
 }
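
A hypothetical call, passing the colour explicitly; frame and faceRect are placeholders for a camera Mat and a Rect returned by DetectMultiScale, and the Scalar follows the RGBA convention used in the other examples:

     // Usage sketch: outline a detected rectangle in red with a 2 px margin.
     Mat outlined = DrawRectangle(frame, faceRect, new Scalar(255, 0, 0, 255));
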
Code Example #9
        /// <summary>
        /// Run a sliding-window algorithm on the bird's-eye view to find the two sides of the lane.
        /// </summary>
        /// <param name="birdEye">Bird's-eye image.</param>
        /// <param name="res">Result of the windowing.</param>
        /// <param name="windows">Number of stacked windows.</param>
        public void FitLinesInSlidingWindows(Mat birdEye, out Mat res, int windows)
        {
            LeftPoints  = new List <Point>();
            RightPoints = new List <Point>();

            // erode by 2x2 kernel
            Imgproc.Erode(birdEye, birdEye, Imgproc.GetStructuringElement(Imgproc.MorphRect, new Size(2, 2)));

            // allocate res with the same size and type as birdEye
            res = new Mat(birdEye.Size(), birdEye.Type());

            // convert to BGR for result drawing
            Imgproc.CvtColor(birdEye, res, Imgproc.ColorGray2bgr);

            // crop the bottom half of the bird's-eye image
            Mat cropped = new Mat(birdEye, new Rect(0, birdEye.Height() / 2, birdEye.Width(), birdEye.Height() / 2));

            // find left and right starting point
            int left, right;

            SlidingWindowsStartLoc(cropped, out left, out right);

            // current window locations
            int currentWindowLeft  = left;
            int currentWindowRight = right;

            // window settings & buffer
            int margin    = 100;
            int minpix    = 140;
            int winHeight = birdEye.Height() / windows;

            // calculate windows
            for (int i = 0; i < windows; i++)
            {
                // calculate window size and location
                int  winYhigh     = birdEye.Height() - i * winHeight;
                int  winXleftLow  = currentWindowLeft - margin;
                int  winXrightLow = currentWindowRight - margin;
                Rect leftRect     = new Rect(winXleftLow, winYhigh - winHeight, margin * 2, winHeight);
                Rect rightRect    = new Rect(winXrightLow, winYhigh - winHeight, margin * 2, winHeight);
                Imgproc.Rectangle(res, new Point(leftRect.X, leftRect.Y), new Point(leftRect.X + leftRect.Width, leftRect.Y + leftRect.Height), new Scalar(20, 20, 255), 3);
                Imgproc.Rectangle(res, new Point(rightRect.X, rightRect.Y), new Point(rightRect.X + rightRect.Width, rightRect.Y + rightRect.Height), new Scalar(20, 20, 255), 3);
                int goodLeft;
                int goodRight;

                // save position
                LeftPoints.Add(new Point(winXleftLow + margin, winYhigh - (winHeight / 2)));
                RightPoints.Add(new Point(winXrightLow + margin, winYhigh - (winHeight / 2)));

                Mat birdEyeROI;

                birdEyeROI = birdEye.Submat(leftRect);
                goodLeft   = Core.Core.CountNonZero(birdEyeROI);

                birdEyeROI = birdEye.Submat(rightRect);
                goodRight  = Core.Core.CountNonZero(birdEyeROI);

                if (goodLeft > minpix)
                {
                    // recenter
                    birdEyeROI        = birdEye.Submat(leftRect);
                    currentWindowLeft = CenterOfLine(birdEyeROI) + leftRect.X;
                }
                if (goodRight > minpix)
                {
                    // recenter
                    birdEyeROI         = birdEye.Submat(rightRect);
                    currentWindowRight = CenterOfLine(birdEyeROI) + rightRect.X;
                }
            }

            // Draw midpoints
            foreach (Point p in LeftPoints)
            {
                //res.Draw(new Rectangle(new Point((int)p.X, (int)p.Y), new Size(20, 20)), new Bgr(50, 50, 230), 12);
                Imgproc.Rectangle(res, new Point((int)p.X, (int)p.Y), new Point(p.X + 20, p.Y + 20), new Scalar(50, 50, 230), 12);
            }
            foreach (Point p in RightPoints)
            {
                //res.Draw(new Rectangle(new Point((int)p.X, (int)p.Y), new Size(20, 20)), new Bgr(50, 50, 230), 12);
                Imgproc.Rectangle(res, new Point((int)p.X, (int)p.Y), new Point(p.X + 20, p.Y + 20), new Scalar(50, 50, 230), 12);
            }
            BirdEye = res;
        }
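
A hypothetical call, assuming laneDetector is an instance of the class that declares this method and birdsEyeView is the single-channel bird's-eye Mat produced in Code Example #7:

            // Usage sketch (placeholder names): fit the lane lines with 10 stacked windows.
            Mat windowed;
            laneDetector.FitLinesInSlidingWindows(birdsEyeView, out windowed, 10);
            // laneDetector.LeftPoints / RightPoints now hold one window centre per level,
            // and the annotated result is also kept in laneDetector.BirdEye.
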
Code Example #10
File: ImageOP.cs  Project: MagicTheAppering/MagicApp
        /// <summary>
        /// Finds text-like regions in the image and outlines them on a downsampled copy.
        /// </summary>
        /// <param name="img">Input bitmap.</param>
        public static Bitmap detectTextRect(Bitmap img)
        {
            //Mats for the images
            Mat large = new Mat();
            Mat small = new Mat();
            Mat rgb   = new Mat();

            //Convert the bitmap to a Mat
            Utils.BitmapToMat(img, large);

            // downsample and use it for processing
            Imgproc.PyrDown(large, rgb);

            //Grey
            Imgproc.CvtColor(rgb, small, Imgproc.ColorBgr2gray);

            //Gradient
            Mat  grad        = new Mat();
            Size morphsize   = new Size(3.0, 3.0);
            Mat  morphKernel = Imgproc.GetStructuringElement(Imgproc.MorphEllipse, morphsize);

            Imgproc.MorphologyEx(small, grad, Imgproc.MorphGradient, morphKernel);

            //Binarize
            Mat bw = new Mat();

            Imgproc.Threshold(grad, bw, 0.0, 255.0, Imgproc.ThreshBinary | Imgproc.ThreshOtsu);

            // connect horizontally oriented regions
            Mat  connected   = new Mat();
            Size connectsize = new Size(9.0, 1.0);

            morphKernel = Imgproc.GetStructuringElement(Imgproc.MorphRect, connectsize);
            Imgproc.MorphologyEx(bw, connected, Imgproc.MorphClose, morphKernel);

            // find contours
            Mat mask = Mat.Zeros(bw.Size(), CvType.Cv8uc1);

            JavaList <MatOfPoint> contours = new JavaList <MatOfPoint>();
            Mat hierarchy = new Mat();

            OpenCV.Core.Point contourPoint = new OpenCV.Core.Point(0, 0);

            Imgproc.FindContours(connected, contours, hierarchy, Imgproc.RetrCcomp, Imgproc.ChainApproxSimple, contourPoint);

            Scalar zero        = new Scalar(0, 0, 0);
            Scalar contourscal = new Scalar(255, 255, 255);

            Scalar rectScalar = new Scalar(0, 255, 0);



            //Variables
            OpenCV.Core.Rect rect;
            Mat    maskROI;
            double r;

            double[] contourInfo;

            for (int i = 0; i >= 0;)
            {
                rect = Imgproc.BoundingRect(contours[i]);

                maskROI = new Mat(mask, rect);
                maskROI.SetTo(zero);

                //fill the contour
                Imgproc.DrawContours(mask, contours, i, contourscal, Core.Filled);

                // ratio of non-zero pixels in the filled region
                r = (double)Core.CountNonZero(maskROI) / (rect.Width * rect.Height);

                /* assume at least 45% of the area is filled if it contains text */
                /* constraints on region size */

                /* these two conditions alone are not very robust. better to use something
                 * like the number of significant peaks in a horizontal projection as a third condition */
                if (r > .45 && (rect.Height > 8 && rect.Width > 8))
                {
                    Imgproc.Rectangle(rgb, rect.Br(), rect.Tl(), rectScalar, 2);
                }

                //Determine the next element (the next contour at this hierarchy level)
                contourInfo = hierarchy.Get(0, i);
                i           = (int)contourInfo[0];
            }


            Bitmap resultrgb;

            resultrgb = Bitmap.CreateBitmap(rgb.Cols(), rgb.Rows(), Bitmap.Config.Argb8888);
            Utils.MatToBitmap(rgb, resultrgb);



            return resultrgb;
        }
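
A hypothetical caller, assuming the ImageOP class name implied by the file name above; the drawable id and the target ImageView are placeholders:

            // Usage sketch: run the text-region detector on a bundled drawable
            // and show the annotated, downsampled result.
            Bitmap input  = BitmapFactory.DecodeResource(Resources, Resource.Drawable.sample);  // placeholder resource
            Bitmap marked = ImageOP.detectTextRect(input);
            imageView.SetImageBitmap(marked);  // imageView is an ImageView from the layout
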