Code example #1
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            int viewMode = mViewMode;

            switch (viewMode)
            {
            case VIEW_MODE_GRAY:
                // input frame has gray scale format
                Imgproc.CvtColor(inputFrame.Gray(), mRgba, Imgproc.ColorGray2rgba, 4);
                break;

            case VIEW_MODE_RGBA:
                // input frame has RGBA format
                mRgba = inputFrame.Rgba();
                break;

            case VIEW_MODE_CANNY:
                // input frame has gray scale format
                mRgba = inputFrame.Rgba();
                Imgproc.Canny(inputFrame.Gray(), mIntermediateMat, 80, 100);
                Imgproc.CvtColor(mIntermediateMat, mRgba, Imgproc.ColorGray2rgba, 4);
                break;

            case VIEW_MODE_FEATURES:
                // input frame has RGBA format
                mRgba = inputFrame.Rgba();
                mGray = inputFrame.Gray();
                FindFeatures(JNIEnv.Handle, JNIEnv.FindClass(typeof(Java.Lang.Object)), mGray.NativeObjAddr, mRgba.NativeObjAddr);
                break;
            }

            return(mRgba);
        }
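
The FindFeatures call above dispatches straight into native code over JNI. A minimal sketch of the P/Invoke declaration such a call assumes, modeled on the OpenCV mixed-processing tutorial (the native library name and JNI entry-point name are assumptions, not taken from this project):

        // requires: using System.Runtime.InteropServices;
        [DllImport("mixed_sample",
                   EntryPoint = "Java_org_opencv_samples_tutorial2_Tutorial2Activity_FindFeatures")]
        static extern void FindFeatures(IntPtr env, IntPtr jclass, long matAddrGray, long matAddrRgba);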
Code example #2
        public DroidCvPattern(Stream Stream, bool MakeMask)
        {
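            // Copy the entire input stream into 'buffer' through a MemoryStream that wraps it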
            var buffer = new byte[Stream.Length];

            using var ms = new MemoryStream(buffer);
            Stream.CopyTo(ms);

            using var raw = new DisposableMat(new MatOfByte(buffer));

            if (MakeMask)
            {
                using var rgbaMat = new DisposableMat(Imgcodecs.Imdecode(raw.Mat, Imgcodecs.CvLoadImageUnchanged));

                Mat = new Mat();
                Imgproc.CvtColor(rgbaMat.Mat, Mat, Imgproc.ColorRgba2gray);

                Mask = new Mat();
                // Extract alpha channel
                Core.ExtractChannel(rgbaMat.Mat, Mask, 3);
                // Mask containing 0 or 255
                Imgproc.Threshold(Mask, Mask, 0, 255, Imgproc.ThreshBinary);
            }
            else
            {
                Mat = Imgcodecs.Imdecode(raw.Mat, Imgcodecs.CvLoadImageGrayscale);
            }
        }
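
For reference, a hypothetical call site for this constructor, assuming the template image ships as an Android asset (the asset name is illustrative):

        // "pattern.png" is a placeholder; Assets requires an Android Context
        using var stream = Assets.Open("pattern.png");
        var pattern = new DroidCvPattern(stream, MakeMask: true);
        // pattern.Mat holds the grayscale template, pattern.Mask the 0/255 alpha mask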
Code example #3
        public static Bitmap getEdgeDetectedImage(this Bitmap src, Filter_Type filter_type)
        {
            Bitmap resizedBitmap = Bitmap.CreateScaledBitmap(src, (src.Width * 256) / src.Height, 256, true);

            OpenCV.Core.Mat resizedMat = new OpenCV.Core.Mat();
            OpenCV.Android.Utils.BitmapToMat(resizedBitmap, resizedMat);

            OpenCV.Core.Mat gaussianMat = new OpenCV.Core.Mat();
            Imgproc.GaussianBlur(resizedMat, gaussianMat, new OpenCV.Core.Size(3, 3), 0, 0);


            OpenCV.Core.Mat grayMat = new OpenCV.Core.Mat();
            Imgproc.CvtColor(gaussianMat, grayMat, Imgproc.ColorRgba2gray); // gray output is single-channel, so no dstCn override

            OpenCV.Core.Mat edgeDetectedMat = new OpenCV.Core.Mat();
            if (filter_type == Filter_Type.CANNY)
            {
                Imgproc.Canny(grayMat, edgeDetectedMat, 100, 100);
            }
            else
            {
                OpenCV.Core.Mat sobelMat = new OpenCV.Core.Mat();
                Imgproc.Sobel(grayMat, sobelMat, CvType.Cv8u, 1, 1);
                Core.ConvertScaleAbs(sobelMat, edgeDetectedMat, 6, 1);
            }


            Bitmap resultBitmap = Bitmap.CreateBitmap(resizedBitmap.Width, resizedBitmap.Height, Bitmap.Config.Argb8888);

            OpenCV.Android.Utils.MatToBitmap(edgeDetectedMat, resultBitmap);

            return(resultBitmap);
        }
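
Sobel with dx = 1 and dy = 1 computes the mixed second derivative, which responds only weakly to plain horizontal or vertical edges. A common alternative, sketched here under the same bindings (not part of the original project), blends separate first derivatives in x and y:

        // Sketch: combine separate x/y gradients instead of the mixed derivative
        OpenCV.Core.Mat gradX = new OpenCV.Core.Mat();
        OpenCV.Core.Mat gradY = new OpenCV.Core.Mat();
        Imgproc.Sobel(grayMat, gradX, CvType.Cv16s, 1, 0);  // d/dx
        Imgproc.Sobel(grayMat, gradY, CvType.Cv16s, 0, 1);  // d/dy
        Core.ConvertScaleAbs(gradX, gradX);                 // back to 8-bit
        Core.ConvertScaleAbs(gradY, gradY);
        Core.AddWeighted(gradX, 0.5, gradY, 0.5, 0, edgeDetectedMat);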
Code example #4
        public void SetHsvColor(Scalar hsvColor)
        {
            double minH = (hsvColor.Val[0] >= mColorRadius.Val[0]) ? hsvColor.Val[0] - mColorRadius.Val[0] : 0;
            double maxH = (hsvColor.Val[0] + mColorRadius.Val[0] <= 255) ? hsvColor.Val[0] + mColorRadius.Val[0] : 255;

            mLowerBound.Val[0] = minH;
            mUpperBound.Val[0] = maxH;

            mLowerBound.Val[1] = hsvColor.Val[1] - mColorRadius.Val[1];
            mUpperBound.Val[1] = hsvColor.Val[1] + mColorRadius.Val[1];

            mLowerBound.Val[2] = hsvColor.Val[2] - mColorRadius.Val[2];
            mUpperBound.Val[2] = hsvColor.Val[2] + mColorRadius.Val[2];

            mLowerBound.Val[3] = 0;
            mUpperBound.Val[3] = 255;

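            // Build a 1-pixel-high strip whose hue sweeps from minH to maxH at
            // full saturation and value; it is converted to RGB below for display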
            Mat spectrumHsv = new Mat(1, (int)(maxH - minH), CvType.Cv8uc3);

            for (int j = 0; j < maxH - minH; j++)
            {
                byte[] tmp = { (byte)(minH + j), (byte)255, (byte)255 };
                spectrumHsv.Put(0, j, tmp);
            }

            Imgproc.CvtColor(spectrumHsv, mSpectrum, Imgproc.ColorHsv2rgbFull, 4);
        }
Code example #5
        void SetImage()
        {
            ImageView iView = FindViewById <ImageView> (Resource.Id.imageView1);

            using (Bitmap img = BitmapFactory.DecodeResource(Resources, Resource.Drawable.lena))
            {
                if (img != null)
                {
                    Mat m = new Mat();
                    grayM = new Mat();

                    Utils.BitmapToMat(img, m);

                    Imgproc.CvtColor(m, grayM, Imgproc.ColorRgba2gray); // BitmapToMat yields RGBA, not BGR

                    Imgproc.CvtColor(grayM, m, Imgproc.ColorGray2rgba);

                    using (Bitmap bm = Bitmap.CreateBitmap(m.Cols(), m.Rows(), Bitmap.Config.Argb8888)) {
                        Utils.MatToBitmap(m, bm);

                        iView.SetImageBitmap(bm);
                    }

                    m.Release();
                    grayM.Release();
                }
            }
        }
Code example #6
        public IPattern TakeScreenshot()
        {
            _superUser.SendCommand($"/system/bin/screencap {_imgPath}");

            using var f      = File.OpenRead(_imgPath);
            using var reader = new BinaryReader(f, Encoding.ASCII);
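            // screencap's raw header: width, height, and pixel format as 32-bit ints;
            // API 26+ adds a fourth header field (the color space), skipped below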
            var w      = reader.ReadInt32();
            var h      = reader.ReadInt32();
            var format = reader.ReadInt32();

            if (Build.VERSION.SdkInt >= BuildVersionCodes.O)
            {
                reader.ReadInt32();
            }

            if (_buffer == null)
            {
                // If format is not RGBA, notify
                if (format != 1)
                {
                    AutomataApi.Toast($"Unexpected raw image format: {format}");
                }

                _buffer      = new byte[w * h * 4];
                _rootLoadMat = new Mat(h, w, CvType.Cv8uc4);
            }

            reader.Read(_buffer, 0, _buffer.Length);

            _rootLoadMat.Put(0, 0, _buffer);

            Imgproc.CvtColor(_rootLoadMat, _rootConvertMat, Imgproc.ColorRgba2gray);

            return(new DroidCvPattern(_rootConvertMat, false));
        }
Code example #7
File: MainActivity.cs Project: albno273/OpenCVTest
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);

            adapter = new TabsAdapter(this, SupportFragmentManager);
            pager   = FindViewById <ViewPager>(Resource.Id.viewpager);
            var tabs = FindViewById <TabLayout>(Resource.Id.tabs);

            pager.Adapter = adapter;
            tabs.SetupWithViewPager(pager);
            pager.OffscreenPageLimit = 3;

            pager.PageSelected += (sender, args) =>
            {
                var fragment = adapter.InstantiateItem(pager, args.Position) as IFragmentVisible;

                fragment?.BecameVisible();
            };

            Toolbar.MenuItemClick += (sender, e) =>
            {
                var intent = new Intent(this, typeof(AddItemActivity));
                StartActivity(intent);
            };

            SupportActionBar.SetDisplayHomeAsUpEnabled(false);
            SupportActionBar.SetHomeButtonEnabled(false);

            var src         = new Mat[2];
            var dst         = new Mat[2];
            var keyPoints1  = new MatOfKeyPoint();
            var keyPoints2  = new MatOfKeyPoint();
            var descriptor1 = new Mat();
            var descriptor2 = new Mat();
            var dmatch      = new MatOfDMatch();
            var output      = new Mat();

            src[0] = Imgcodecs.Imread("path/to/source/1.png");
            src[1] = Imgcodecs.Imread("path/to/source/2.png");
            dst[0] = new Mat();
            dst[1] = new Mat();
            Imgproc.CvtColor(src[0], dst[0], Imgproc.ColorBgr2gray); // Imread loads BGR; a Bayer conversion would need single-channel input
            Imgproc.CvtColor(src[1], dst[1], Imgproc.ColorBgr2gray);

            var akaze    = FeatureDetector.Create(FeatureDetector.Akaze);
            var executor = DescriptorExtractor.Create(DescriptorExtractor.Akaze);

            akaze.Detect(dst[0], keyPoints1);
            akaze.Detect(dst[1], keyPoints2);

            executor.Compute(dst[0], keyPoints1, descriptor1);
            executor.Compute(dst[1], keyPoints2, descriptor2);

            var matcher = DescriptorMatcher.Create(DescriptorMatcher.BruteforceHamming);

            matcher.Match(descriptor1, descriptor2, dmatch);

            Features2d.DrawMatches(src[0], keyPoints1, src[1], keyPoints2, dmatch, output);
        }
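
To display the result, the output Mat can be copied into a Bitmap; a sketch, assuming the layout defines an ImageView (the id imageView is hypothetical):

            // Sketch: render the DrawMatches visualization into an ImageView
            var bmp = Bitmap.CreateBitmap(output.Cols(), output.Rows(), Bitmap.Config.Argb8888);
            Utils.MatToBitmap(output, bmp);
            FindViewById<ImageView>(Resource.Id.imageView).SetImageBitmap(bmp);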
Code example #8
        public Mat ToBGR(Mat mat)
        {
            Mat bgr = new Mat();

            Imgproc.CvtColor(mat, bgr, Imgproc.ColorGray2bgr);

            return(bgr);
        }
Code example #9
        public Mat ToGray(Mat mat)
        {
            Mat gray = new Mat();

            Imgproc.CvtColor(mat, gray, Imgproc.ColorBgr2gray);

            return(gray);
        }
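
Taken together, the two helpers above round-trip between color and grayscale; a hypothetical call site (bgrFrame is illustrative):

        Mat gray = ToGray(bgrFrame); // 3-channel BGR -> single-channel gray
        Mat bgr  = ToBGR(gray);      // gray -> 3-channel BGR, e.g. for colored overlays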
Code example #10
        /// <summary>
        /// Equalizes the histogram of an image and thresholds the result.
        /// </summary>
        /// <param name="src"> Image to eq & thresh </param>
        /// <param name="thresh"> thresh level </param>
        /// <param name="max"> max value to use </param>
        /// <returns> binary image (high pixels are in range) </returns>
        private Mat GetWhiteFromHistogramEq(Mat src, byte thresh, byte max)
        {
            Mat gray = new Mat(src.Size(), src.Type());

            Imgproc.CvtColor(src, gray, Imgproc.ColorBgr2gray);
            Imgproc.EqualizeHist(gray, gray);
            Imgproc.Threshold(gray, gray, thresh, max, Imgproc.ThreshBinary);
            return(gray);
        }
Code example #11
        private Scalar ConvertScalarHsv2Rgba(Scalar hsvColor)
        {
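            // Pack the single HSV color into a 1x1 Mat, convert it, and read the
            // resulting RGBA pixel back out as a Scalar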
            Mat pointMatRgba = new Mat();
            Mat pointMatHsv  = new Mat(1, 1, CvType.Cv8uc3, hsvColor);

            Imgproc.CvtColor(pointMatHsv, pointMatRgba, Imgproc.ColorHsv2rgbFull, 4);

            return(new Scalar(pointMatRgba.Get(0, 0)));
        }
Code example #12
        public bool OnTouch(View v, MotionEvent e)
        {
            int cols = mRgba.Cols();
            int rows = mRgba.Rows();

            int xOffset = (mOpenCvCameraView.Width - cols) / 2;
            int yOffset = (mOpenCvCameraView.Height - rows) / 2;

            int x = (int)e.GetX() - xOffset;
            int y = (int)e.GetY() - yOffset;

            Log.Info("BlobDetection", "Touch image coordinates: (" + x + ", " + y + ")");

            if ((x < 0) || (y < 0) || (x > cols) || (y > rows))
            {
                return(false);
            }

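            // Sample an (up to) 8x8 pixel region around the touch point, clamped to the frame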
            Rect touchedRect = new Rect();

            touchedRect.X = (x > 4) ? x - 4 : 0;
            touchedRect.Y = (y > 4) ? y - 4 : 0;

            touchedRect.Width  = (x + 4 < cols) ? x + 4 - touchedRect.X : cols - touchedRect.X;
            touchedRect.Height = (y + 4 < rows) ? y + 4 - touchedRect.Y : rows - touchedRect.Y;

            Mat touchedRegionRgba = mRgba.Submat(touchedRect);

            Mat touchedRegionHsv = new Mat();

            Imgproc.CvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.ColorRgb2hsvFull);

            // Calculate average color of touched region
            mBlobColorHsv = Core.SumElems(touchedRegionHsv);
            int pointCount = touchedRect.Width * touchedRect.Height;

            for (int i = 0; i < mBlobColorHsv.Val.Count; i++)
            {
                mBlobColorHsv.Val[i] /= pointCount;
            }

            mBlobColorRgba = ConvertScalarHsv2Rgba(mBlobColorHsv);

            Log.Info("BlobDetection", "Touched rgba color: (" + mBlobColorRgba.Val[0] + ", " + mBlobColorRgba.Val[1] +
                     ", " + mBlobColorRgba.Val[2] + ", " + mBlobColorRgba.Val[3] + ")");

            mDetector.SetHsvColor(mBlobColorHsv);

            Imgproc.Resize(mDetector.Spectrum, mSpectrum, SPECTRUM_SIZE);

            mIsColorSelected = true;

            touchedRegionRgba.Release();
            touchedRegionHsv.Release();

            return(false); // don't need subsequent touch events
        }
Code example #13
        /// <summary>
        /// HSV color filter, passes all pixels between the min-max limits.
        /// </summary>
        /// <param name="src"> Image to filter </param>
        /// <param name="min"> lower hsv point </param>
        /// <param name="max"> upper hsv point </param>
        /// <returns> binary image (high pixels are in range) </returns>
        public Mat FilterHSV(Mat src, Scalar min, Scalar max)
        {
            Mat hsv = new Mat(src.Size(), src.Type());

            Imgproc.CvtColor(src, hsv, Imgproc.ColorBgr2hsv);
            Mat result = new Mat(hsv.Size(), CvType.Cv8uc1);

            Core.Core.InRange(hsv, min, max, result);
            hsv.Release();
            return(result);
        }
Code example #14
File: ImageOP.cs Project: MagicTheAppering/MagicApp
        /// <summary>
        /// Converts a bitmap to a blurred, binary-thresholded grayscale bitmap.
        /// </summary>
        /// <param name="img"> input bitmap </param>
        /// <param name="threshValue1"> threshold level </param>
        /// <param name="blurValue"> blur kernel size </param>
        /// <returns> the processed bitmap, or null on error </returns>
        public static Bitmap greyImg(Bitmap img, double threshValue1, double blurValue)
        {
            //Matrix for the image
            Mat imgMat = new Mat();

            //Convert the bitmap to a matrix
            Utils.BitmapToMat(img, imgMat);

            //----------------- Process the image ---------------------

            //Variables
            //Size s = new Size(10.0, 10.0);
            Size s = new Size(blurValue, blurValue);

            OpenCV.Core.Point p = new OpenCV.Core.Point(0, 0);

            //TODO: take the matrix size into account?
            Bitmap bmp       = null;
            Mat    tmpgrey   = new Mat(10, 10, CvType.Cv8uc1, new Scalar(4));
            Mat    tmpblur   = new Mat(10, 10, CvType.Cv8uc1, new Scalar(4));
            Mat    tmpthresh = new Mat(10, 10, CvType.Cv8uc1, new Scalar(4));
            Mat    imgresult = new Mat(10, 10, CvType.Cv8uc1, new Scalar(4));

            try
            {
                //Gray
                Imgproc.CvtColor(imgMat, tmpgrey, Imgproc.ColorRgba2gray); // BitmapToMat yields RGBA; gray output is single-channel

                //Blur
                Imgproc.Blur(tmpgrey, tmpblur, s, p);

                //Thresh
                //Original
                //Imgproc.Threshold(tmpblur, tmpthresh, 90, 255, Imgproc.ThreshBinary);
                Imgproc.Threshold(tmpblur, tmpthresh, threshValue1, 255, Imgproc.ThreshBinary);

                //Contrast
                //tmpthresh.ConvertTo(imgresult, -1, 9.0, 10);

                bmp = Bitmap.CreateBitmap(tmpthresh.Cols(), tmpthresh.Rows(), Bitmap.Config.Argb8888);
                Utils.MatToBitmap(tmpthresh, bmp);
            }
            catch (CvException e) { System.Console.WriteLine(e.Message); }


            return(bmp);
        }
Code example #15
        public SignType Update(Mat src, out Mat bin)
        {
            Mat hsv = new Mat(), gray = new Mat();

            Imgproc.CvtColor(src, hsv, Imgproc.ColorBgr2hsv);
            Imgproc.CvtColor(src, gray, Imgproc.ColorBgr2gray);

            //ip->Binarialize(hsvImg, EColor::BLUE, binImg);
            bin = FilterMarkings(hsv);

            if (Detect(bin))
            {
                Recognize(bin, gray);
            }

            return(SignType.None);
        }
Code example #16
        public void Process(Mat rgbaImage)
        {
            Log.Info(TAG, "Process rgbaImages");

            Imgproc.PyrDown(rgbaImage, mPyrDownMat);
            Imgproc.PyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.CvtColor(mPyrDownMat, mHsvMat, Imgproc.ColorRgb2hsvFull);

            Core.InRange(mHsvMat, mLowerBound, mUpperBound, mMask);
            Imgproc.Dilate(mMask, mDilatedMask, new Mat());

            IList <MatOfPoint> contours = new JavaList <MatOfPoint>();

            Imgproc.FindContours(mDilatedMask, contours, mHierarchy, Imgproc.RetrExternal, Imgproc.ChainApproxSimple);

            // Find max contour area
            double maxArea = 0;

            foreach (var each in contours)
            {
                MatOfPoint wrapper = each;
                double     area    = Imgproc.ContourArea(wrapper);
                if (area > maxArea)
                {
                    maxArea = area;
                }
                Log.Info(TAG, "Process rgbaImages\t-- Imgproc.ContourArea(wrapper)");
            }

            // Filter contours by area and resize to fit the original image size
            mContours.Clear();
            foreach (var each in contours)
            {
                MatOfPoint contour = each;
                if (Imgproc.ContourArea(contour) > mMinContourArea * maxArea)
                {
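                    // Multiply coordinates by 4 to undo the two PyrDown calls above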
                    Core.Multiply(contour, new Scalar(4, 4), contour);
                    mContours.Add(contour);
                    Log.Info(TAG, "Process rgbaImages\t-- mContours.Add(contour)");
                }
            }
        }
Code example #17
        IPattern MatFromImage(Image Image)
        {
            var width  = Image.Width;
            var height = Image.Height;

            var planes = Image.GetPlanes();
            var buffer = planes[0].Buffer;

            if (_readBitmap == null)
            {
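                // Image planes may pad each row; size the reusable bitmap to the
                // padded stride and crop the padding off below if present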
                var pixelStride = planes[0].PixelStride;
                var rowStride   = planes[0].RowStride;
                var rowPadding  = rowStride - pixelStride * width;

                _cropRequired = (rowPadding / pixelStride) != 0;

                _readBitmap = Bitmap.CreateBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.Argb8888);
            }

            _readBitmap.CopyPixelsFromBuffer(buffer);

            if (_cropRequired)
            {
                var correctedBitmap = Bitmap.CreateBitmap(_readBitmap, 0, 0, width, height);
                Org.Opencv.Android.Utils.BitmapToMat(correctedBitmap, _convertedMat);
                // if a temporary Bitmap was created, free its native pixel data right away
                correctedBitmap.Recycle();
            }
            else
            {
                Org.Opencv.Android.Utils.BitmapToMat(_readBitmap, _convertedMat);
            }

            Imgproc.CvtColor(_convertedMat, _colorCorrectedMat, Imgproc.ColorRgba2gray);

            return(new DroidCvPattern(_colorCorrectedMat, false));
        }
Code example #18
File: OpenCVWrapper.cs Project: richardf75/Samples
        private IList <MatOfPoint> ProcessImage()
        {
            Mat grayMat  = new Mat();
            Mat blurMat  = new Mat();
            Mat edgesMat = new Mat();
            Mat final    = new Mat();
            Mat h        = new Mat();

            IList <MatOfPoint> contours = new JavaList <MatOfPoint>();

            OpenCV.Android.Utils.BitmapToMat(originalImage, originalMat);
            originalImage.Dispose();
            Imgproc.CvtColor(originalMat, grayMat, Imgproc.ColorRgba2gray); // BitmapToMat yields RGBA, not BGR
            Imgproc.GaussianBlur(grayMat, blurMat, new OpenCV.Core.Size(3, 3), 0);
            Imgproc.Canny(blurMat, edgesMat, 10, 250);


            Mat kernel = Imgproc.GetStructuringElement(Imgproc.MorphRect, new Size(3, 3));

            Imgproc.MorphologyEx(edgesMat, final, Imgproc.MorphClose, kernel);

            Imgproc.FindContours(final, contours, h, Imgproc.RetrExternal, Imgproc.ChainApproxSimple);
            return(contours);
        }
Code example #19
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            Mat  rgba     = inputFrame.Rgba();
            Size sizeRgba = rgba.Size();

            Mat rgbaInnerWindow;

            int rows = (int)sizeRgba.Height;
            int cols = (int)sizeRgba.Width;

            int left = cols / 8;
            int top  = rows / 8;

            int width  = cols * 3 / 4;
            int height = rows * 3 / 4;

            switch (ImageManipulationsActivity.viewMode)
            {
            case ImageManipulationsActivity.VIEW_MODE_RGBA:
                break;

            case ImageManipulationsActivity.VIEW_MODE_HIST:
                Mat hist     = new Mat();
                int thickness = (int)(sizeRgba.Width / (mHistSizeNum + 10) / 5);
                if (thickness > 5)
                {
                    thickness = 5;
                }
                int offset = (int)((sizeRgba.Width - (5 * mHistSizeNum + 4 * 10) * thickness) / 2);
                // RGB
                for (int c = 0; c < 3; c++)
                {
                    Imgproc.CalcHist(Arrays.AsList(rgba).Cast <Mat>().ToList(), mChannels[c], mMat0, hist, mHistSize, mRanges);
                    Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                    hist.Get(0, 0, mBuff);
                    for (int h = 0; h < mHistSizeNum; h++)
                    {
                        mP1.X = mP2.X = offset + (c * (mHistSizeNum + 10) + h) * thickness;
                        mP1.Y = sizeRgba.Height - 1;
                        mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                        Imgproc.Line(rgba, mP1, mP2, mColorsRGB[c], thickness);
                    }
                }
                // Value and Hue
                Imgproc.CvtColor(rgba, mIntermediateMat, Imgproc.ColorRgb2hsvFull);
                // Value
                Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[2], mMat0, hist, mHistSize, mRanges);
                Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                hist.Get(0, 0, mBuff);
                for (int h = 0; h < mHistSizeNum; h++)
                {
                    mP1.X = mP2.X = offset + (3 * (mHistSizeNum + 10) + h) * thickness;
                    mP1.Y = sizeRgba.Height - 1;
                    mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                    Imgproc.Line(rgba, mP1, mP2, mWhilte, thickness);
                }
                // Hue
                Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[0], mMat0, hist, mHistSize, mRanges);
                Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                hist.Get(0, 0, mBuff);
                for (int h = 0; h < mHistSizeNum; h++)
                {
                    mP1.X = mP2.X = offset + (4 * (mHistSizeNum + 10) + h) * thickness;
                    mP1.Y = sizeRgba.Height - 1;
                    mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                    Imgproc.Line(rgba, mP1, mP2, mColorsHue[h], thickness);
                }
                break;

            case ImageManipulationsActivity.VIEW_MODE_CANNY:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
                Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_SOBEL:
                Mat gray            = inputFrame.Gray();
                Mat grayInnerWindow = gray.Submat(top, top + height, left, left + width);
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Sobel(grayInnerWindow, mIntermediateMat, CvType.Cv8u, 1, 1);
                Core.ConvertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
                Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
                grayInnerWindow.Release();
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_SEPIA:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Core.Transform(rgbaInnerWindow, rgbaInnerWindow, mSepiaKernel);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_ZOOM:
                Mat zoomCorner  = rgba.Submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);
                Mat mZoomWindow = rgba.Submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
                Imgproc.Resize(mZoomWindow, zoomCorner, zoomCorner.Size());
                Size wsize = mZoomWindow.Size();
                Imgproc.Rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.Width - 2, wsize.Height - 2), new Scalar(255, 0, 0, 255), 2);
                zoomCorner.Release();
                mZoomWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Resize(rgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.InterNearest);
                Imgproc.Resize(mIntermediateMat, rgbaInnerWindow, rgbaInnerWindow.Size(), 0.0, 0.0, Imgproc.InterNearest);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
                /*
                 * Imgproc.cvtColor(rgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
                 * Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
                 * Imgproc.cvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
                 */
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
                rgbaInnerWindow.SetTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
                Core.ConvertScaleAbs(rgbaInnerWindow, mIntermediateMat, 1.0 / 16, 0);
                Core.ConvertScaleAbs(mIntermediateMat, rgbaInnerWindow, 16, 0);
                rgbaInnerWindow.Release();
                break;
            }

            return(rgba);
        }
Code example #20
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            //mRgba = inputFrame.Rgba();

            Bitmap bitmap = BitmapFactory.DecodeResource(Resources, Resource.Drawable.test2);
            Mat    mat    = new Mat();

            Android.Utils.BitmapToMat(bitmap, mat);
            Imgproc.Resize(mat, mRgba, mRgba.Size());
            mat.Release();
            bitmap.Recycle();
            bitmap.Dispose();

            // Start

            // Sign detection
            Mat binSign;

            mSignDetector.Update(mRgba, out binSign);

            // Filter image based on color to find markings
            Mat bin = mLaneMarkFilter.FilterMarkings(mRgba);

            // Generate bird eye view
            float marginX = 0.42f;
            float marginY = 0.65f;

            Mat a, b, birdsEyeView;

            mTransformer.GetBirdEye(bin, mRgba, marginX, marginY, out a, out b, out birdsEyeView);

            // Scale down to a mini bird's-eye view and draw it in the top-left corner
            Mat birdEyeMiniView = new Mat(birdsEyeView.Size(), CvType.Cv8uc4);// new Mat(birdsEyeView.Height() / 2, birdsEyeView.Width() / 2, mRgba.Type(), new Scalar(0, 255, 0, 255));

            Imgproc.CvtColor(birdsEyeView, birdEyeMiniView, Imgproc.ColorGray2bgra);
            Imgproc.Resize(birdEyeMiniView, birdEyeMiniView, new Size(birdsEyeView.Cols() / 2, birdsEyeView.Rows() / 2));
            birdEyeMiniView.CopyTo(mRgba.RowRange(0, birdsEyeView.Rows() / 2).ColRange(0, birdsEyeView.Cols() / 2));

            List <Core.Rect> rects = mSignDetector.GetSignRects();

            SignDetector.SignType[] types = mSignDetector.GetSignTypes();
            int iRect = 0;

            foreach (var rect in rects)
            {
                if (types[iRect] != SignDetector.SignType.None)
                {
                    Imgproc.Rectangle(mRgba, new Core.Point(rect.X, rect.Y), new Core.Point(rect.X + rect.Width, rect.Y + rect.Height), new Scalar(255, 0, 0, 255), 3);
                }
                iRect++;
            }
            //Imgproc.Resize(binSign, binSign, new Size(mRgba.Cols() / 2, mRgba.Rows() / 2));
            //Mat binSignMini = new Mat(binSign.Size(), CvType.Cv8uc4);
            //Imgproc.CvtColor(binSign, binSignMini, Imgproc.ColorGray2bgra);
            //binSignMini.CopyTo(mRgba.RowRange(0, mRgba.Rows() / 2).ColRange(mRgba.Cols() / 2, mRgba.Cols()));

            // End

            // Release
            birdsEyeView.Release();
            birdEyeMiniView.Release();
            a.Release();
            b.Release();
            bin.Release();

            return(mRgba);
        }
Code example #21
File: ImageOP.cs Project: MagicTheAppering/MagicApp
        public static async Task <string> detectAndExtractText(Bitmap img)
        {
            //Matrices for the images
            Mat large = new Mat();
            Mat small = new Mat();
            Mat rgb   = new Mat();

            //Convert the bitmap to a matrix
            Utils.BitmapToMat(img, large);

            // downsample and use it for processing
            Imgproc.PyrDown(large, rgb);

            //Grey
            Imgproc.CvtColor(rgb, small, Imgproc.ColorRgba2gray); // BitmapToMat yields RGBA, not BGR

            //Gradient
            Mat  grad        = new Mat();
            Size morphsize   = new Size(3.0, 3.0);
            Mat  morphKernel = Imgproc.GetStructuringElement(Imgproc.MorphEllipse, morphsize);

            Imgproc.MorphologyEx(small, grad, Imgproc.MorphGradient, morphKernel);

            //Binarize
            Mat bw = new Mat();

            Imgproc.Threshold(grad, bw, 0.0, 255.0, Imgproc.ThreshBinary | Imgproc.ThreshOtsu);

            // connect horizontally oriented regions
            Mat  connected   = new Mat();
            Size connectsize = new Size(9.0, 1.0);

            morphKernel = Imgproc.GetStructuringElement(Imgproc.MorphRect, connectsize);
            Imgproc.MorphologyEx(bw, connected, Imgproc.MorphClose, morphKernel);

            // find contours
            Mat mask = Mat.Zeros(bw.Size(), CvType.Cv8uc1);

            JavaList <MatOfPoint> contours = new JavaList <MatOfPoint>();
            Mat hierarchy = new Mat();

            OpenCV.Core.Point contourPoint = new OpenCV.Core.Point(0, 0);

            Imgproc.FindContours(connected, contours, hierarchy, Imgproc.RetrCcomp, Imgproc.ChainApproxSimple, contourPoint);

            Scalar zero        = new Scalar(0, 0, 0);
            Scalar contourscal = new Scalar(255, 255, 255);

            Scalar rectScalar = new Scalar(0, 255, 0);


            OpenCV.Core.Rect rect;
            Mat    maskROI;
            double r;

            double[] contourInfo;

            string resulttext = "";
            string part;

            Bitmap bmpOcr;
            Mat    croppedPart;


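            // Walk the top-level contours via the hierarchy: hierarchy.Get(0, i)[0]
            // holds the index of the next contour at the same level (-1 ends the loop)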
            for (int i = 0; i >= 0;)
            {
                rect = Imgproc.BoundingRect(contours[i]);

                maskROI = new Mat(mask, rect);
                maskROI.SetTo(zero);

                //fill the contour
                Imgproc.DrawContours(mask, contours, i, contourscal, Core.Filled);

                // ratio of non-zero pixels in the filled region
                r = (double)Core.CountNonZero(maskROI) / (rect.Width * rect.Height);

                /* assume at least 45% of the area is filled if it contains text */
                /* constraints on region size */

                /* these two conditions alone are not very robust. better to use something
                 * like the number of significant peaks in a horizontal projection as a third condition */
                if (r > .45 && (rect.Height > 8 && rect.Width > 8))
                {
                    //Imgproc.Rectangle(rgb, rect.Br(), rect.Tl(), rectScalar, 2);
                    try
                    {
                        croppedPart = rgb.Submat(rect);

                        bmpOcr = Bitmap.CreateBitmap(croppedPart.Width(), croppedPart.Height(), Bitmap.Config.Argb8888);
                        Utils.MatToBitmap(croppedPart, bmpOcr);

                        part = await OCR.getText(bmpOcr);

                        resulttext = resulttext + part;
                        Console.WriteLine("------------------Durchlauf-------------");
                    }
                    catch (Exception e)
                    {
                        Android.Util.Log.Debug("Fehler", "cropped part data error " + e.Message);
                    }
                }


                //Determine the next contour
                contourInfo = hierarchy.Get(0, i);
                i           = (int)contourInfo[0];
            }


            return(resulttext);
        }
Code example #22
        public override bool OnOptionsItemSelected(IMenuItem item)
        {
            Log.Info(Tag, "Menu Item selected " + item);

            if (item == _itemPickPhoto)
            {
                var imageIntent = new Intent();
                imageIntent.SetType("image/*");
                imageIntent.SetAction(Intent.ActionGetContent);
                StartActivityForResult(Intent.CreateChooser(imageIntent, "Select photo"), 0);
            }
            else if (item == _itemGray)
            {
                // Grayscale image
                _gray = new Mat(_raw.Height(), _raw.Width(), CvType.Cv8uc1); // Mat(rows, cols, type)
                Imgproc.CvtColor(_raw, _gray, Imgproc.ColorRgb2gray);
                ShowImage(_gray);
            }
            else if (item == _itemThreshold)
            {
                // Binarize
                _threshold = new Mat(_image.Width, _image.Height, CvType.Cv8uc1);
                Imgproc.Threshold(_gray, _threshold, 168, 255, Imgproc.ThreshBinary);
                ShowImage(_threshold);
            }
            else if (item == _itemFindContours)
            {
                // Find the largest connected region
                IList <MatOfPoint> contours = new JavaList <MatOfPoint>();
                Mat hierarchy = new Mat();
                var target    = _threshold.Clone();
                Imgproc.FindContours(target, contours, hierarchy, Imgproc.RetrExternal, Imgproc.ChainApproxNone);

                MatOfPoint max = new MatOfPoint();
                double     contour_area_max = 0;
                if (contours.Any())
                {
                    foreach (var contour in contours)
                    {
                        var contour_area_temp = Math.Abs(Imgproc.ContourArea(contour));
                        if (contour_area_temp > contour_area_max)
                        {
                            contour_area_max = contour_area_temp;
                            max = contour;
                        }
                    }
                }

                var last = new JavaList <MatOfPoint>();
                last.Add(max);

                Imgproc.DrawContours(_raw, last, -1, new Scalar(255, 0, 0));

                ShowImage(_raw);
            }
            else if (item == _itemCreateTrimap)
            {
                // Generate the trimap; for now a pre-made image is used instead
                var imageIntent = new Intent();
                imageIntent.SetType("image/*");
                imageIntent.SetAction(Intent.ActionGetContent);
                StartActivityForResult(Intent.CreateChooser(imageIntent, "Select photo"), 1);
            }
            else if (item == _itemSharedMatting)
            {
                // Matting (foreground extraction)
                var sharedMatting = new SharedMatting();
                sharedMatting.SetImage(_raw);
                sharedMatting.SetTrimap(_trimap);
                sharedMatting.SolveAlpha();
            }

            return(base.OnOptionsItemSelected(item));
        }
Code example #23
        /// <summary>
        /// Run a sliding window algorithm on the bird eye view to find the 2 sides of the lane.
        /// </summary>
        /// <param name="birdEye">bird eye image</param>
        /// <param name="res"> result of windowing </param>
        /// <param name="windows"> number of stacked windows </param>
        public void FitLinesInSlidingWindows(Mat birdEye, out Mat res, int windows)
        {
            LeftPoints  = new List <Point>();
            RightPoints = new List <Point>();

            // erode by 2x2 kernel
            Imgproc.Erode(birdEye, birdEye, Imgproc.GetStructuringElement(Imgproc.MorphRect, new Size(2, 2)));

            // allocate res with the same size and type as the bird eye image
            res = new Mat(birdEye.Size(), birdEye.Type());

            // convert to BGR for result drawing
            Imgproc.CvtColor(birdEye, res, Imgproc.ColorGray2bgr);

            // crop the bottom half of the bird eye image
            Mat cropped = new Mat(birdEye, new Rect(0, birdEye.Height() / 2, birdEye.Width(), birdEye.Height() / 2));

            // find left and right starting point
            int left, right;

            SlidingWindowsStartLoc(cropped, out left, out right);

            // current window locations
            int currentWindowLeft  = left;
            int currentWindowRight = right;

            // window settings & buffer
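            // margin = half window width; minpix = nonzero-pixel count needed to recenter a window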
            int margin    = 100;
            int minpix    = 140;
            int winHeight = birdEye.Height() / windows;

            // calculate windows
            for (int i = 0; i < windows; i++)
            {
                // calculate window size and location
                int  winYhigh     = birdEye.Height() - i * winHeight;
                int  winXleftLow  = currentWindowLeft - margin;
                int  winXrightLow = currentWindowRight - margin;
                Rect leftRect     = new Rect(winXleftLow, winYhigh - winHeight, margin * 2, winHeight);
                Rect rightRect    = new Rect(winXrightLow, winYhigh - winHeight, margin * 2, winHeight);
                Imgproc.Rectangle(res, new Point(leftRect.X, leftRect.Y), new Point(leftRect.X + leftRect.Width, leftRect.Y + leftRect.Height), new Scalar(20, 20, 255), 3);
                Imgproc.Rectangle(res, new Point(rightRect.X, rightRect.Y), new Point(rightRect.X + rightRect.Width, rightRect.Y + rightRect.Height), new Scalar(20, 20, 255), 3);
                int goodLeft;
                int goodRight;

                // save position
                LeftPoints.Add(new Point(winXleftLow + margin, winYhigh - (winHeight / 2)));
                RightPoints.Add(new Point(winXrightLow + margin, winYhigh - (winHeight / 2)));

                Mat birdEyeROI;

                birdEyeROI = birdEye.Submat(leftRect);
                goodLeft   = Core.Core.CountNonZero(birdEyeROI);

                birdEyeROI = birdEye.Submat(rightRect);
                goodRight  = Core.Core.CountNonZero(birdEyeROI);

                if (goodLeft > minpix)
                {
                    // recenter
                    birdEyeROI        = birdEye.Submat(leftRect);
                    currentWindowLeft = CenterOfLine(birdEyeROI) + leftRect.X;
                }
                if (goodRight > minpix)
                {
                    // recenter
                    birdEyeROI         = birdEye.Submat(rightRect);
                    currentWindowRight = CenterOfLine(birdEyeROI) + rightRect.X;
                }
            }

            // Draw midpoints
            foreach (Point p in LeftPoints)
            {
                //res.Draw(new Rectangle(new Point((int)p.X, (int)p.Y), new Size(20, 20)), new Bgr(50, 50, 230), 12);
                Imgproc.Rectangle(res, new Point((int)p.X, (int)p.Y), new Point(p.X + 20, p.Y + 20), new Scalar(50, 50, 230), 12);
            }
            foreach (Point p in RightPoints)
            {
                //res.Draw(new Rectangle(new Point((int)p.X, (int)p.Y), new Size(20, 20)), new Bgr(50, 50, 230), 12);
                Imgproc.Rectangle(res, new Point((int)p.X, (int)p.Y), new Point(p.X + 20, p.Y + 20), new Scalar(50, 50, 230), 12);
            }
            BirdEye = res;
        }
Code example #24
        public Mat OnCameraFrame(Mat frame)
        {
            // Only done on the first frame:
            // if the ROI is not initialized yet, compute it from the frame;
            // on later frames the cached ROI is simply reused.
            if (roi == null)
            {
                roi = GetRoi(frame);
            }

            // TODO: provide these in a global config file
            var contourMinHeight = frame.Height() / 34;
            var contourMaxHeight = frame.Height() / 4;

            // Initial rotation because camera starts in landscape mode.
            var input = _openCV.Rotate(frame, -90);


            rgba = input.Clone();
            var submat      = input.Submat(roi);
            var submatClone = submat.Clone();

            rgba = _openCV.DrawRectangle(rgba, roi, new Scalar(0, 255, 0));

            // Turn image black and white.
            var gray = _openCV.ToGray(submat);

            // Brightness and contrast correction trough Histogram Equalization
            var equalized = _openCV.EqualizeHistogram(gray);

            // Blur image to reduce noise.
            var blur = _openCV.GaussianBlur(equalized);

            // Detect blobs using Otsu Tresholding
            var blobs = _openCV.OtsuThresh(blur);

            // Find contours in the image (clone first, because this version of OpenCV modifies the input image).
            var clone    = blobs.Clone();
            var contours = _openCV.FindContours(clone);

            // Filter contours based on size -> then by Y position and height of their bounding boxes.
            var contoursBySize  = _openCV.FilterContoursBySize(contours.Item1, contourMinHeight, contourMaxHeight);
            var alignedContours = _openCV.FilterContoursByYPosition(contoursBySize.Item1, contoursBySize.Item2);

            // Draw bounding boxes on input image for user visualization.
            var withBoundingBoxes = _openCV.DrawBoundingBoxes(submatClone.Clone(), alignedContours.Item2);

            withBoundingBoxes.CopyTo(rgba.Submat(roi));

            // Discard the frame if fewer than 8 matching contours are found. We want all the digits on the gas meter before processing.
            // TODO: if gas meters have fewer than 8 digits, then what?
            if (alignedContours.Item2.Count != 8)
            {
                return(rgba);
            }

            // Prepare output for OCR and stop the camera feed.
            try
            {
                var digits      = new List <Stream>();
                var image       = new Mat();
                var digitsClone = new List <Stream>();

                // Transform the BGR image to RGBA
                Imgproc.CvtColor(submatClone, submatClone, Imgproc.ColorBgr2rgba, 4);

                image = submatClone;

                // Sort digit bounding boxes left to right
                var sorted = _openCV.SortRects(alignedContours.Item2);

                // Cut each digit individually based on bounding box.
                foreach (Rect rect in sorted)
                {
                    digits.Add(_openCV.MatToStream(new Mat(blobs, rect)));
                    digitsClone.Add(_openCV.MatToStream(new Mat(blobs, rect)));
                }

                // TODO: Crop output image to region of interest when that is implemented.
                // Return digits and final image to display on confirmation page.
                MessagingCenter.Send(new CameraResultMessage {
                    DigitsClone = digitsClone, Digits = digits, Image = _openCV.MatToStream(image)
                }, CameraResultMessage.Key);

                // Stop the camera feed and close the page
                input.Release();
                submat.Release();
                gray.Release();
                equalized.Release();
                blur.Release();
                blobs.Release();
                clone.Release();
                contours.Item2.Release();
                submatClone.Release();
                withBoundingBoxes.Release();
                image.Release();
                Finish();
            }
            catch (Exception)
            {
                // Processing the rectangles sometimes fails with an exception; such frames are discarded and the feed continues.
                return(rgba);
            }

            return(rgba);
        }