Пример #1
0
        public void OnCameraViewStarted(int width, int height)
        {
            // Scratch mats reused across frames.
            mIntermediateMat = new Mat();
            mSize0 = new Size();
            mMat0 = new Mat();

            // Histogram configuration: one channel selector per plane,
            // a reusable bin buffer, bin count, and value range [0, 256).
            mChannels = new MatOfInt[] { new MatOfInt(0), new MatOfInt(1), new MatOfInt(2) };
            mBuff = new float[mHistSizeNum];
            mHistSize = new MatOfInt(mHistSizeNum);
            mRanges = new MatOfFloat(0f, 256f);

            // Bar colors for the R/G/B histogram groups.
            mColorsRGB = new Scalar[]
            {
                new Scalar(200, 0, 0, 255),
                new Scalar(0, 200, 0, 255),
                new Scalar(0, 0, 200, 255)
            };

            // 25-entry palette used to tint the hue-histogram bars.
            mColorsHue = new Scalar[]
            {
                new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
                new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255),
                new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
                new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255),
                new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255)
            };

            mWhilte = Scalar.All(255);

            // Endpoints reused when drawing histogram bars.
            mP1 = new Point();
            mP2 = new Point();

            // 4x4 sepia color-transform kernel; each row produces one
            // output channel as a weighted sum of the input channels.
            mSepiaKernel = new Mat(4, 4, CvType.Cv32f);
            mSepiaKernel.Put(0, 0, /* R */ 0.189f, 0.769f, 0.393f, 0f);
            mSepiaKernel.Put(1, 0, /* G */ 0.168f, 0.686f, 0.349f, 0f);
            mSepiaKernel.Put(2, 0, /* B */ 0.131f, 0.534f, 0.272f, 0f);
            mSepiaKernel.Put(3, 0, /* A */ 0.000f, 0.000f, 0.000f, 1f);
        }
        /// <summary>
        /// Applies a Gaussian blur to the input image and returns the result.
        /// </summary>
        /// <param name="mat">Source image; not modified.</param>
        /// <param name="kernelSize">
        /// Side length of the square blur kernel (should be odd and positive,
        /// per OpenCV's GaussianBlur contract). Defaults to 3, the value the
        /// original hard-coded implementation used.
        /// </param>
        /// <returns>A newly allocated blurred Mat.</returns>
        public Mat GaussianBlur(Mat mat, int kernelSize = 3)
        {
            Mat output = new Mat();
            var kernel = new Size(kernelSize, kernelSize);

            // Sigma of 0 tells OpenCV to derive sigma from the kernel size.
            Imgproc.GaussianBlur(mat, output, kernel, 0);
            return(output);
        }
 public void OnCameraViewStarted(int width, int height)
 {
     // RGBA frame buffer sized to the camera preview (rows = height, cols = width).
     mRgba = new Mat(height, width, CvType.Cv8uc4);

     // Detector and the mat used to display the selected color spectrum.
     mDetector = new ColorBlobDetector();
     mSpectrum = new Mat();

     // Placeholder blob colors until a real color is picked.
     mBlobColorRgba = new Scalar(255);
     mBlobColorHsv = new Scalar(255);

     // Fixed spectrum preview size and contour draw color (red, opaque).
     SPECTRUM_SIZE = new Size(200, 64);
     CONTOUR_COLOR = new Scalar(255, 0, 0, 255);
 }
        private Rect GetRoi(Mat mat)
        {
            // Scan box spans roughly two thirds of the width and one sixth of
            // the height. The width is computed as w - w/3 (NOT 2*w/3) to keep
            // the original integer-division rounding behavior.
            var roiWidth  = mat.Width() - (mat.Width() / 3);
            var roiHeight = mat.Height() / 6;

            // Anchor: one sixth in from the left, slightly above vertical center.
            var roiX = mat.Width() / 6;
            var roiY = (mat.Height() / 2) - (mat.Height() / 10);

            return(new Rect(new Point(roiX, roiY), new Size(roiWidth, roiHeight)));
        }
 /// <summary>
 /// Initializes the calibrator for the given camera frame size, seeding the
 /// camera matrix, distortion coefficients and calibration flags.
 /// </summary>
 /// <param name="width">Frame width in pixels.</param>
 /// <param name="height">Frame height in pixels.</param>
 public CameraCalibrator(int width, int height)
 {
     mImageSize = new Size(width, height);

     // Calibration flags are bit flags: combine them with bitwise OR.
     // The original used arithmetic '+', which only happens to work while
     // no two flag values share a bit.
     mFlags     = Calib3d.Calib3d.CalibFixPrincipalPoint |
                  Calib3d.Calib3d.CalibZeroTangentDist |
                  Calib3d.Calib3d.CalibFixAspectRatio |
                  Calib3d.Calib3d.CalibFixK4 |
                  Calib3d.Calib3d.CalibFixK5;

     // Seed the camera matrix with identity and set element (0,0) to 1.0
     // (the fixed-aspect-ratio starting value).
     Mat.Eye(3, 3, CvType.Cv64fc1).CopyTo(mCameraMatrix);
     mCameraMatrix.Put(0, 0, 1.0);

     // Start with all five distortion coefficients at zero.
     Mat.Zeros(5, 1, CvType.Cv64fc1).CopyTo(mDistortionCoefficients);

     Log.Info(TAG, "Instantiated new " + GetType().ToString());

     // Total number of inner corners in the calibration pattern.
     mCornersSize = (int)(mPatternSize.Width * mPatternSize.Height);
 }
        /// <summary>
        /// Allocates per-session state when the camera preview starts:
        /// the RGBA frame buffer, lane/sign detectors and drawing helpers.
        /// </summary>
        /// <param name="width">Preview width in pixels.</param>
        /// <param name="height">Preview height in pixels.</param>
        public void OnCameraViewStarted(int width, int height)
        {
            mRgba          = new Mat(height, width, CvType.Cv8uc4);
            mSpectrum      = new Mat();
            mBlobColorRgba = new Scalar(255);
            mBlobColorHsv  = new Scalar(255);
            SPECTRUM_SIZE  = new Size(200, 64);
            CONTOUR_COLOR  = new Scalar(255, 0, 0, 255);

            // Detection pipeline. Note: the original assigned mDetector twice
            // (once near the top, once here); the redundant first allocation
            // has been removed.
            mDetector       = new LaneDetector();
            mLaneMarkFilter = new LaneMarkingsFilter();
            mTransformer    = new PerspectiveTransformer();

            mSignDetector = new SignDetector();
        }
Пример #7
0
        /// <summary>
        /// Converts a bitmap to a blurred, binarized (black-and-white) image.
        /// Pipeline: grayscale -> box blur -> binary threshold.
        /// </summary>
        /// <param name="img">Source bitmap.</param>
        /// <param name="threshValue1">Binary threshold value (pixels above it become 255).</param>
        /// <param name="blurValue">Side length of the square blur kernel.</param>
        /// <returns>The processed bitmap, or null if OpenCV processing threw.</returns>
        public static Bitmap greyImg(Bitmap img, double threshValue1, double blurValue)
        {
            // Convert the bitmap into an OpenCV matrix.
            Mat imgMat = new Mat();
            Utils.BitmapToMat(img, imgMat);

            //-----------------Image processing---------------------

            // Blur kernel size and anchor (0,0 anchors at the kernel's top-left).
            Size s = new Size(blurValue, blurValue);
            OpenCV.Core.Point p = new OpenCV.Core.Point(0, 0);

            // Intermediate mats. OpenCV reallocates output mats to the correct
            // size/type, so the original hard-coded 10x10 placeholder mats (and
            // the unused 'imgresult' mat) were unnecessary and have been removed.
            Bitmap bmp       = null;
            Mat    tmpgrey   = new Mat();
            Mat    tmpblur   = new Mat();
            Mat    tmpthresh = new Mat();

            try
            {
                // Grayscale
                Imgproc.CvtColor(imgMat, tmpgrey, Imgproc.ColorBgr2gray, 4);

                // Blur
                Imgproc.Blur(tmpgrey, tmpblur, s, p);

                // Threshold (original fixed value was 90; now caller-supplied)
                Imgproc.Threshold(tmpblur, tmpthresh, threshValue1, 255, Imgproc.ThreshBinary);

                bmp = Bitmap.CreateBitmap(tmpthresh.Cols(), tmpthresh.Rows(), Bitmap.Config.Argb8888);
                Utils.MatToBitmap(tmpthresh, bmp);
            }
            catch (CvException e)
            {
                // Best-effort: log and return null rather than crash the caller.
                System.Console.WriteLine(e.Message);
            }

            return(bmp);
        }
Пример #8
0
        /// <summary>
        /// Per-frame camera callback. Applies the effect selected via
        /// ImageManipulationsActivity.viewMode to the frame (mostly in place,
        /// through submat views into the RGBA frame) and returns it for display.
        /// </summary>
        /// <param name="inputFrame">Current camera frame (RGBA and gray views).</param>
        /// <returns>The RGBA frame, possibly modified in place.</returns>
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            Mat  rgba     = inputFrame.Rgba();
            Size sizeRgba = rgba.Size();

            Mat rgbaInnerWindow;

            int rows = (int)sizeRgba.Height;
            int cols = (int)sizeRgba.Width;

            // Centered "inner window": most effects operate only on the
            // middle 3/4 of the frame (from 1/8 to 7/8 in each dimension).
            int left = cols / 8;
            int top  = rows / 8;

            int width  = cols * 3 / 4;
            int height = rows * 3 / 4;

            switch (ImageManipulationsActivity.viewMode)
            {
            // Pass-through: display the raw RGBA frame.
            case ImageManipulationsActivity.VIEW_MODE_RGBA:
                break;

            // Overlay R/G/B, value and hue histograms as bar groups.
            case ImageManipulationsActivity.VIEW_MODE_HIST:
                Mat hist     = new Mat();
                // Bar thickness derived from frame width, capped at 5 px.
                int thikness = (int)(sizeRgba.Width / (mHistSizeNum + 10) / 5);
                if (thikness > 5)
                {
                    thikness = 5;
                }
                // Left offset that horizontally centers the five bar groups.
                int offset = (int)((sizeRgba.Width - (5 * mHistSizeNum + 4 * 10) * thikness) / 2);
                // RGB
                for (int c = 0; c < 3; c++)
                {
                    Imgproc.CalcHist(Arrays.AsList(rgba).Cast <Mat>().ToList(), mChannels[c], mMat0, hist, mHistSize, mRanges);
                    // Scale bins so the tallest bar is half the frame height.
                    Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                    hist.Get(0, 0, mBuff);
                    for (int h = 0; h < mHistSizeNum; h++)
                    {
                        // Bars grow upward from the bottom edge of the frame.
                        mP1.X = mP2.X = offset + (c * (mHistSizeNum + 10) + h) * thikness;
                        mP1.Y = sizeRgba.Height - 1;
                        mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                        Imgproc.Line(rgba, mP1, mP2, mColorsRGB[c], thikness);
                    }
                }
                // Value and Hue
                Imgproc.CvtColor(rgba, mIntermediateMat, Imgproc.ColorRgb2hsvFull);
                // Value (HSV channel 2), drawn in white
                Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[2], mMat0, hist, mHistSize, mRanges);
                Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                hist.Get(0, 0, mBuff);
                for (int h = 0; h < mHistSizeNum; h++)
                {
                    mP1.X = mP2.X = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
                    mP1.Y = sizeRgba.Height - 1;
                    mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                    Imgproc.Line(rgba, mP1, mP2, mWhilte, thikness);
                }
                // Hue (HSV channel 0), each bar tinted from the hue palette
                Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[0], mMat0, hist, mHistSize, mRanges);
                Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                hist.Get(0, 0, mBuff);
                for (int h = 0; h < mHistSizeNum; h++)
                {
                    mP1.X = mP2.X = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
                    mP1.Y = sizeRgba.Height - 1;
                    mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                    Imgproc.Line(rgba, mP1, mP2, mColorsHue[h], thikness);
                }
                break;

            // Canny edges, written back into the inner window (a view into rgba,
            // so the conversion modifies the displayed frame in place).
            case ImageManipulationsActivity.VIEW_MODE_CANNY:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
                Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
                rgbaInnerWindow.Release();
                break;

            // Sobel derivative of the gray frame, amplified 10x for visibility.
            case ImageManipulationsActivity.VIEW_MODE_SOBEL:
                Mat gray            = inputFrame.Gray();
                Mat grayInnerWindow = gray.Submat(top, top + height, left, left + width);
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Sobel(grayInnerWindow, mIntermediateMat, CvType.Cv8u, 1, 1);
                Core.ConvertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
                Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
                grayInnerWindow.Release();
                rgbaInnerWindow.Release();
                break;

            // Sepia: per-pixel linear color transform using the 4x4 kernel
            // built in OnCameraViewStarted.
            case ImageManipulationsActivity.VIEW_MODE_SEPIA:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Core.Transform(rgbaInnerWindow, rgbaInnerWindow, mSepiaKernel);
                rgbaInnerWindow.Release();
                break;

            // Magnify the central region into the top-left corner and outline
            // the source region with a red rectangle.
            case ImageManipulationsActivity.VIEW_MODE_ZOOM:
                Mat zoomCorner  = rgba.Submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);
                Mat mZoomWindow = rgba.Submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
                Imgproc.Resize(mZoomWindow, zoomCorner, zoomCorner.Size());
                Size wsize = mZoomWindow.Size();
                Imgproc.Rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.Width - 2, wsize.Height - 2), new Scalar(255, 0, 0, 255), 2);
                zoomCorner.Release();
                mZoomWindow.Release();
                break;

            // Pixelize: shrink to 10% with nearest-neighbor, then blow back up.
            case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Resize(rgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.InterNearest);
                Imgproc.Resize(mIntermediateMat, rgbaInnerWindow, rgbaInnerWindow.Size(), 0.0, 0.0, Imgproc.InterNearest);
                rgbaInnerWindow.Release();
                break;

            // Posterize: black out Canny edges, then quantize colors by
            // dividing and re-multiplying by 16.
            case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
                /*
                 * Imgproc.cvtColor(rgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
                 * Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
                 * Imgproc.cvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
                 */
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
                rgbaInnerWindow.SetTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
                Core.ConvertScaleAbs(rgbaInnerWindow, mIntermediateMat, 1.0 / 16, 0);
                Core.ConvertScaleAbs(mIntermediateMat, rgbaInnerWindow, 16, 0);
                rgbaInnerWindow.Release();
                break;
            }

            return(rgba);
        }
Пример #9
0
        /// <summary>
        /// Finds text-like regions in a bitmap (morphological gradient ->
        /// Otsu binarization -> horizontal closing -> contours), then runs
        /// OCR on each accepted region and concatenates the recognized text.
        /// </summary>
        /// <param name="img">Source bitmap.</param>
        /// <returns>Concatenated OCR output of all accepted regions (may be empty).</returns>
        public static async Task <string> detectAndExtractText(Bitmap img)
        {
            // Mats for the image pipeline.
            Mat large = new Mat();
            Mat small = new Mat();
            Mat rgb   = new Mat();

            // Convert the bitmap into a matrix.
            Utils.BitmapToMat(img, large);

            // downsample and use it for processing
            Imgproc.PyrDown(large, rgb);

            // Grayscale
            Imgproc.CvtColor(rgb, small, Imgproc.ColorBgr2gray);

            // Morphological gradient: highlights character strokes/edges.
            Mat  grad        = new Mat();
            Size morphsize   = new Size(3.0, 3.0);
            Mat  morphKernel = Imgproc.GetStructuringElement(Imgproc.MorphEllipse, morphsize);

            Imgproc.MorphologyEx(small, grad, Imgproc.MorphGradient, morphKernel);

            // Binarize with Otsu's automatic threshold (threshold arg of 0 is ignored).
            Mat bw = new Mat();

            Imgproc.Threshold(grad, bw, 0.0, 255.0, Imgproc.ThreshBinary | Imgproc.ThreshOtsu);

            // connect horizontally oriented regions
            // (wide flat 9x1 kernel merges adjacent characters into word blobs)
            Mat  connected   = new Mat();
            Size connectsize = new Size(9.0, 1.0);

            morphKernel = Imgproc.GetStructuringElement(Imgproc.MorphRect, connectsize);
            Imgproc.MorphologyEx(bw, connected, Imgproc.MorphClose, morphKernel);

            // find contours
            Mat mask = Mat.Zeros(bw.Size(), CvType.Cv8uc1);

            JavaList <MatOfPoint> contours = new JavaList <MatOfPoint>();
            Mat hierarchy = new Mat();

            OpenCV.Core.Point contourPoint = new OpenCV.Core.Point(0, 0);

            Imgproc.FindContours(connected, contours, hierarchy, Imgproc.RetrCcomp, Imgproc.ChainApproxSimple, contourPoint);

            Scalar zero        = new Scalar(0, 0, 0);
            Scalar contourscal = new Scalar(255, 255, 255);

            // Color for the (currently disabled) debug rectangles below.
            Scalar rectScalar = new Scalar(0, 255, 0);


            OpenCV.Core.Rect rect;
            Mat    maskROI;
            double r;

            double[] contourInfo;

            string resulttext = "";
            string part;

            Bitmap bmpOcr;
            Mat    croppedPart;


            // Walk the top-level contours via the hierarchy's "next" links;
            // the loop ends when the next index becomes -1 (see bottom of loop).
            for (int i = 0; i >= 0;)
            {
                rect = Imgproc.BoundingRect(contours[i]);

                // Clear this contour's region of the shared mask, then fill the
                // contour so we can measure how dense the region is.
                maskROI = new Mat(mask, rect);
                maskROI.SetTo(zero);

                //fill the contour
                Imgproc.DrawContours(mask, contours, i, contourscal, Core.Filled);

                // ratio of non-zero pixels in the filled region
                r = (double)Core.CountNonZero(maskROI) / (rect.Width * rect.Height);

                /* assume at least 45% of the area is filled if it contains text */
                /* constraints on region size */

                /* these two conditions alone are not very robust. better to use something
                 * like the number of significant peaks in a horizontal projection as a third condition */
                if (r > .45 && (rect.Height > 8 && rect.Width > 8))
                {
                    //Imgproc.Rectangle(rgb, rect.Br(), rect.Tl(), rectScalar, 2);
                    try
                    {
                        // Crop the candidate region from the downsampled color
                        // image and hand it to the OCR service.
                        croppedPart = rgb.Submat(rect);

                        bmpOcr = Bitmap.CreateBitmap(croppedPart.Width(), croppedPart.Height(), Bitmap.Config.Argb8888);
                        Utils.MatToBitmap(croppedPart, bmpOcr);

                        part = await OCR.getText(bmpOcr);

                        resulttext = resulttext + part;
                        Console.WriteLine("------------------Durchlauf-------------");
                    }
                    catch (Exception e)
                    {
                        // Best-effort: a failed crop/OCR skips this region only.
                        Android.Util.Log.Debug("Fehler", "cropped part data error " + e.Message);
                    }
                }


                // Advance to the next contour: per OpenCV's hierarchy layout,
                // element [0] of a contour's hierarchy row is the index of the
                // next contour at the same level (-1 when there is none).
                contourInfo = hierarchy.Get(0, i);
                i           = (int)contourInfo[0];
            }


            return(resulttext);
        }