Code Example #1
 public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
 {
     mRGBA  = inputFrame.Rgba();
     // Rotate the frame 90 degrees: transpose, then flip around the vertical axis
     mRGBAT = mRGBA.T();
     Core.Flip(mRGBAT, mRGBAT, 1);
     // Scale the rotated frame back to the original preview size
     Imgproc.Resize(mRGBAT, mRGBAT, mRGBA.Size());
     return mRGBAT;
 }
Code Example #2
        public bool OnTouch(View v, MotionEvent e)
        {
            int cols = mRgba.Cols();
            int rows = mRgba.Rows();

            int xOffset = (mOpenCvCameraView.Width - cols) / 2;
            int yOffset = (mOpenCvCameraView.Height - rows) / 2;

            int x = (int)e.GetX() - xOffset;
            int y = (int)e.GetY() - yOffset;

            Log.Info("BlobDetection", "Touch image coordinates: (" + x + ", " + y + ")");

            if ((x < 0) || (y < 0) || (x > cols) || (y > rows))
            {
                return(false);
            }

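            // Sample a small (at most 8x8 pixel) rectangle around the touch point, clamped to the frame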
            Rect touchedRect = new Rect();

            touchedRect.X = (x > 4) ? x - 4 : 0;
            touchedRect.Y = (y > 4) ? y - 4 : 0;

            touchedRect.Width  = (x + 4 < cols) ? x + 4 - touchedRect.X : cols - touchedRect.X;
            touchedRect.Height = (y + 4 < rows) ? y + 4 - touchedRect.Y : rows - touchedRect.Y;

            Mat touchedRegionRgba = mRgba.Submat(touchedRect);

            Mat touchedRegionHsv = new Mat();

            Imgproc.CvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.ColorRgb2hsvFull);

            // Calculate average color of touched region
            mBlobColorHsv = Core.SumElems(touchedRegionHsv);
            int pointCount = touchedRect.Width * touchedRect.Height;

            for (int i = 0; i < mBlobColorHsv.Val.Count; i++)
            {
                mBlobColorHsv.Val[i] /= pointCount;
            }

            mBlobColorRgba = ConvertScalarHsv2Rgba(mBlobColorHsv);

            Log.Info("BlobDetection", "Touched rgba color: (" + mBlobColorRgba.Val[0] + ", " + mBlobColorRgba.Val[1] +
                     ", " + mBlobColorRgba.Val[2] + ", " + mBlobColorRgba.Val[3] + ")");

            mDetector.SetHsvColor(mBlobColorHsv);

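            // Scale the detector's spectrum image to the fixed SPECTRUM_SIZE (presumably used to draw it as an overlay)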
            Imgproc.Resize(mDetector.Spectrum, mSpectrum, SPECTRUM_SIZE);

            mIsColorSelected = true;

            touchedRegionRgba.Release();
            touchedRegionHsv.Release();

            return(false); // don't need subsequent touch events
        }
Code Example #3
File: ImageOP.cs  Project: MagicTheAppering/MagicApp
        /// <summary>
        /// Resizes an image by the given scale factors
        /// </summary>
        /// <param name="img">Image to resize</param>
        /// <param name="width">Horizontal scale factor (new width = old width * width)</param>
        /// <param name="height">Vertical scale factor (new height = old height * height)</param>
        /// <returns>Resized image</returns>
        public static Bitmap resizeImage(Bitmap img, double width, double height)
        {
            // Source and destination matrices for the image
            Mat src = new Mat();
            Mat dst = new Mat();

            // Convert the bitmap to a matrix
            Utils.BitmapToMat(img, src);
            // Resize (downsample or upsample) by the given scale factors
            Imgproc.Resize(src, dst, new Size(src.Size().Width * width, src.Size().Height * height));
            // Convert the result matrix back to a bitmap
            Bitmap img1 = Bitmap.CreateBitmap(dst.Cols(), dst.Rows(), Bitmap.Config.Argb8888);
            Utils.MatToBitmap(dst, img1);

            // Release the native matrices now that the bitmap holds the result
            src.Release();
            dst.Release();

            return img1;
        }
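
A minimal call-site sketch for the helper above, assuming the containing class is named ImageOP (as the file name suggests) and that an Android.Graphics.Bitmap called photo is already loaded; both parameters are scale factors, so passing 0.5 for each halves the width and height.

        // Hypothetical usage of resizeImage: shrink an existing bitmap to half its size
        Bitmap half = ImageOP.resizeImage(photo, 0.5, 0.5);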
Code Example #4
        public void Recognize(Mat bin, Mat gray)
        {
            mTypes = new SignType[mRects.Count];

            int iRect = 0;

            foreach (Core.Rect rect in mRects)
            {
                // Crop
                Mat graySign = new Mat(gray, rect);
                Imgproc.Resize(graySign, graySign, new Size(SIGN_WIDTH, SIGN_HEIGHT));

                // Compute HOG descriptor
                MatOfFloat descriptors = new MatOfFloat();
                mHog.Compute(graySign, descriptors);

                // Copy the HOG descriptor into a CV_32F matrix and transpose it into a
                // single-row sample before classifying it with the trained SVM
                Mat fm = new Mat(descriptors.Size(), CvType.Cv32f);
                descriptors.ConvertTo(fm, CvType.Cv32f);
                mTypes[iRect] = (SignType)(int)mSvm.Predict(fm.T());
                iRect++;
            }
        }
Code Example #5
 // Resizes this object's source matrix (the enclosing class's `Mat` member) into Target,
 // converting the caller's Size into an OpenCV Size and using area interpolation
 void Resize(Mat Target, Size Size)
 {
     Imgproc.Resize(Mat, Target, new Org.Opencv.Core.Size(Size.Width, Size.Height), 0, 0, Imgproc.InterArea);
 }
Code Example #6
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            mRgba = inputFrame.Rgba();
            mGray = inputFrame.Gray();

            if (mAbsoluteFaceSize == 0)
            {
                int height = mGray.Rows();
                if (Math.Round(height * mRelativeFaceSize) > 0)
                {
                    mAbsoluteFaceSize = (int)Math.Round(height * mRelativeFaceSize);
                }
            }

            if (mZoomWindow == null || mZoomWindow2 == null)
            {
                CreateAuxiliaryMats();
            }

            MatOfRect faces = new MatOfRect();

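            // Run the cascade classifier on the grayscale frame; faces smaller than mAbsoluteFaceSize are ignored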
            if (FACE != null)
            {
                FACE.DetectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                      new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
            }

            Rect[] facesArray = faces.ToArray();
            for (int i = 0; i < facesArray.Length; i++)
            {
                Imgproc.Rectangle(mRgba, facesArray[i].Tl(), facesArray[i].Br(), FACE_RECT_COLOR, 3);
                xCenter = (facesArray[i].X + facesArray[i].Width + facesArray[i].X) / 2;
                yCenter = (facesArray[i].Y + facesArray[i].Y + facesArray[i].Height) / 2;
                Point center = new Point(xCenter, yCenter);

                Imgproc.Circle(mRgba, center, 10, new Scalar(255, 0, 0, 255), 3);

                /* --- NOT NEEDED
                 * Imgproc.putText(mRgba, "[" + center.X + "," + center.Y + "]",
                 *      new Point(center.X + 20, center.Y + 20),
                 *      Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255));
                 */

                Rect r = facesArray[i];

                // compute the eye area
                Rect eyearea = new Rect(r.X + r.Width / 8,
                                        (int)(r.Y + (r.Height / 4.5)), r.Width - 2 * r.Width / 8, (int)(r.Height / 3.0));

                // split it
                Rect eyearea_right = new Rect(r.X + r.Width / 16, (int)(r.Y + (r.Height / 4.5)),
                                              (r.Width - 2 * r.Width / 16) / 2, (int)(r.Height / 3.0));

                Rect eyearea_left = new Rect(r.X + r.Width / 16 + (r.Width - 2 * r.Width / 16) / 2,
                                             (int)(r.Y + (r.Height / 4.5)), (r.Width - 2 * r.Width / 16) / 2, (int)(r.Height / 3.0));

                // draw the eye areas on the RGBA preview (draw on mGray instead to mark the
                // working grayscale mat)
                Imgproc.Rectangle(mRgba, eyearea_left.Tl(), eyearea_left.Br(), new Scalar(255, 0, 0, 255), 2);
                Imgproc.Rectangle(mRgba, eyearea_right.Tl(), eyearea_right.Br(), new Scalar(255, 0, 0, 255), 2);

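                // Learn eye templates from the first few frames, then switch to template matching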
                if (learn_frames < 5)
                {
                    teplateR = get_template(EYE, eyearea_right, 24);
                    teplateL = get_template(EYE, eyearea_left, 24);
                    learn_frames++;
                }
                else
                {
                    // Learning finished, use the new templates for template matching
                    match_eye(eyearea_right, teplateR, method);
                    match_eye(eyearea_left, teplateL, method);
                }

                // cut eye areas and put them to zoom windows
                Imgproc.Resize(mRgba.Submat(eyearea_left), mZoomWindow2, mZoomWindow2.Size());
                Imgproc.Resize(mRgba.Submat(eyearea_right), mZoomWindow, mZoomWindow.Size());
            }

            /*
             * mRgba = inputFrame.Rgba();
             * Log.Info(TAG, "called OnCameraFrame");
             *
             * if (mIsColorSelected)
             * {
             *  mDetector.Process(mRgba);
             *  List<MatOfPoint> contours = mDetector.Contours;
             *  Log.Error(TAG, "Contours count: " + contours.Count);
             *  Imgproc.DrawContours(mRgba, contours, -1, CONTOUR_COLOR);
             *
             *  Mat colorLabel = mRgba.Submat(4, 68, 4, 68);
             *  colorLabel.SetTo(mBlobColorRgba);
             *
             *  Mat spectrumLabel = mRgba.Submat(4, 4 + mSpectrum.Rows(), 70, 70 + mSpectrum.Cols());
             *  mSpectrum.CopyTo(spectrumLabel);
             * }
             */

            return(mRgba);
        }
Code Example #7
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            Mat  rgba     = inputFrame.Rgba();
            Size sizeRgba = rgba.Size();

            Mat rgbaInnerWindow;

            int rows = (int)sizeRgba.Height;
            int cols = (int)sizeRgba.Width;

            int left = cols / 8;
            int top  = rows / 8;

            int width  = cols * 3 / 4;
            int height = rows * 3 / 4;

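            // Most view modes operate on a centered inner window covering 3/4 of the frame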
            switch (ImageManipulationsActivity.viewMode)
            {
            case ImageManipulationsActivity.VIEW_MODE_RGBA:
                break;

            case ImageManipulationsActivity.VIEW_MODE_HIST:
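                // Draw per-channel RGB, value, and hue histograms as vertical bars along the bottom of the frame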
                Mat hist     = new Mat();
                int thikness = (int)(sizeRgba.Width / (mHistSizeNum + 10) / 5);
                if (thikness > 5)
                {
                    thikness = 5;
                }
                int offset = (int)((sizeRgba.Width - (5 * mHistSizeNum + 4 * 10) * thikness) / 2);
                // RGB
                for (int c = 0; c < 3; c++)
                {
                    Imgproc.CalcHist(Arrays.AsList(rgba).Cast <Mat>().ToList(), mChannels[c], mMat0, hist, mHistSize, mRanges);
                    Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                    hist.Get(0, 0, mBuff);
                    for (int h = 0; h < mHistSizeNum; h++)
                    {
                        mP1.X = mP2.X = offset + (c * (mHistSizeNum + 10) + h) * thikness;
                        mP1.Y = sizeRgba.Height - 1;
                        mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                        Imgproc.Line(rgba, mP1, mP2, mColorsRGB[c], thikness);
                    }
                }
                // Value and Hue
                Imgproc.CvtColor(rgba, mIntermediateMat, Imgproc.ColorRgb2hsvFull);
                // Value
                Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[2], mMat0, hist, mHistSize, mRanges);
                Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                hist.Get(0, 0, mBuff);
                for (int h = 0; h < mHistSizeNum; h++)
                {
                    mP1.X = mP2.X = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
                    mP1.Y = sizeRgba.Height - 1;
                    mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                    Imgproc.Line(rgba, mP1, mP2, mWhilte, thikness);
                }
                // Hue
                Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[0], mMat0, hist, mHistSize, mRanges);
                Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
                hist.Get(0, 0, mBuff);
                for (int h = 0; h < mHistSizeNum; h++)
                {
                    mP1.X = mP2.X = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
                    mP1.Y = sizeRgba.Height - 1;
                    mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                    Imgproc.Line(rgba, mP1, mP2, mColorsHue[h], thikness);
                }
                break;

            case ImageManipulationsActivity.VIEW_MODE_CANNY:
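                // Edge-detect the inner window with Canny and convert the single-channel result back to a 4-channel image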
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
                Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_SOBEL:
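                // Apply a Sobel derivative to the gray inner window, amplify it, and write it back as a 4-channel image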
                Mat gray            = inputFrame.Gray();
                Mat grayInnerWindow = gray.Submat(top, top + height, left, left + width);
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Sobel(grayInnerWindow, mIntermediateMat, CvType.Cv8u, 1, 1);
                Core.ConvertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
                Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
                grayInnerWindow.Release();
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_SEPIA:
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Core.Transform(rgbaInnerWindow, rgbaInnerWindow, mSepiaKernel);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_ZOOM:
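                // Magnify the central region of the frame into a zoom window in the top-left corner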
                Mat zoomCorner  = rgba.Submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);
                Mat mZoomWindow = rgba.Submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
                Imgproc.Resize(mZoomWindow, zoomCorner, zoomCorner.Size());
                Size wsize = mZoomWindow.Size();
                Imgproc.Rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.Width - 2, wsize.Height - 2), new Scalar(255, 0, 0, 255), 2);
                zoomCorner.Release();
                mZoomWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
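                // Pixelize the inner window: shrink it with nearest-neighbor interpolation, then scale it back up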
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Resize(rgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.InterNearest);
                Imgproc.Resize(mIntermediateMat, rgbaInnerWindow, rgbaInnerWindow.Size(), 0.0, 0.0, Imgproc.InterNearest);
                rgbaInnerWindow.Release();
                break;

            case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
                /*
                 * Imgproc.cvtColor(rgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
                 * Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
                 * Imgproc.cvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
                 */
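                // Posterize the inner window: black out Canny edges, then quantize each channel to multiples of 16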
                rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
                Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
                rgbaInnerWindow.SetTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
                Core.ConvertScaleAbs(rgbaInnerWindow, mIntermediateMat, 1.0 / 16, 0);
                Core.ConvertScaleAbs(mIntermediateMat, rgbaInnerWindow, 16, 0);
                rgbaInnerWindow.Release();
                break;
            }

            return(rgba);
        }
Code Example #8
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            //mRgba = inputFrame.Rgba();

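            // Use a static test image from resources instead of the live camera frame, scaled to the frame size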
            Bitmap bitmap = BitmapFactory.DecodeResource(Resources, Resource.Drawable.test2);
            Mat    mat    = new Mat();

            Android.Utils.BitmapToMat(bitmap, mat);
            Imgproc.Resize(mat, mRgba, mRgba.Size());
            mat.Release();
            bitmap.Recycle();
            bitmap.Dispose();

            // Start

            // Sign detection
            Mat binSign;

            mSignDetector.Update(mRgba, out binSign);

            // Filter image based on color to find markings
            Mat bin = mLaneMarkFilter.FilterMarkings(mRgba);

            // Generate bird eye view
            float marginX = 0.42f;
            float marginY = 0.65f;

            Mat a, b, birdsEyeView;

            mTransformer.GetBirdEye(bin, mRgba, marginX, marginY, out a, out b, out birdsEyeView);

            // Scale to mini bird view and draw to origin
            Mat birdEyeMiniView = new Mat(birdsEyeView.Size(), CvType.Cv8uc4);// new Mat(birdsEyeView.Height() / 2, birdsEyeView.Width() / 2, mRgba.Type(), new Scalar(0, 255, 0, 255));

            Imgproc.CvtColor(birdsEyeView, birdEyeMiniView, Imgproc.ColorGray2bgra);
            Imgproc.Resize(birdEyeMiniView, birdEyeMiniView, new Size(birdsEyeView.Cols() / 2, birdsEyeView.Rows() / 2));
            birdEyeMiniView.CopyTo(mRgba.RowRange(0, birdsEyeView.Rows() / 2).ColRange(0, birdsEyeView.Cols() / 2));

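            // Draw a rectangle around every detected sign that was classified as a known type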
            List <Core.Rect> rects = mSignDetector.GetSignRects();

            SignDetector.SignType[] types = mSignDetector.GetSignTypes();
            int iRect = 0;

            foreach (var rect in rects)
            {
                if (types[iRect] != SignDetector.SignType.None)
                {
                    Imgproc.Rectangle(mRgba, new Core.Point(rect.X, rect.Y), new Core.Point(rect.X + rect.Width, rect.Y + rect.Height), new Scalar(255, 0, 0, 255), 3);
                }
                iRect++;
            }
            //Imgproc.Resize(binSign, binSign, new Size(mRgba.Cols() / 2, mRgba.Rows() / 2));
            //Mat binSignMini = new Mat(binSign.Size(), CvType.Cv8uc4);
            //Imgproc.CvtColor(binSign, binSignMini, Imgproc.ColorGray2bgra);
            //binSignMini.CopyTo(mRgba.RowRange(0, mRgba.Rows() / 2).ColRange(mRgba.Cols() / 2, mRgba.Cols()));

            // End

            // Release
            birdsEyeView.Release();
            birdEyeMiniView.Release();
            a.Release();
            b.Release();
            bin.Release();

            return(mRgba);
        }