コード例 #1
0
 // Strips the background from the current camera frame using the MOG2
 // background subtractor, then optionally smooths the result.
 IEnumerator BackgroundSubtraction()
 {
     // MOG2 works on a 3-channel image, so drop the alpha channel first.
     Imgproc.cvtColor(openCVCreateMat.rgbaMat, openCVCreateMat.rgbMat, Imgproc.COLOR_RGBA2RGB);

     // Foreground mask is non-zero where the subtractor saw change.
     backgroundSubstractorMOG2.apply(openCVCreateMat.rgbMat, openCVCreateMat.fgmaskMat);

     // Invert so the mask selects the background pixels instead...
     Core.bitwise_not(openCVCreateMat.fgmaskMat, openCVCreateMat.fgmaskMat);

     // ...and blank those pixels out (transparent black) in the RGBA frame.
     openCVCreateMat.rgbaMat.setTo(new Scalar(0, 0, 0, 0), openCVCreateMat.fgmaskMat);

     if (blurImage)
     {
         // Optional Gaussian smoothing of the masked MOG2 output.
         Imgproc.GaussianBlur(openCVCreateMat.rgbaMat, openCVCreateMat.rgbaMat, kernelSize, sigmaX, sigmaY);
     }

     yield return null;
 }
コード例 #2
0
 // Copies 'texture' into 'mat', smooths it, and writes the Canny edge map
 // into 'edge' using [min, max] as the hysteresis thresholds.
 void ProcessImg(Texture2D texture, Mat mat, Mat edge, double min, double max)
 {
     Utils.texture2DToMat(texture, mat);

     // NOTE(review): RGB2BGRA both swaps channel order and adds alpha —
     // confirm this is the intended layout for the edge-detection input.
     Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGRA);

     // Pre-blur to suppress noise before edge detection.
     Size blurKernel = new Size(5, 5);
     Imgproc.GaussianBlur(mat, mat, blurKernel, 1.4, 1.4);

     Imgproc.Canny(mat, edge, min, max);
     Debug.Log("Processed!");
 }
コード例 #3
0
    // Per-frame conversion of the camera image into a hand-drawn-style
    // output: posterised tones over a striped background plus black outlines.
    void Update()
    {
        if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame() || !initialized)
        {
            return;
        }

        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Grayscale copy of the frame; start the output from the stripes.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        bgMat.copyTo(dstMat);

        // Blurred copy used later for outline (Canny) extraction.
        Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);
        grayMat.get(0, 0, grayPixels);

        // Posterise into three tone bands; the mask selects the dark and
        // light bands (the mid band lets the striped background show).
        for (int i = 0; i < grayPixels.Length; i++)
        {
            var tone = grayPixels [i];

            if (tone < 70)
            {
                grayPixels [i] = 0;
                maskPixels [i] = 1;
            }
            else if (tone < 120)
            {
                grayPixels [i] = 100;
                maskPixels [i] = 0;
            }
            else
            {
                grayPixels [i] = 255;
                maskPixels [i] = 1;
            }
        }

        grayMat.put(0, 0, grayPixels);
        maskMat.put(0, 0, maskPixels);
        grayMat.copyTo(dstMat, maskMat);

        // Overlay black outline strokes wherever Canny finds edges.
        Imgproc.Canny(lineMat, lineMat, 20, 120);
        lineMat.copyTo(maskMat);
        Core.bitwise_not(lineMat, lineMat);
        lineMat.copyTo(dstMat, maskMat);

        Utils.matToTexture2D(dstMat, texture);
    }
コード例 #4
0
    // Sharpens the preview image in place using an unsharp mask:
    // result = 1.5 * source - 0.5 * gaussian(source).
    public void Sharpen()
    {
        // Snapshot the current preview into an RGB24 texture we can process.
        warpedTexture = new Texture2D(previewRawImage.mainTexture.width, previewRawImage.mainTexture.height, TextureFormat.RGB24, false);
        Graphics.CopyTexture(previewRawImage.texture, warpedTexture);

        Mat source  = new Mat(warpedTexture.height, warpedTexture.width, CvType.CV_8UC3);
        Mat blurred = new Mat(warpedTexture.height, warpedTexture.width, CvType.CV_8UC3);

        Utils.texture2DToMat(warpedTexture, source);

        // Kernel size (0,0) lets OpenCV derive it from sigma = 3.
        Imgproc.GaussianBlur(source, blurred, new Size(0, 0), 3);
        Core.addWeighted(source, 1.5, blurred, -.5, 0, blurred);

        Utils.matToTexture2D(blurred, warpedTexture);

        source.Dispose();
        blurred.Dispose();

        previewRawImage.texture = warpedTexture;
        warpedTexture           = null;

        // Reclaim the temporary buffers promptly.
        System.GC.Collect();
    }
コード例 #5
0
    // Detects circles in a sample image with the Hough transform, draws the
    // centres and outlines onto it, and shows the result in m_showImage.
    void Start()
    {
        // Flag 1 = IMREAD_COLOR: a 3-channel BGR Mat.
        srcMat  = Imgcodecs.imread(Application.dataPath + "/Textures/feature.jpg", 1);
        grayMat = new Mat();

        // FIX: the source is 3-channel BGR, but COLOR_RGBA2GRAY requires a
        // 4-channel input and throws at runtime; convert with COLOR_BGR2GRAY.
        Imgproc.cvtColor(srcMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        // Blur first, otherwise pentagons get misdetected as circles;
        // smoothing raises the Hough transform's accuracy.
        Imgproc.GaussianBlur(grayMat, grayMat, new Size(7, 7), 2, 2);

        Mat circles = new Mat();

        // Hough circle transform: dp=2, minDist=10, Canny threshold=160,
        // accumulator threshold=50, radius range [10, 40].
        Imgproc.HoughCircles(grayMat, circles, Imgproc.CV_HOUGH_GRADIENT, 2, 10, 160, 50, 10, 40);
        //Debug.Log(circles);

        // Centre of the current circle.
        Point pt = new Point();

        for (int i = 0; i < circles.cols(); i++)
        {
            // Each detection is (centre x, centre y, radius).
            double[] data = circles.get(0, i);
            pt.x = data[0];
            pt.y = data[1];
            double rho = data[2];
            // Draw the circle centre.
            Imgproc.circle(srcMat, pt, 3, new Scalar(255, 255, 0), -1, 8, 0);
            // Draw the circle outline.
            Imgproc.circle(srcMat, pt, (int)rho, new Scalar(255, 0, 0, 255), 5);
        }

        // Overlay the image dimensions as text.
        Imgproc.putText(srcMat, "W:" + srcMat.width() + " H:" + srcMat.height(), new Point(5, srcMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height());
        Sprite    sp  = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_showImage.sprite         = sp;
        m_showImage.preserveAspect = true;
        Utils.matToTexture2D(srcMat, t2d);
    }
コード例 #6
0
    // Runs edge detection on newImage (blurring it in place) and returns
    // every external contour whose area exceeds 500 px^2.
    private List <MatOfPoint> GetContours(Mat newImage)
    {
        Mat edges     = new Mat();
        Mat hierarchy = new Mat();
        List <MatOfPoint> contours    = new List <MatOfPoint>();
        List <MatOfPoint> bigContours = new List <MatOfPoint>();

        Imgproc.GaussianBlur(newImage, newImage, new Size(3, 3), 1);
        Imgproc.Canny(newImage, edges, 75, 255);

        // NOTE(review): a 1x1 structuring element makes this dilate a no-op;
        // a 3x3 kernel was probably intended. Left unchanged to preserve the
        // current output.
        Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(1, 1));
        Imgproc.dilate(edges, edges, kernel, new Point(-1, -1), 2);

        Imgproc.findContours(edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        foreach (var contour in contours)
        {
            double area = Imgproc.contourArea(contour);
            if (area > 500)
            {
                bigContours.Add(contour);
            }
        }

        // FIX: release the intermediate Mats' native memory explicitly
        // instead of waiting for the finalizer.
        kernel.Dispose();
        hierarchy.Dispose();
        edges.Dispose();

        return(bigContours);
    }
コード例 #7
0
    // Converts one raw RGBA frame into the hand-drawn-style output texture:
    // posterised tones over a striped background plus black Canny outlines.
    private void updateFilter(byte[] framebuffer)
    {
        rgbaMat.put(0, 0, framebuffer);

        // Grayscale copy of the frame; start the output from the stripes.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        bgMat.copyTo(dstMat);

        // Blurred copy used later for outline (Canny) extraction.
        Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);
        grayMat.get(0, 0, grayPixels);

        // Posterise into three tone bands; the mask selects the dark and
        // light bands (the mid band lets the striped background show).
        for (int i = 0; i < grayPixels.Length; i++)
        {
            var tone = grayPixels [i];

            if (tone < 70)
            {
                grayPixels [i] = 0;
                maskPixels [i] = 1;
            }
            else if (tone < 120)
            {
                grayPixels [i] = 100;
                maskPixels [i] = 0;
            }
            else
            {
                grayPixels [i] = 255;
                maskPixels [i] = 1;
            }
        }

        grayMat.put(0, 0, grayPixels);
        maskMat.put(0, 0, maskPixels);
        grayMat.copyTo(dstMat, maskMat);

        // Overlay black outline strokes wherever Canny finds edges.
        Imgproc.Canny(lineMat, lineMat, 20, 120);
        lineMat.copyTo(maskMat);
        Core.bitwise_not(lineMat, lineMat);
        lineMat.copyTo(dstMat, maskMat);

        Utils.matToTexture2D(dstMat, texture);
    }
コード例 #8
0
ファイル: OpenCVWrapper.cs プロジェクト: richardf75/Samples
        // Finds the external contours of the captured bitmap:
        // grayscale -> blur -> Canny -> morphological close -> FindContours.
        private IList <MatOfPoint> ProcessImage()
        {
            Mat grayMat  = new Mat();
            Mat blurMat  = new Mat();
            Mat edgesMat = new Mat();
            Mat final    = new Mat();
            Mat h        = new Mat();

            IList <MatOfPoint> contours = new JavaList <MatOfPoint>();

            OpenCV.Android.Utils.BitmapToMat(originalImage, originalMat);
            originalImage.Dispose();
            Imgproc.CvtColor(originalMat, grayMat, Imgproc.ColorBgr2gray);
            Imgproc.GaussianBlur(grayMat, blurMat, new OpenCV.Core.Size(3, 3), 0);
            Imgproc.Canny(blurMat, edgesMat, 10, 250);

            // Close small gaps in the edge map so contours form closed shapes.
            Mat kernel = Imgproc.GetStructuringElement(Imgproc.MorphRect, new Size(3, 3));

            Imgproc.MorphologyEx(edgesMat, final, Imgproc.MorphClose, kernel);

            Imgproc.FindContours(final, contours, h, Imgproc.RetrExternal, Imgproc.ChainApproxSimple);

            // FIX: dispose the intermediate Mats explicitly; relying on
            // finalizers leaks native memory until the next GC pass.
            grayMat.Dispose();
            blurMat.Dispose();
            edgesMat.Dispose();
            final.Dispose();
            h.Dispose();
            kernel.Dispose();

            return(contours);
        }
コード例 #9
0
ファイル: watershed.cs プロジェクト: Hengle/OpenCVForUnity
    // findContours-based segmentation: builds an edge map in place, finds all
    // contours, fills the largest one, and outlines it.
    private static Mat MyFindLargestRectangle(Mat original_image)
    {
        Mat imgSource = original_image;

        // Edge map: grayscale -> Canny -> blur, all modifying imgSource.
        Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);
        Imgproc.Canny(imgSource, imgSource, 50, 50);
        Imgproc.GaussianBlur(imgSource, imgSource, new Size(5, 5), 5);
        List <MatOfPoint> contours = new List <MatOfPoint>();

        Imgproc.findContours(imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

        // FIX: guard against an empty contour list — contours[0] below would
        // throw ArgumentOutOfRangeException on a blank edge map.
        if (contours.Count == 0)
        {
            return(imgSource);
        }

        double       maxArea         = 0;
        int          maxAreaIdx      = -1;
        MatOfPoint   largest_contour = contours[0];
        MatOfPoint2f approxCurve     = new MatOfPoint2f();

        // Track the contour with the largest area (must beat the current
        // best by more than 1 px^2).
        for (int idx = 0; idx < contours.Count; idx++)
        {
            MatOfPoint temp_contour = contours[idx];
            double     contourarea  = Imgproc.contourArea(temp_contour);
            if (contourarea - maxArea > 1)
            {
                maxArea         = contourarea;
                largest_contour = temp_contour;
                maxAreaIdx      = idx;
                // Polygonal approximation of the current best contour; only
                // the final winner's result remains in approxCurve.
                MatOfPoint2f new_mat     = new MatOfPoint2f(temp_contour.toArray());
                int          contourSize = (int)temp_contour.total();
                Imgproc.approxPolyDP(new_mat, approxCurve, contourSize * 0.05, true);
            }
        }

        // Draw all contours, fill the largest, then outline the largest.
        // NOTE(review): if no contour area exceeded 1, maxAreaIdx stays -1
        // and the last drawContours call draws ALL contours — confirm intent.
        Imgproc.drawContours(imgSource, contours, -1, new Scalar(255, 0, 0), 1);
        Imgproc.fillConvexPoly(imgSource, largest_contour, new Scalar(255, 255, 255));
        Imgproc.drawContours(imgSource, contours, maxAreaIdx, new Scalar(0, 0, 255), 3);

        return(imgSource);
    }
コード例 #10
0
        /// <summary>
        /// Converts src (RGBA) into a hand-drawn-style image written to dst
        /// (RGBA): posterised tones over a striped background with black
        /// Canny outlines. Working buffers are allocated lazily and rebuilt
        /// whenever the source size changes.
        /// </summary>
        public void Process(Mat src, Mat dst)
        {
            if (src == null)
            {
                // FIX: ArgumentNullException's first argument is the
                // parameter name, not a message.
                throw new ArgumentNullException(nameof(src));
            }

            if (dst == null)
            {
                throw new ArgumentNullException(nameof(dst));
            }

            // Source size changed: drop every cached buffer so it is rebuilt
            // at the new size below.
            if (grayMat != null && (grayMat.width() != src.width() || grayMat.height() != src.height()))
            {
                grayMat.Dispose();
                grayMat = null;
                lineMat.Dispose();
                lineMat = null;
                maskMat.Dispose();
                maskMat = null;
                bgMat.Dispose();
                bgMat = null;
                grayDstMat.Dispose();
                grayDstMat = null;

                grayPixels = null;
                maskPixels = null;
            }
            grayMat = grayMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);
            lineMat = lineMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);
            maskMat = maskMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);

            // FIX: build the striped background only when missing — it was
            // previously recreated (and redrawn) on every call, leaking the
            // old Mat each frame.
            if (bgMat == null)
            {
                // White background with diagonal black stripes every 4 rows.
                bgMat = new Mat(src.height(), src.width(), CvType.CV_8UC1, new Scalar(255));
                for (int i = 0; i < bgMat.rows() * 2.5f; i = i + 4)
                {
                    Imgproc.line(bgMat, new Point(0, 0 + i), new Point(bgMat.cols(), -bgMat.cols() + i), new Scalar(0), 1);
                }
            }
            grayDstMat = grayDstMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);

            grayPixels = grayPixels ?? new byte[grayMat.cols() * grayMat.rows() * grayMat.channels()];
            maskPixels = maskPixels ?? new byte[maskMat.cols() * maskMat.rows() * maskMat.channels()];


            // Grayscale input; output starts from the striped background.
            Imgproc.cvtColor(src, grayMat, Imgproc.COLOR_RGBA2GRAY);
            bgMat.copyTo(grayDstMat);
            Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);
            grayMat.get(0, 0, grayPixels);

            // Posterise into three tone bands; the mask selects the dark and
            // light bands (the mid band lets the stripes show through).
            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels [i] = 0;
                if (grayPixels [i] < 70)
                {
                    grayPixels [i] = 0;
                    maskPixels [i] = 1;
                }
                else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                {
                    grayPixels [i] = 100;
                }
                else
                {
                    grayPixels [i] = 255;
                    maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);
            maskMat.put(0, 0, maskPixels);
            grayMat.copyTo(grayDstMat, maskMat);

            // Overlay black outline strokes wherever Canny finds edges.
            Imgproc.Canny(lineMat, lineMat, 20, 120);
            lineMat.copyTo(maskMat);
            Core.bitwise_not(lineMat, lineMat);
            lineMat.copyTo(grayDstMat, maskMat);

            Imgproc.cvtColor(grayDstMat, dst, Imgproc.COLOR_GRAY2RGBA);
        }
コード例 #11
0
        /// <summary>
        /// Hand pose estimation: finds the largest colour-blob contour in
        /// rgbaMat, draws its convex hull, and counts fingertip candidates
        /// from the convexity defects. All drawing happens in rgbaMat.
        /// </summary>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            // Light smoothing to stabilise the colour-blob detector.
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            // Nothing to do until the user has sampled a target colour.
            if (!isColorSelected)
            {
                return;
            }

            // NOTE(review): contours is fetched BEFORE detector.process()
            // runs; presumably getContours() returns a live list that
            // process() updates — confirm, otherwise this analyses the
            // previous frame's contours.
            List <MatOfPoint> contours = detector.getContours();

            detector.process(rgbaMat);

//						Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            // Pick the contour with the largest min-area bounding rectangle.
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            // Axis-aligned bounding box of the chosen contour, drawn white.
            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours [boundPos].toArray()));
            Core.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

//						Debug.Log (
//						" Row start [" +
//								(int)boundRect.tl ().y + "] row end [" +
//								(int)boundRect.br ().y + "] Col start [" +
//								(int)boundRect.tl ().x + "] Col end [" +
//								(int)boundRect.br ().x + "]");


            // Horizontal cut-off at 70% of the hand's height: defects below
            // this line (wrist area) are ignored when counting fingers.
            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

//						Debug.Log (
//						" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
            Core.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            // Simplify the contour before hull/defect analysis.
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours [boundPos].toArray()), pointMat, 3, true);
            contours [boundPos] = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours [boundPos].toArray()), hull);

            // convexityDefects needs at least a triangle.
            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contours [boundPos].toArray()), hull, convexDefect);

            // Collect the hull vertices as points for drawing.
            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();
            List <Point>      listPo     = new List <Point> ();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contours [boundPos].toList() [hull.toList() [j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            // Keep only defects that are deep enough and above the wrist
            // line; each "far point" approximates a gap between two fingers.
            List <MatOfPoint> defectPoints = new List <MatOfPoint> ();
            List <Point>      listPoDefect = new List <Point> ();

            // Each defect is 4 ints: start index, end index, far-point
            // index, and fixed-point depth.
            for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
            {
                Point farPoint = contours [boundPos].toList() [convexDefect.toList() [j + 2]];
                int   depth    = convexDefect.toList() [j + 3];
                if (depth > threasholdSlider.value && farPoint.y < a)
                {
                    listPoDefect.Add(contours [boundPos].toList() [convexDefect.toList() [j + 2]]);
                }
//								Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
            }

            MatOfPoint e2 = new MatOfPoint();

            // NOTE(review): e2 is filled from listPo (hull points), not
            // listPoDefect — looks unintended, but defectPoints is never
            // read afterwards, so behaviour is unaffected. Confirm before
            // cleaning this up.
            e2.fromList(listPo);
            defectPoints.Add(e2);

//						Debug.Log ("hull: " + hull.toList ());
//						Debug.Log ("defects: " + convexDefect.toList ());

            // Draw the convex hull outline.
            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

//                      int defectsTotal = (int)convexDefect.total();
//						Debug.Log ("Defect total " + defectsTotal);

            // Finger count = accepted defects, clamped to a human maximum.
            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

//						Debug.Log ("numberOfFingers " + numberOfFingers);

//						Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            // Mark each accepted defect (finger gap) with a magenta dot.
            foreach (Point p in listPoDefect)
            {
                Core.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
コード例 #12
0
ファイル: SceneController.cs プロジェクト: drywitte/sherpa
    // Detects candidate holds in matImg: downsamples, blurs, Otsu-thresholds,
    // finds Canny edges and their contours, and computes each contour's
    // convex hull. Returns an RGBA copy of the downsampled input (the hull
    // mask drawing is currently commented out, so hulls are computed but not
    // rendered).
    Mat getHolds(Mat matImg)
    {
        // Debug.Log("Mat img size was: " + matImg.size().ToString());
        // Halve the resolution (modifies matImg in place).
        Imgproc.pyrDown(matImg, matImg);
        // Debug.Log("Mat img size is now: " + matImg.size().ToString());
        // NOTE(review): allocated CV_8UC4 but GaussianBlur immediately
        // re-creates it with matImg's own type, so the 4-channel hint here
        // is moot.
        Mat binary = new Mat(matImg.rows(), matImg.cols(), CvType.CV_8UC4);

        Imgproc.GaussianBlur(matImg, binary, new Size(5, 5), (double)0.0);

        // create grayscale mat
        Imgproc.cvtColor(binary, binary, Imgproc.COLOR_BGR2GRAY);
        List <MatOfPoint> contours = new List <MatOfPoint>();

        // find threshold for edges
        Mat threshold = new Mat();

        // With THRESH_OTSU the 0 threshold argument is ignored — Otsu picks
        // the threshold automatically.
        Imgproc.threshold(binary, threshold, 0, 255, Imgproc.THRESH_BINARY + Imgproc.THRESH_OTSU);

        // find edges
        Mat edges = new Mat();

        // NOTE(review): Canny on an already-binarised image — the 50/50
        // hysteresis thresholds have little effect here; verify this stage
        // is needed at all.
        Imgproc.Canny(threshold, edges, 50, 50, 3);

        // find contours
        Mat hierarchy = new Mat();

        OpenCVForUnity.ImgprocModule.Imgproc.findContours(edges, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

        // find hulls
        List <MatOfInt> hullInts = new List <MatOfInt>();

        for (int i = 0; i < contours.Count; i++)
        {
            MatOfInt hull = new MatOfInt();
            Imgproc.convexHull(new MatOfPoint(contours[i].toArray()), hull);
            hullInts.Add(hull);
        }
        // Debug.Log("Count of hulls is " + hullInts.Count.ToString());

        // add hulls to a list
        List <MatOfPoint> hullPts = new List <MatOfPoint>();
        List <Point>      listPo  = new List <Point>();
        Mat contourMat            = Mat.zeros(matImg.rows(), matImg.cols(), CvType.CV_8UC4); //new Mat(_webcam.height, _webcam.width, CvType.CV_8UC4);

        // Mat mask = Mat.zeros(matImg.rows(), matImg.cols(), CvType.CV_8UC4);
        // MatOfPoint e = new MatOfPoint();

        // for (int i = 0; i < contours.Count; i++)
        // {
        //  listPo.Clear();
        //  hullPts.Clear();
        //  for (int j = 0; j < hullInts[i].toList().Count; j++)
        //  {
        //      listPo.Add(contours[i].toList()[hullInts[i].toList()[j]]);

        //  }
        //  e.fromList(listPo);
        //  hullPts.Add(e);
        //  Imgproc.drawContours(mask, hullPts, 0, new Scalar(0, 255, 0), -4);
        //  e = new MatOfPoint();
        // }

        // create mask of hulls
        // NOTE(review): with the loop above disabled, this is just a plain
        // copy of the downsampled frame — no hull mask is applied.
        matImg.copyTo(contourMat);  //mask);
        // Imgproc.pyrUp(mask, mask);

        Imgproc.cvtColor(contourMat, contourMat, Imgproc.COLOR_BGR2RGBA);

        // dispose
        hierarchy.Dispose();
        // mask.Dispose();
        binary.Dispose();
        threshold.Dispose();
        edges.Dispose();
        // e.Dispose();
        contours.Clear();

        return(contourMat);
    }
コード例 #13
0
        // Per-frame conversion of the camera image into a hand-drawn-style
        // output: posterised tones over a striped background, black outlines.
        void Update()
        {
            if (!webCamTextureToMatHelper.isPlaying() || !webCamTextureToMatHelper.didUpdateThisFrame())
            {
                return;
            }

            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            // Grayscale copy of the frame; start the output from the stripes.
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            bgMat.copyTo(dstMat);

            // Blurred copy used later for outline (Canny) extraction.
            Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);
            grayMat.get(0, 0, grayPixels);

            // Posterise into three tone bands; the mask selects the dark and
            // light bands (the mid band lets the striped background show).
            for (int i = 0; i < grayPixels.Length; i++)
            {
                var tone = grayPixels [i];

                if (tone < 70)
                {
                    grayPixels [i] = 0;
                    maskPixels [i] = 1;
                }
                else if (tone < 120)
                {
                    grayPixels [i] = 100;
                    maskPixels [i] = 0;
                }
                else
                {
                    grayPixels [i] = 255;
                    maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);
            maskMat.put(0, 0, maskPixels);
            grayMat.copyTo(dstMat, maskMat);

            // Overlay black outline strokes wherever Canny finds edges.
            Imgproc.Canny(lineMat, lineMat, 20, 120);
            lineMat.copyTo(maskMat);
            Core.bitwise_not(lineMat, lineMat);
            lineMat.copyTo(dstMat, maskMat);

            Utils.matToTexture2D(dstMat, texture, colors);
        }
コード例 #14
0
    // Update is called once per frame: grabs the webcam frame, corrects its
    // orientation, and renders it in a hand-drawn style (posterised tones
    // over a striped background with black Canny outlines).
    void Update()
    {
        if (!initDone)
        {
            return;
        }

        // Re-layout the UI whenever the device orientation changes.
        if (screenOrientation != Screen.orientation)
        {
            screenOrientation = Screen.orientation;
            updateLayout();
        }

                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        // Workaround branch for specific old Unity iOS versions: treat any
        // plausibly-sized frame as updated instead of didUpdateThisFrame.
        if (webCamTexture.width > 16 && webCamTexture.height > 16)
        {
                                #else
        if (webCamTexture.didUpdateThisFrame)
        {
                                                #endif

            Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

            //flip to correct direction.
            if (webCamDevice.isFrontFacing)
            {
                if (webCamTexture.videoRotationAngle == 0)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
                else if (webCamTexture.videoRotationAngle == 90)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                // NOTE(review): this 'if' starts a new chain instead of
                // continuing with 'else if'; harmless because the angle is a
                // single value, but likely a typo worth tidying.
                if (webCamTexture.videoRotationAngle == 180)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
            else
            {
                if (webCamTexture.videoRotationAngle == 180)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }

            // Grayscale copy of the frame.
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

//						Utils.webCamTextureToMat (webCamTexture, grayMat, colors);


            // Start the output from the striped background.
            bgMat.copyTo(dstMat);


            // Blurred copy used later for outline (Canny) extraction.
            Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);



            grayMat.get(0, 0, grayPixels);

            // Posterise into three tone bands; the mask selects the dark and
            // light bands (the mid band lets the stripes show through).
            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels [i] = 0;

                if (grayPixels [i] < 70)
                {
                    grayPixels [i] = 0;

                    maskPixels [i] = 1;
                }
                else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                {
                    grayPixels [i] = 100;
                }
                else
                {
                    grayPixels [i] = 255;

                    maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);

            maskMat.put(0, 0, maskPixels);

            grayMat.copyTo(dstMat, maskMat);


            // Overlay black outline strokes wherever Canny finds edges.
            Imgproc.Canny(lineMat, lineMat, 20, 120);

            lineMat.copyTo(maskMat);

            Core.bitwise_not(lineMat, lineMat);

            lineMat.copyTo(dstMat, maskMat);

//		Imgproc.cvtColor(dstMat,rgbaMat,Imgproc.COLOR_GRAY2RGBA);
//				Utils.matToTexture2D (rgbaMat, texture);

            Utils.matToTexture2D(dstMat, texture, colors);
        }
    }

    // Unity lifecycle hook: stop the webcam feed when this component is
    // disabled so the camera device is released for other users.
    void OnDisable()
    {
        webCamTexture.Stop();
    }
コード例 #15
0
    //public bool verificaImagem(Texture2D texture, Texture2D texture2)
    //{
    //    Texture2D camFoto = texture;
    //    Texture2D printTela = texture2;

    //    // Escala de cinza. CV_8UC1
    //    Mat img1Mat = new Mat(camFoto.height, camFoto.width, CvType.CV_8UC1);
    //    Utils.texture2DToMat(camFoto, img1Mat);

    //    // Escala de cinza. CV_8UC1
    //    Mat img2Mat = new Mat(printTela.height, printTela.width, CvType.CV_8UC1);
    //    Utils.texture2DToMat(printTela, img2Mat);

    //    Imgproc.GaussianBlur(img1Mat, img1Mat, new Size(5, 5), 0);
    //    Imgproc.threshold(img1Mat, img1Mat, 100, 255, Imgproc.THRESH_BINARY);

    //    Imgproc.GaussianBlur(img2Mat, img2Mat, new Size(5, 5), 0);
    //    Imgproc.threshold(img2Mat, img2Mat, 240, 255, Imgproc.THRESH_BINARY);


    //    //Create the result mat
    //    int result_cols = img1Mat.cols() - img2Mat.cols() + 1;
    //    int result_rows = img1Mat.rows() - img2Mat.rows() + 1;
    //    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    //    int match_method = Imgproc.TM_CCOEFF_NORMED;

    //    Imgproc.matchTemplate(img1Mat, img2Mat, result, match_method);
    //    Debug.Log(match_method);

    //    return match_method <= 1;
    //}

    /// <summary>
    /// Compares two textures using ORB feature matching and returns true when
    /// at least 70% of the keypoints pass Lowe's ratio test.
    /// Side effect: writes the thresholded intermediate images to disk as
    /// "imagem1_tratamento_threshold.png" and "imagem2_tratamento_threshold.png".
    /// </summary>
    /// <param name="textParam">Camera photo to compare.</param>
    /// <param name="textParam2">Screen capture to compare against.</param>
    /// <returns>true when the ratio-test match percentage is >= 70.</returns>
    public bool verificaImagem(Texture2D textParam, Texture2D textParam2)
    {
        Texture2D camFoto   = textParam;
        Texture2D printTela = textParam2;

        // Grayscale. CV_8UC1
        Mat img1Mat = new Mat(camFoto.height, camFoto.width, CvType.CV_8UC1);
        Utils.texture2DToMat(camFoto, img1Mat);

        // Grayscale. CV_8UC1
        Mat img2Mat = new Mat(printTela.height, printTela.width, CvType.CV_8UC1);
        Utils.texture2DToMat(printTela, img2Mat);

        // Blur + binarize both images before feature extraction.
        Imgproc.GaussianBlur(img1Mat, img1Mat, new Size(5, 5), 0);
        Imgproc.threshold(img1Mat, img1Mat, 100, 255, Imgproc.THRESH_BINARY);
        Texture2D tex3 = new Texture2D(img1Mat.cols(), img1Mat.rows(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(img1Mat, tex3);
        byte[] bytes = tex3.EncodeToJPG();
        File.WriteAllBytes("imagem1_tratamento_threshold.png", bytes);

        Imgproc.GaussianBlur(img2Mat, img2Mat, new Size(5, 5), 0);
        Imgproc.threshold(img2Mat, img2Mat, 240, 255, Imgproc.THRESH_BINARY);
        Texture2D tex4 = new Texture2D(img2Mat.cols(), img2Mat.rows(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(img2Mat, tex4);
        bytes = tex4.EncodeToJPG();
        File.WriteAllBytes("imagem2_tratamento_threshold.png", bytes);

        ORB detector  = ORB.create();
        ORB extractor = ORB.create();

        MatOfKeyPoint keypoints1   = new MatOfKeyPoint();
        Mat           descriptors1 = new Mat();
        detector.detect(img1Mat, keypoints1);
        extractor.compute(img1Mat, keypoints1, descriptors1);

        MatOfKeyPoint keypoints2   = new MatOfKeyPoint();
        Mat           descriptors2 = new Mat();
        detector.detect(img2Mat, keypoints2);
        extractor.compute(img2Mat, keypoints2, descriptors2);

        // Nothing to match if either image produced no descriptors.
        if (descriptors1.empty() || descriptors2.empty())
        {
            return false;
        }

        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);

        // NOTE: knnMatch clears and refills the output list, so the original
        // matcher.match() call and the pre-seeded list entry were redundant.
        List<MatOfDMatch> lista = new List<MatOfDMatch>();
        matcher.knnMatch(descriptors1, descriptors2, lista, 2);

        // Lowe's ratio test: count matches whose best distance is clearly
        // better than the second-best one.
        long total = 0;
        foreach (MatOfDMatch item in lista)
        {
            DMatch[] pair = item.toArray();
            // BUG FIX: knnMatch may return fewer than 2 neighbours per query;
            // indexing [1] unconditionally could throw.
            if (pair.Length >= 2 && pair[0].distance < 0.75 * pair[1].distance)
            {
                total++;
            }
        }

        // BUG FIX: the original used elemSize() (bytes per element, a constant
        // independent of how many keypoints exist) as the keypoint count;
        // total() is the actual number of keypoints in the Mat.
        long number_keypoints = System.Math.Min(keypoints1.total(), keypoints2.total());

        // Guard against division by zero when no keypoints were detected.
        if (number_keypoints == 0)
        {
            return false;
        }

        // BUG FIX: total / number_keypoints was 64-bit integer division, which
        // truncates to 0 unless every keypoint matched; compute in double.
        double percentage = (double)total / number_keypoints * 100.0;
        Debug.Log(percentage);

        return percentage >= 70;
    }
コード例 #16
0
        // Image Processing function - called once per frame.
        // Isolates red and blue blobs in HSV space and outlines any blob whose
        // min-enclosing-circle radius exceeds 30 px. The "camMode" flag toggles
        // between showing the annotated frame and the raw threshold mask.
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                //load camera feed into matrix
                Mat frame = webCamTextureToMatHelper.GetMat();

                //clone frame to new variable
                Mat cameraFeed = frame.clone();

                //apply blurring methods to image
                Imgproc.GaussianBlur(cameraFeed, cameraFeed, new Size(5, 5), 0);
                Imgproc.medianBlur(cameraFeed, cameraFeed, 3);

                //convert to hsv colour space
                Mat hsv_image = new Mat();
                Imgproc.cvtColor(cameraFeed, hsv_image, Imgproc.COLOR_BGR2HSV);

                //create thresholds for colour isolation
                Mat blue_hue_range = new Mat();
                Mat red_hue_range  = new Mat();
                Mat lower_red      = new Mat();
                Mat upper_red      = new Mat();

                //upper and lower red colour thresholds (red hue wraps around 0/179)
                Core.inRange(hsv_image, new Scalar(0, 100, 100), new Scalar(10, 200, 200), lower_red);
                Core.inRange(hsv_image, new Scalar(160, 100, 100), new Scalar(179, 255, 255), upper_red);

                //add red thresholds together
                Core.addWeighted(lower_red, 1.0, upper_red, 1.0, 0.0, red_hue_range);

                Core.inRange(hsv_image, new Scalar(115, 100, 100), new Scalar(135, 200, 200), blue_hue_range);

                //add red and blue thresholds together
                Mat hue_image = new Mat();
                Core.addWeighted(blue_hue_range, 1.0, red_hue_range, 1.0, 0.0, hue_image);

                //noise reduction on hsv image
                Imgproc.GaussianBlur(hue_image, hue_image, new Size(9, 9), 5);

                Mat erodeElement  = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
                Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8));

                Imgproc.erode(hue_image, hue_image, erodeElement);
                Imgproc.dilate(hue_image, hue_image, dilateElement);

                //find contours in image
                System.Collections.Generic.List <MatOfPoint> circles = new System.Collections.Generic.List <MatOfPoint>();
                Mat hierarchy = new Mat();

                Imgproc.findContours(hue_image, circles, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

                //find circles and draw if radius is > 30
                for (int i = 0; i < circles.Count; i++)
                {
                    Point   pt     = new Point();
                    float[] radius = new float[1];
                    using (MatOfPoint2f contour2f = new MatOfPoint2f(circles[i].toArray()))
                    {
                        Imgproc.minEnclosingCircle(contour2f, pt, radius);
                    }

                    int r = (int)radius[0];

                    if (r > 30)
                    {
                        Imgproc.circle(frame, pt, r, new Scalar(0, 255, 0), 3);
                    }
                }

                //output either frame with circles drawn or hsv feed depending on status of change camera button
                if (camMode == false)
                {
                    Utils.matToTexture2D(frame, texture, webCamTextureToMatHelper.GetBufferColors());
                }
                else
                {
                    Utils.matToTexture2D(hue_image, texture, webCamTextureToMatHelper.GetBufferColors());
                }

                // BUG FIX: dispose all per-frame temporaries. OpenCV Mats wrap
                // native memory, so allocating fresh ones every frame without
                // Dispose() leaks steadily ("frame" is owned by the helper and
                // must NOT be disposed here).
                foreach (MatOfPoint c in circles)
                {
                    c.Dispose();
                }
                hierarchy.Dispose();
                dilateElement.Dispose();
                erodeElement.Dispose();
                hue_image.Dispose();
                upper_red.Dispose();
                lower_red.Dispose();
                red_hue_range.Dispose();
                blue_hue_range.Dispose();
                hsv_image.Dispose();
                cameraFeed.Dispose();
            }
        }
コード例 #17
0
ファイル: Main.cs プロジェクト: eugenejahn/ARNoteTaking
        /// <summary>
        /// Detects the largest sheet-of-paper-like quadrilateral in the current
        /// frame, draws its boundary onto <paramref name="mainMat"/>, warps the
        /// quad to a 200x300 upright view, and shows that warp on
        /// targetRawImage. Also stores the chosen corners in the
        /// paperCornerMatOfPoint field.
        /// </summary>
        /// <param name="mainMat">Color frame to annotate; returned unchanged except for the drawn boundary lines.</param>
        /// <returns>The (possibly annotated) input mat.</returns>
        private Mat findPaper(Mat mainMat)
        {
            // NOTE(review): the grayscale conversion reads grayMat, not the
            // mainMat parameter — presumably grayMat was filled with the color
            // frame elsewhere before this call. TODO confirm against the caller.
            Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
            // blur image
            Imgproc.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);


            // Posterize the gray image into three bands (0 / 100 / 255) via a
            // raw pixel pass; maskPixels is zeroed but never set here (the
            // set-to-1 lines are commented out).
            grayMat.get(0, 0, grayPixels);

            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels[i] = 0;

                if (grayPixels[i] < 70)
                {
                    grayPixels[i] = 0;

                    //maskPixels [i] = 1;
                }
                else if (70 <= grayPixels[i] && grayPixels[i] < 120)
                {
                    grayPixels[i] = 100;
                }
                else
                {
                    grayPixels[i] = 255;
                    //maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);

            //thresholding: make the image black and white (Otsu picks the threshold)
            Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);

            //extract the edge image
            Imgproc.Canny(grayMat, grayMat, 50, 50);


            //prepare for finding contours
            List <MatOfPoint> contours = new List <MatOfPoint>();

            Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            // Candidate quadrilaterals that pass the shape filters below.
            List <MatOfPoint> tmpTargets = new List <MatOfPoint>();


            for (int i = 0; i < contours.Count; i++)
            {
                MatOfPoint   cp = contours[i];
                MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
                double       p  = Imgproc.arcLength(cn, true);

                MatOfPoint2f approx = new MatOfPoint2f();

                // lager skew greater 0.03?
                //convert contours to readable polyagon
                Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);

                //find contours with 4 points
                if (approx.toArray().Length == 4)
                {
                    MatOfPoint approxPt = new MatOfPoint();
                    approx.convertTo(approxPt, CvType.CV_32S);
                    float maxCosine  = 0;
                    float rate       = 0;
                    // Sentinel "infinity"; shortest side length found so far.
                    float min_length = 100000000000000;


                    // Walk three corner triples of the quad, tracking the
                    // largest corner angle and the shortest side length.
                    // NOTE(review): despite the name, maxCosine accumulates the
                    // angle in degrees (Vector2.Angle), not a cosine.
                    for (int j = 2; j < 5; j++)
                    {
                        Vector2 v1 = new Vector2((float)(approx.toArray()[j % 4].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j % 4].y - approx.toArray()[j - 1].y));
                        Vector2 v2 = new Vector2((float)(approx.toArray()[j - 2].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j - 2].y - approx.toArray()[j - 1].y));

                        float v1_length = Mathf.Sqrt(v1.x * v1.x + v1.y * v1.y);
                        float v2_length = Mathf.Sqrt(v2.x * v2.x + v2.y * v2.y);

                        min_length = Mathf.Min(Mathf.Min((float)(v1_length), (float)v2_length), min_length);


                        // Aspect-like ratio of adjacent sides (0..1]; computed
                        // but only used in the commented-out debug logging.
                        if (v1_length > v2_length)
                        {
                            rate = v2_length / v1_length;
                        }
                        else
                        {
                            rate = v1_length / v2_length;
                        }



                        float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                        maxCosine = Mathf.Max(maxCosine, angle);
                    }


                    // Accept quads whose shortest side exceeds 100 px and whose
                    // sharpest corner is below 135 degrees.
                    if (min_length > 100 && maxCosine < 135f)//  && rate >= 0.6  maxCosine < 135f &&
                    {
                        tmpTargets.Add(approxPt);
                        //Debug.Log("Length -----------" + min_length);

                        //Debug.Log("------------rate" + rate + "---------------");
                    }
                }
            }
            if (tmpTargets.Count > 0)
            {
                // -----------------------DRAW RECTANGLE---------------------------
                //MatOfPoint2f approxCurve = new MatOfPoint2f();

                //for (int i = 0; i < tmpTargets.Count; i++)
                //{
                //    //Convert contours(i) from MatOfPoint to MatOfPoint2f
                //    MatOfPoint2f contour2f = new MatOfPoint2f(tmpTargets[i].toArray());
                //    //Processing on mMOP2f1 which is in type MatOfPoint2f
                //    double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
                //    Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);

                //    //Convert back to MatOfPoint
                //    MatOfPoint points = new MatOfPoint(approxCurve.toArray());

                //    // Get bounding rect of contour
                //    OpenCVForUnity.Rect rect = Imgproc.boundingRect(points);

                //    // draw enclosing rectangle (all same color, but you could use variable i to make them unique)
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 130, 255), 3);
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y), new Point(rect.x + 5, rect.y + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x + rect.width, rect.y), new Point(rect.x + +rect.width + 5, rect.y + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x + rect.width, rect.y + rect.height), new Point(rect.x + +rect.width + 5, rect.y + rect.height + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y + rect.height), new Point(rect.x + 5, rect.y + rect.height + 5), new Scalar(0, 0, 255), 5);

                //}
                // -----------------------DRAW RECTANGLE---------------------------



                // get the first contours

                int largestPaper = findLargestContour(tmpTargets);
                //Debug.Log(largestPaper);
                // using the largest one
                paperCornerMatOfPoint = tmpTargets[largestPaper];


                // draw boundary
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[1], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[2], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[1], paperCornerMatOfPoint.toList()[2], new Scalar(0, 255, 0), 3);

                // extract target from the frame and adjust some angle....
                // NOTE(review): corner order in srcPointsMat is assumed to match
                // dstPoints (top-left, bottom-left, bottom-right, top-right);
                // approxPolyDP does not guarantee this — TODO confirm.
                Mat srcPointsMat = Converters.vector_Point_to_Mat(paperCornerMatOfPoint.toList(), CvType.CV_32F);

                List <Point> dstPoints = new List <Point>();
                dstPoints.Add(new Point(0, 0));
                dstPoints.Add(new Point(0, 300));
                dstPoints.Add(new Point(200, 300));
                dstPoints.Add(new Point(200, 0));

                Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);
                //Make perspective transform
                Mat m         = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
                Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
                Imgproc.warpPerspective(mainMat, warpedMat, m, new Size(200, 300), Imgproc.INTER_LINEAR);
                warpedMat.convertTo(warpedMat, CvType.CV_8UC3);


                // Display the rectified paper on the UI RawImage.
                // NOTE(review): a new Texture2D is allocated every call — this
                // churns GPU memory if findPaper runs per frame.
                Texture2D finalTargetTextue = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
                Utils.matToTexture2D(warpedMat, finalTargetTextue);

                targetRawImage.texture = finalTargetTextue;
                //Debug.Log(paperCornerMatOfPoint.toList()[0].ToString() + " " + paperCornerMatOfPoint.toList()[1].ToString()+ " " + paperCornerMatOfPoint.toList()[2].ToString()+ " " + paperCornerMatOfPoint.toList()[3].ToString());
            }
            //--------------------------------------------------------


            return(mainMat);
        }
コード例 #18
0
    // Per-frame Kinect pipeline: pulls the latest color/depth/body-index
    // frames, builds a person-silhouette mask by mapping color pixels into
    // depth space, applies the selected visual filter (original / sepia /
    // pixelize / comic) to the masked region, and pushes the result to the
    // output texture.
    void Update()
    {
        if (reader != null)
        {
            MultiSourceFrame frame = reader.AcquireLatestFrame();
            if (frame != null)
            {
                using (ColorFrame colorFrame = frame.ColorFrameReference.AcquireFrame()) {
                    if (colorFrame != null)
                    {
                        colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Rgba);
                    }
                }
                using (DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame()) {
                    if (depthFrame != null)
                    {
                        //Debug.Log ("bodyIndexFrame not null");
                        depthFrame.CopyFrameDataToArray(depthData);
                    }
                }
                using (BodyIndexFrame bodyIndexFrame = frame.BodyIndexFrameReference.AcquireFrame()) {
                    if (bodyIndexFrame != null)
                    {
                        //Debug.Log ("bodyIndexFrame not null");
                        bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                    }
                }

                // Drop the reference; the individual sub-frames were released
                // by the using blocks above.
                frame = null;
            }
        }
        else
        {
            return;
        }

        Utils.copyToMat(colorData, outputMat);

        Utils.copyToMat(colorData, rgbaMat);

        // Map every color pixel to its depth-space coordinate so the
        // body-index buffer (depth resolution) can be sampled per color pixel.
        coordinateMapper.MapColorFrameToDepthSpace(depthData, depthSpacePoints);
        int width      = rgbaMat.width();
        int height     = rgbaMat.height();
        // Kinect v2 depth frame width in pixels.
        int depthWidth = 512;

        // NOTE(review): maskOn is never used below — candidate for removal.
        byte[] maskOn = new byte[] { 0 };
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                int index = x + y * width;

                // Clamp negative depth indices (unmappable pixels yield
                // -infinity coordinates) to 0.
                // NOTE(review): there is no upper-bound check — a mapped index
                // beyond bodyIndexData.Length would throw. TODO confirm the
                // mapper never exceeds the 512x424 buffer.
                int tmp = ((int)depthSpacePoints [index].X + (int)depthSpacePoints [index].Y * depthWidth < 0) ? 0 : (int)depthSpacePoints [index].X + (int)depthSpacePoints [index].Y * depthWidth;

                // 255 in the body-index frame means "no tracked body here".
                if (bodyIndexData [tmp] == 255)
                {
                    maskData [index] = 0;
                }
                else
                {
                    maskData [index] = 255;
                }
            }
        }
        Utils.copyToMat(maskData, maskMat);


        if (mode == modeType.original)
        {
            // Unfiltered feed, masked to the tracked body.
            rgbaMat.copyTo(outputMat, maskMat);

            Imgproc.putText(outputMat, "ORIGINAL MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.sepia)
        {
            // Sepia tone via a color-transform kernel.
            Core.transform(rgbaMat, rgbaMat, sepiaKernel);

            rgbaMat.copyTo(outputMat, maskMat);

            Imgproc.putText(outputMat, "SEPIA MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.pixelize)
        {
            // Downscale to 10% then upscale with nearest-neighbour to produce
            // the blocky pixelization effect.
            Imgproc.resize(rgbaMat, pixelizeIntermediateMat, pixelizeSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
            Imgproc.resize(pixelizeIntermediateMat, rgbaMat, rgbaMat.size(), 0.0, 0.0, Imgproc.INTER_NEAREST);

            rgbaMat.copyTo(outputMat, maskMat);

            Imgproc.putText(outputMat, "PIXELIZE MODE" + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.comic)
        {
            // Comic filter: posterize gray levels into 0/100/255 bands and
            // overlay inverted Canny edges on a prepared background.
            Imgproc.cvtColor(rgbaMat, comicGrayMat, Imgproc.COLOR_RGBA2GRAY);

            comicBgMat.copyTo(comicDstMat);

            Imgproc.GaussianBlur(comicGrayMat, comicLineMat, new Size(3, 3), 0);


            Utils.copyFromMat(comicGrayMat, comicGrayPixels);

            for (int i = 0; i < comicGrayPixels.Length; i++)
            {
                comicMaskPixels [i] = 0;

                if (comicGrayPixels [i] < 70)
                {
                    comicGrayPixels [i] = 0;

                    comicMaskPixels [i] = 1;
                }
                else if (70 <= comicGrayPixels [i] && comicGrayPixels [i] < 120)
                {
                    comicGrayPixels [i] = 100;
                }
                else
                {
                    comicGrayPixels [i] = 255;

                    comicMaskPixels [i] = 1;
                }
            }


            Utils.copyToMat(comicGrayPixels, comicGrayMat);

            Utils.copyToMat(comicMaskPixels, comicMaskMat);

            // Copy only the dark/bright bands; mid-gray keeps the background.
            comicGrayMat.copyTo(comicDstMat, comicMaskMat);


            Imgproc.Canny(comicLineMat, comicLineMat, 20, 120);

            comicLineMat.copyTo(comicMaskMat);

            // Invert so edges render as black strokes when masked in.
            Core.bitwise_not(comicLineMat, comicLineMat);

            comicLineMat.copyTo(comicDstMat, comicMaskMat);


            Imgproc.cvtColor(comicDstMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);


            rgbaMat.copyTo(outputMat, maskMat);

            Imgproc.putText(outputMat, "COMIC MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }

        Utils.matToTexture(outputMat, texture);
    }
コード例 #19
0
ファイル: Scanner_Main.cs プロジェクト: garawaa/OMRScanner
// Update is called once per frame
    // Update is called once per frame.
    // OMR scanner pipeline: edge-detects the camera frame, finds convex hulls
    // of external contours sized between 1/3 and 4/5 of the frame, keeps
    // square-ish 4-point approximations, stores the detected sheet corners in
    // nowRectPoints and triggers perspectiveAlign() for each accepted quad.
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            frame = webCamTextureToMatHelper.GetMat();
            frame.copyTo(img_orig);

            drawing = img_orig.clone();

            // NOTE(review): with ratio == 1 the Canny low and high thresholds
            // are identical (50/50) — confirm this is intended; the usual
            // recommendation is high = 2-3x low.
            int       lowThreshold = 50;// (int)200;// slider.value;
            const int ratio        = 1;
            const int kernel_size  = 3;

            // NOTE(review): img_lab is computed but not used in this method —
            // presumably consumed elsewhere (e.g. by perspectiveAlign). Also
            // note the mixed BGR/RGBA assumptions in the two cvtColor calls.
            Imgproc.cvtColor(img_orig, img_lab, Imgproc.COLOR_BGR2Lab);
            double omrSize = img_orig.cols() * img_orig.rows();

            Imgproc.cvtColor(img_orig, img_gray, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.GaussianBlur(img_gray, img_gray, new Size(15, 15), 1.5, 1.5);       //Gaussian blur
            Imgproc.erode(img_gray, img_gray, new Mat(), new Point(-1, -1), 1);         //Erosion
                                                                                        // Imgproc.dilate(img_gray, img_gray, new Mat(), new Point(-1, -1), 10, 1, new Scalar(10));    //Dilation
            Imgproc.Canny(img_gray, img_edges, lowThreshold, lowThreshold * ratio, kernel_size, false);

            //Shape detection
            List <MatOfPoint> contours = new List <MatOfPoint>();
            Mat hierarchy = new Mat();
            Imgproc.findContours(img_edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

            //Texture2D tex = new Texture2D(img_edges.width(), img_edges.height(), TextureFormat.RGB24, false);
            //Utils.matToTexture2D(img_edges, tex);
            //byte[] bytes1 = tex.EncodeToJPG();
            //File.WriteAllBytes("D:/2019/OMR/" + "test213123.png", bytes1);

            List <MatOfPoint> hulls = new List <MatOfPoint>();

            // Convert each contour to its convex hull, then filter by area so
            // only sheet-sized shapes survive.
            for (int i = 0; i < contours.Count; i++)
            {
                MatOfInt hull_temp = new MatOfInt();
                Imgproc.convexHull(contours[i], hull_temp);
                int[]   arrIndex   = hull_temp.toArray();
                Point[] arrContour = contours[i].toArray();
                Point[] arrPoints  = new Point[arrIndex.Length];

                // convexHull returns indices into the contour; resolve them
                // back to actual points.
                for (int k = 0; k < arrIndex.Length; k++)
                {
                    arrPoints[k] = arrContour[arrIndex[k]];
                }

                MatOfPoint temp = new MatOfPoint();
                temp.fromArray(arrPoints);

                //Filter outliers: keep hulls between 1/3 and 4/5 of the frame area
                if (Imgproc.contourArea(temp) > omrSize / 3 && Imgproc.contourArea(temp) < (omrSize * 4) / 5)
                {
                    hulls.Add(temp);
                }
            }

            List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();
            for (int i = 0; i < hulls.Count; i++)
            {
                MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
                hull2f.Add(newPoint);
            }

            for (int i = 0; i < hulls.Count; i++)
            {
                //Approximate polygon (epsilon = 1% of perimeter)
                MatOfPoint2f approx = new MatOfPoint2f();

                Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
                List <Point> approx_polygon = approx.toList();
                // approx_polygon = Scannerproc.filterPolygon(approx_polygon);
                // Debug.Log(approx_polygon.Count);
                if (!Scannerproc.isSquare(approx_polygon))
                {
                    continue;
                }
                else
                {
                    // Accepted quad: remember its corners and rectify the sheet.
                    nowRectPoints.Clear();
                    nowRectPoints.AddRange(approx_polygon);
                    perspectiveAlign();
                }

                //Center of mass of the polygon's vertices.
                // NOTE(review): cx/cy are computed but not used afterwards —
                // candidates for removal or for labelling the drawn shape.
                int cx = 0,
                    cy = 0;


                for (int k = 0; k < approx_polygon.Count; k++)
                {
                    cx += (int)approx_polygon[k].x;
                    cy += (int)approx_polygon[k].y;
                }
                cx /= approx_polygon.Count;
                cy /= approx_polygon.Count;

                Scannerproc.drawShape(drawing, approx_polygon, new Scalar(0, 255, 0));
            }

            if (showTextureOnScreen)
            {
                showCurrentTextureOnScreen();
            }
        }
    }
コード例 #20
0
    // Update is called once per frame.
    // Grabs the current webcam frame, extracts external contours via
    // blur -> threshold -> Canny, draws every contour with area > 100 in
    // green on the frame, and logs the count.
    void Update()
    {
        if (webCamTexture.isPlaying)
        {
            webCamTexture.GetPixels32(arr);
            baseTexture.SetPixels32(arr);
            baseTexture.Apply();


            Utils.texture2DToMat(baseTexture, mainMat);
            mainMat.copyTo(grayMat);
            Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
            Imgproc.GaussianBlur(grayMat, grayMat, new Size(9, 9), 1);
            // NOTE(review): maxval 225 looks like a typo for 255 — confirm
            // intent before changing, since Canny below still fires on 225.
            Imgproc.threshold(grayMat, grayMat, 110, 225, Imgproc.THRESH_BINARY);
            Imgproc.Canny(grayMat, grayMat, 20, 190);

            List <MatOfPoint> contours = new List <MatOfPoint>();
            Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            // Keep only contours larger than the noise threshold.
            List <MatOfPoint> contours_list = new List <MatOfPoint>();
            for (int i = 0; i < contours.Count; i++)
            {
                double area = Imgproc.contourArea(contours[i]);
                if (area > 100)
                {
                    contours_list.Add(contours[i]);
                }
            }

            // BUG FIX: the original called drawContours once per element with
            // contourIdx -1, redrawing the entire list N times (O(n^2) draw
            // calls); -1 already means "draw all", so one call suffices.
            if (contours_list.Count > 0)
            {
                Imgproc.drawContours(mainMat, contours_list, -1, new Scalar(0, 255, 0), 4);
            }

            Debug.Log("Number : " + contours_list.Count);

            Utils.matToTexture2D(mainMat, baseTexture);
        }
    }
コード例 #21
0
    // Per-frame Kinect color pipeline: pulls the latest color frame and
    // applies the selected visual filter (original / sepia / pixelize / comic)
    // before writing the result to the output texture. Unlike the
    // multi-source variant, there is no body mask here — the whole frame is
    // filtered.
    void Update()
    {
        if (reader != null)
        {
            ColorFrame frame = reader.AcquireLatestFrame();

            if (frame != null)
            {
                frame.CopyConvertedFrameDataToArray(data, ColorImageFormat.Rgba);

                // Release the Kinect frame promptly so the next one can arrive.
                frame.Dispose();
                frame = null;
            }
        }
        else
        {
            return;
        }

        Utils.copyToMat(data, rgbaMat);


        if (mode == modeType.original)
        {
            Core.putText(rgbaMat, "ORIGINAL MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.sepia)
        {
            // Sepia tone via a color-transform kernel.
            Core.transform(rgbaMat, rgbaMat, sepiaKernel);

            Core.putText(rgbaMat, "SEPIA MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.pixelize)
        {
            // Downscale to 10% then upscale with nearest-neighbour for the
            // blocky pixelization effect.
            Imgproc.resize(rgbaMat, pixelizeIntermediateMat, pixelizeSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
            Imgproc.resize(pixelizeIntermediateMat, rgbaMat, rgbaMat.size(), 0.0, 0.0, Imgproc.INTER_NEAREST);

            Core.putText(rgbaMat, "PIXELIZE MODE" + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.comic)
        {
            // Comic filter: posterize gray levels into 0/100/255 bands and
            // overlay inverted Canny edges on a prepared background.
            Imgproc.cvtColor(rgbaMat, comicGrayMat, Imgproc.COLOR_RGBA2GRAY);

            comicBgMat.copyTo(comicDstMat);

            Imgproc.GaussianBlur(comicGrayMat, comicLineMat, new Size(3, 3), 0);


            Utils.copyFromMat(comicGrayMat, comicGrayPixels);

            for (int i = 0; i < comicGrayPixels.Length; i++)
            {
                comicMaskPixels [i] = 0;

                if (comicGrayPixels [i] < 70)
                {
                    comicGrayPixels [i] = 0;

                    comicMaskPixels [i] = 1;
                }
                else if (70 <= comicGrayPixels [i] && comicGrayPixels [i] < 120)
                {
                    comicGrayPixels [i] = 100;
                }
                else
                {
                    comicGrayPixels [i] = 255;

                    comicMaskPixels [i] = 1;
                }
            }


            Utils.copyToMat(comicGrayPixels, comicGrayMat);

            Utils.copyToMat(comicMaskPixels, comicMaskMat);

            // Copy only the dark/bright bands; mid-gray keeps the background.
            comicGrayMat.copyTo(comicDstMat, comicMaskMat);


            Imgproc.Canny(comicLineMat, comicLineMat, 20, 120);

            comicLineMat.copyTo(comicMaskMat);

            // Invert so edges render as black strokes when masked in.
            Core.bitwise_not(comicLineMat, comicLineMat);

            comicLineMat.copyTo(comicDstMat, comicMaskMat);


            Imgproc.cvtColor(comicDstMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);

            Core.putText(rgbaMat, "COMIC MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }

        Utils.matToTexture(rgbaMat, texture);
    }
コード例 #22
0
    // Detects the hand in the camera frame, estimates the arm's orientation from a
    // fitted ellipse, moves the attached plane to follow the arm, and fires a
    // button press when the tracked fingertip gets close enough to one of the two
    // child buttons.
    // NOTE(review): mutates rgbaMat in place (blur + detector drawing) and updates
    // the fields armCenter, armAngle and offset as side effects.
    private void HandPoseEstimationProcess(Mat rgbaMat)
    {
        // indication for making sphere coloring better
        Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

        // NOTE(review): contours are fetched before ProcessSkin/ProcessFinger run;
        // presumably the detector updates the same list in place -- confirm.
        List <MatOfPoint> contours = detector.GetContours();

        detector.ProcessSkin(rgbaMat);
        detector.ProcessFinger(rgbaMat);

        if (contours.Count <= 0) //TODO: Add contour size
        {
            HidePlane();
            return;
        }

        if (!isHandDetected)
        {
            //Debug.Log("Contour size:" + detector.HandContourSize);
            // Ignore blobs too small to be a hand.
            if (detector.HandContourSize < HAND_CONTOUR_AREA_THRESHOLD)
            {
                HidePlane();
                return;
            }
            // Centroid of the first hand contour via image moments.
            Moments moment = Imgproc.moments(detector.HandContours[0]);
            armCenter.x = moment.m10 / moment.m00;
            armCenter.y = moment.m01 / moment.m00;

            // Project the centroid into the world and park the plane at a fixed
            // distance from the camera.
            // NOTE(review): both axes are divided by 640.0f -- assumes a 640x640
            // frame; confirm against the actual capture resolution.
            Ray res = Camera.main.ViewportPointToRay(new Vector3(((float)armCenter.x / 640.0f), ((float)armCenter.y / 640.0f), Camera.main.nearClipPlane));
            gameObject.transform.position = res.GetPoint(distanceFromCam);


            //Added without debugging!!!
            ShowPlane();
        }

        // Fit an ellipse to the hand contour to recover the arm's angle.
        MatOfPoint2f elipseRes   = new MatOfPoint2f(detector.HandContours[0].toArray());
        RotatedRect  rotatedRect = Imgproc.fitEllipse(elipseRes);

        elipseRes.Dispose();
        armAngle          = rotatedRect.angle;
        detector.ArmAngle = armAngle;
        double line_size = 0.14;

        //The gesture is not recognized at 90 degrees!
        //if (armAngle >= 90 - deltaFor90Degrees && armAngle <= 90 + deltaFor90Degrees)
        //{
        //    gameObject.GetComponent<Renderer>().enabled = true;
        //    // enable all children (buttons) renderer
        //    Renderer[] renderChildren = gameObject.GetComponentsInChildren<Renderer>();
        //    for (int i = 0; i < renderChildren.Length; ++i)
        //    {
        //        renderChildren[i].GetComponent<Renderer>().enabled = true;
        //    }

        //    Moments moment1 = Imgproc.moments(detector.HandContours[0]);
        //    armCenter.x = moment1.m10 / moment1.m00;
        //    armCenter.y = moment1.m01 / moment1.m00;

        //    Vector3 offset = CalculateNewPositionFromPicture(armCenter);
        //    Vector3 newHandPosition = gameObject.transform.position + offset - previousOffset;
        //    newHandPosition.z = 4;
        //    gameObject.transform.position = newHandPosition;

        //    gameObject.GetComponent<Transform>().rotation = Quaternion.Euler(-25, 0, 0);

        //    return;
        //}
        //else if (armAngle == 0)
        //{
        //    gameObject.GetComponent<Renderer>().enabled = false;
        //    // disable all children (buttons) renderer
        //    Renderer[] renderChildren = gameObject.GetComponentsInChildren<Renderer>();
        //    for (int i = 0; i < renderChildren.Length; ++i)
        //    {
        //        renderChildren[i].GetComponent<Renderer>().enabled = false;
        //    }

        //}

        //Debug.Log("Arm angle: " + armAngle.ToString());

        // Normalize the ellipse angle into [-90, 90] and derive a world-space
        // offset along the arm's direction, line_size units long.
        if (armAngle > 90)
        {
            armAngle -= 180;
            offset    = new Vector3((float)(-Math.Abs(line_size * Math.Sin((Math.PI / 180) * (armAngle)))),
                                    Math.Abs((float)(line_size * Math.Cos((Math.PI / 180) * (-armAngle)))), 0);
        }
        else
        {
            offset = new Vector3(Math.Abs((float)(line_size * Math.Sin((Math.PI / 180) * (-armAngle)))),
                                 Math.Abs((float)(line_size * Math.Cos((Math.PI / 180) * (-armAngle)))), 0);
        }

        // Mirror the horizontal offset when the camera looks back at the scene.
        Vector3 cameraRotation = (camera.GetComponent <Camera>().transform.rotation).eulerAngles;

        if (cameraRotation.y > 105 && cameraRotation.y < 260)
        {
            offset.x *= -1;
        }

        Point p = detector.NearestPoint;

        // (-1, -1) is the detector's "no fingertip found" sentinel.
        if (p.x == -1 || p.y == -1 || (detector.NearestPoint.x < 0) || !gameObject.GetComponent <Renderer>().enabled)
        {
            //cube.GetComponent<Renderer>().enabled = false;
            return;
        }

        // newPosition is the position of the finger
        Vector3 newPosition = CalculateNewPositionFromPicture(detector.NearestPoint);

        // NOTE(review): didHitPlane is presumably set inside
        // CalculateNewPositionFromPicture -- confirm.
        if (!didHitPlane)
        {
            return;
        }

        //cube.transform.position = newPosition;
        //cube.GetComponent<Renderer>().enabled = true;

        // first button
        Vector3 buttonPos1 = gameObject.transform.GetChild(0).position;

        // Compare in the z = 0 plane only.
        newPosition.z = buttonPos1.z = 0;
        // second button
        Vector3 buttonPos2 = gameObject.transform.GetChild(1).position;
        // particle system - animation while pressing buttons

        // Press tolerances (world units) around each button's center.
        double safeYDistance = 0.05;
        double safeXDistance = 1.0;

        if (sphereColor != null)
        {
            if ((Math.Abs(newPosition.y - buttonPos1.y) <= safeYDistance) && (Math.Abs(newPosition.x - buttonPos1.x) <= safeXDistance))
            {
                // pressing button. do something
                PressButton(Color.yellow, 0);
            }
            else if ((Math.Abs(newPosition.y - buttonPos2.y) <= safeYDistance) && Math.Abs(newPosition.x - buttonPos2.x) <= safeXDistance)
            {
                // pressing button. do something
                PressButton(Color.red, 1);
            }
        }
    }
コード例 #23
0
        // Update is called once per frame.
        // Converts the ROI of the camera frame into a "comic" rendering (posterized
        // grayscale plus Canny line art over the striped background), pushes the
        // result to the display texture, and keeps the canvas quad positioned in
        // front of the camera.
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // FIX: the ROI header was previously disposed only on the success
                // path; "using" guarantees release even if a processing call throws.
                using (Mat rgbaMatClipROI = new Mat(rgbaMat, processingAreaRect))
                {
                    Imgproc.cvtColor(rgbaMatClipROI, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    // Start from the striped background.
                    bgMat.copyTo(dstMatClippingROI);

                    Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);

                    grayMat.get(0, 0, grayPixels);

                    // Posterize: dark -> black, mid -> gray, bright -> white.
                    // The mask selects everything except the mid band, so the mid
                    // band keeps showing the striped background.
                    for (int i = 0; i < grayPixels.Length; i++)
                    {
                        maskPixels [i] = 0;

                        if (grayPixels [i] < 70)
                        {
                            grayPixels [i] = 0;
                            maskPixels [i] = 1;
                        }
                        else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                        {
                            grayPixels [i] = 100;
                        }
                        else
                        {
                            grayPixels [i] = 255;
                            maskPixels [i] = 1;
                        }
                    }

                    grayMat.put(0, 0, grayPixels);
                    maskMat.put(0, 0, maskPixels);
                    grayMat.copyTo(dstMatClippingROI, maskMat);

                    // Overlay Canny edges as black lines.
                    Imgproc.Canny(lineMat, lineMat, 20, 120);

                    lineMat.copyTo(maskMat);

                    Core.bitwise_not(lineMat, lineMat);

                    lineMat.copyTo(dstMatClippingROI, maskMat);

                    // Write the full destination mat back into the RGBA frame and on
                    // to the display texture.
                    // NOTE(review): dstMatClippingROI presumably views into dstMat --
                    // confirm where the two are allocated.
                    Imgproc.cvtColor(dstMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);

                    Utils.fastMatToTexture2D(rgbaMat, texture);
                }
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = webCamTextureToMatHelper.GetCameraToWorldMatrix();
                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }
        }
コード例 #24
0
    // Update is called once per frame.
    // Detects circles in the webcam frame with a Hough transform; when few enough
    // circles are found, treats them as the tennis ball and feeds their world-space
    // positions to ball_tracker, drawing feedback rings onto the frame.
    void Update()
    {
        if (!running)
        {
            return;
        }


        if (Input.GetMouseButtonDown(0))
        {
            Debug.Log($"Mouse Position: {Input.mousePosition} -> World: {Camera.main.ScreenToWorldPoint(Input.mousePosition)}");
        }


        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();
            //Writes into the mat
            // NOTE(review): the conversion code is COLOR_RGB2GRAY while the trailing
            // comment hints COLOR_RGBA2GRAY was considered -- confirm the channel
            // count of the mat returned by GetMat().
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGB2GRAY);     //COLOR_RGBA2GRAY
            //Applies gaussian blur for better results
            Imgproc.GaussianBlur(grayMat, grayMat, new Size(3, 3), 2);
            using (Mat circles = new Mat()) {
                //Circle detection using the hough gradient
                Imgproc.HoughCircles(grayMat, circles, Imgproc.CV_HOUGH_GRADIENT, dp, minDist, param1, param2, minRadius, maxRadius);
                Point pt = new Point();

                //Limits the circle drawing when too many circles are detected
                if ((int)circles.total() > 5)
                {
                    // Too noisy to track: just draw every detection.
                    for (int i = 0; i < circles.rows(); i++)
                    {
                        // Each result row holds (x, y, radius).
                        double[] data = circles.get(i, 0);
                        pt.x = data [0];
                        pt.y = data [1];
                        double rho = data [2];
                        Imgproc.circle(rgbaMat, pt, (int)rho, GlobalValues.DETECTION_COLOR, GlobalValues.RINGS_RADIUS);
                    }
                }
                else      //Tennis ball tracking starts here
                {
                    for (int i = 0; i < circles.rows(); i++)
                    {
                        for (var j = 0; j < circles.cols(); j++)
                        {
                            //Get the data from the API
                            double[] data = circles.get(i, j);
                            pt.x = data [0];
                            pt.y = data [1];
                            double rho = data [2];   // circle radius in pixels

                            //Convert to worldspace (flip y: image origin is top-left,
                            //screen origin is bottom-left)
                            Vector2 pos      = new Vector2((float)data[0], webCamTextureToMatHelper.GetWebCamTexture().height - (float)data[1]);
                            Vector3 worldPos = Camera.main.ScreenToWorldPoint(AdjustToResolution(pos));

                            //Drawings for debug purposes
                            Debug.DrawRay(worldPos, Vector3.up * 10, Color.magenta, 1f);
                            Debug.DrawRay(worldPos, Vector3.down * 10, Color.magenta, 1f);
                            Debug.DrawRay(worldPos, Vector3.left * 10, Color.magenta, 1f);
                            Debug.DrawRay(worldPos, Vector3.right * 10, Color.magenta, 1f);

                            //If the ball went outside the detection threshold
                            if (ball_tracker.AwaitingForRegainFocus(worldPos))
                            {
                                //Flash a blue circle to show the player where to start
                                if (Mathf.Sin(Time.time * GlobalValues.CHECK_POINT_BLINKING_FRECUENCY) > 0)
                                {
                                    var last_pos   = ball_tracker.GetLastPosition();
                                    var screen_pos = InvertAdjustToResolution(Camera.main.WorldToScreenPoint(last_pos));
                                    screen_pos.y = webCamTextureToMatHelper.GetWebCamTexture().height - screen_pos.y;
                                    Imgproc.circle(rgbaMat, new Point(screen_pos.x, screen_pos.y), (int)rho, GlobalValues.CHECK_POINT_COLOR, GlobalValues.RINGS_RADIUS);
                                }
                            }    //Otherwise Update the ball tracker
                            else if (ball_tracker.Update(worldPos))
                            {
                                Imgproc.circle(rgbaMat, pt, (int)rho, GlobalValues.TRACKING_COLOR, GlobalValues.RINGS_RADIUS);
                            }
                        }
                    }
                }
            }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
コード例 #25
0
        // Detects a hand of the given color in rgbaMat and draws the detection
        // (wrist/palm bounding boxes, convex hull, and between-finger points) onto
        // the image; updates the numberOfFingers field as a side effect.
        private static void _handPoseEstimationProcess(Mat rgbaMat, Color handColor)
        {
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

            // Tell the detector which HSV color to segment.
            detector.setHsvColor(HGColorSpuiter.ColorToScalar(handColor));

            List <MatOfPoint> contours = detector.getContours();

            detector.process(rgbaMat);
            if (contours.Count <= 0)
            {
                return;
            }

            // Pick the contour with the largest rotated bounding box -- the hand.
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
            // Draw the range down to the wrist.
            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(WristRangeColor), 2, 8, 0);

            // The palm is taken as the upper 70% of the bounding box; a is the
            // y coordinate of that cut line.
            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            // Draw the palm range.
            Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), HGColorSpuiter.ColorToScalar(PalmsRangeColor), 2, 8, 0);

            // Approximate the contour with fewer vertices (max deviation 3 px) so
            // the curve/polygon stays within the given accuracy of the original.
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
            contours[boundPos] = new MatOfPoint(pointMat.toArray());

            // Convex hull and convexity defects of the hand contour.
            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
            if (hull.toArray().Length < 3)
            {
                return;
            }
            Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

            // Collect the hull points outlining the hand.
            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            // Draw the hand outline.
            Imgproc.drawContours(rgbaMat, hullPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

            // Collect the defect ("far") points recognized as gaps between fingers.
            // convexityDefects packs (start, end, far, depth) per defect, hence j += 4.
            List <MatOfPoint> defectPoints = new List <MatOfPoint>();
            List <Point>      listPoDefect = new List <Point>();

            for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
            {
                Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
                int   depth    = convexDefect.toList()[j + 3];
                // Keep only deep defects located above the palm cut line.
                if (depth > depthThreashold && farPoint.y < a)
                {
                    listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
                }
            }

            MatOfPoint e2 = new MatOfPoint();

            // BUGFIX: was e2.fromList(listPo) -- that filled the defect-point mat
            // with the hull points instead of the detected finger-gap points.
            e2.fromList(listPoDefect);
            defectPoints.Add(e2);

            // Update the detected finger count, clamped to 5.
            numberOfFingers = listPoDefect.Count;
            if (numberOfFingers > 5)
            {
                numberOfFingers = 5;
            }

            // Draw a dot between each pair of detected fingers.
            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, HGColorSpuiter.ColorToScalar(BetweenFingersColor), -1);
            }
        }
コード例 #26
0
    // Coroutine that (re)initializes the webcam pipeline: picks a camera matching
    // isFrontFacing, starts it, waits for the first real frame, then allocates all
    // mats/buffers, builds the striped background, and configures the display quad.
    private IEnumerator init()
    {
        if (webCamTexture != null)
        {
            // Re-initialization: stop the old camera and release previous buffers.
            webCamTexture.Stop();
            initDone = false;

            rgbaMat.Dispose();
            grayMat.Dispose();
            lineMat.Dispose();
            maskMat.Dispose();

            bgMat.Dispose();
        }

        // Checks how many and which cameras are available on the device
        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
        {
            if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
            {
                Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                webCamDevice = WebCamTexture.devices [cameraIndex];

                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);

                break;
            }
        }

        // Fall back to the first device when no camera faces the requested way.
        if (webCamTexture == null)
        {
            webCamDevice  = WebCamTexture.devices [0];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
        }

        Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);


        // Starts the camera
        webCamTexture.Play();



        // Wait until the camera delivers its first real frame before allocating
        // size-dependent buffers.
        while (true)
        {
            //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                                #if UNITY_IPHONE && !UNITY_EDITOR
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
                                                #else
            if (webCamTexture.didUpdateThisFrame)
            {
                                                                #endif

                Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);


                // Pixel transfer buffer plus working mats sized to the real frame.
                colors = new Color32[webCamTexture.width * webCamTexture.height];

                rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
                lineMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
                maskMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                //create a striped background.
                // Diagonal black lines, 4 px apart, on a white canvas.
                bgMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1, new Scalar(255));
                for (int i = 0; i < bgMat.rows() * 2.5f; i = i + 4)
                {
                    Core.line(bgMat, new Point(0, 0 + i), new Point(bgMat.cols(), -bgMat.cols() + i), new Scalar(0), 1);
                }

                dstMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                grayPixels = new byte[grayMat.cols() * grayMat.rows() * grayMat.channels()];
                maskPixels = new byte[maskMat.cols() * maskMat.rows() * maskMat.channels()];

                texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                // Orient the display quad for the platform.
                gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
                                                                #if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
                                                                #endif
//								gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);

//								bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//								float scaleX = 1;
//								float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
//								if (webCamTexture.videoRotationAngle == 270)
//										scaleY = -1.0f;
//								gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                // Fit the orthographic camera to the (rotated) quad.
                                                                #if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                Camera.main.orthographicSize = webCamTexture.width / 2;
                                                                #else
                Camera.main.orthographicSize = webCamTexture.height / 2;
                                                                #endif

                initDone = true;

                break;
            }
            else
            {
                // No frame yet; try again next frame.
                yield return(0);
            }
        }
    }

    // Update is called once per frame.
    // Pulls the latest webcam frame, flips it to the correct orientation, and
    // renders a "comic" view: posterized grayscale plus Canny line art drawn over
    // the striped background mat.
    void Update()
    {
        if (!initDone)
        {
            return;
        }

#if UNITY_IPHONE && !UNITY_EDITOR
        // On iOS didUpdateThisFrame is unreliable; a real frame is detected by the
        // texture reporting its true size (> 16x16 placeholder).
        if (webCamTexture.width > 16 && webCamTexture.height > 16)
        {
#else
        if (webCamTexture.didUpdateThisFrame)
        {
#endif

            Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

            // Orientation fix-up. The original duplicated this whole block once for
            // videoVerticallyMirrored == true and once for false with byte-identical
            // contents, so the two branches are collapsed into one.
            if (webCamDevice.isFrontFacing)
            {
                if (webCamTexture.videoRotationAngle == 0)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
                else if (webCamTexture.videoRotationAngle == 90)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
            else
            {
                // Rear camera: only a 270-degree rotation needs a flip.
                if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }


            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Start from the striped background.
            bgMat.copyTo(dstMat);

            Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);

            grayMat.get(0, 0, grayPixels);

            // Posterize: dark -> black, mid -> gray, bright -> white. The mask marks
            // everything except the mid band, which keeps showing the stripes.
            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels [i] = 0;

                if (grayPixels [i] < 70)
                {
                    grayPixels [i] = 0;

                    maskPixels [i] = 1;
                }
                else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                {
                    grayPixels [i] = 100;
                }
                else
                {
                    grayPixels [i] = 255;

                    maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);

            maskMat.put(0, 0, maskPixels);

            grayMat.copyTo(dstMat, maskMat);

            // Overlay Canny edges as black lines.
            Imgproc.Canny(lineMat, lineMat, 20, 120);

            lineMat.copyTo(maskMat);

            Core.bitwise_not(lineMat, lineMat);

            lineMat.copyTo(dstMat, maskMat);

            Utils.matToTexture2D(dstMat, texture, colors);
        }
    }

    void OnDisable()
    {
        // Guard: OnDisable can run before init() has created the camera texture
        // (e.g. when the component is disabled immediately after being added),
        // in which case webCamTexture is still null.
        if (webCamTexture != null)
        {
            webCamTexture.Stop();
        }
    }

    // Draws the immediate-mode debug menu: a "back" button that returns to the
    // sample list and a "change camera" button that toggles front/rear and
    // restarts initialization.
    void OnGUI()
    {
        // The layout is authored against a 240px-wide virtual screen; scale it up
        // to the real screen width.
        float uiScale = Screen.width / 240.0f;
        GUI.matrix = Matrix4x4.Scale(new Vector3(uiScale, uiScale, uiScale));

        GUILayout.BeginVertical();

        if (GUILayout.Button("back"))
        {
            Application.LoadLevel("OpenCVForUnitySample");
        }

        if (GUILayout.Button("change camera"))
        {
            isFrontFacing = !isFrontFacing;
            StartCoroutine(init());
        }

        GUILayout.EndVertical();
    }
}
コード例 #27
0
    // Update is called once per frame.
    // Tracks screen-orientation changes, pulls the latest webcam frame, flips it
    // to match device orientation, and renders the posterized-gray + Canny-edge
    // "comic" view over the striped background mat.
    void Update()
    {
        if (!initDone)
        {
            return;
        }

        if (screenOrientation != Screen.orientation)
        {
            screenOrientation = Screen.orientation;
            updateLayout();
        }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        // On affected iOS/Unity versions didUpdateThisFrame is unreliable; a real
        // frame is detected by the texture reporting its true size (> 16x16).
        if (webCamTexture.width > 16 && webCamTexture.height > 16)
        {
#else
        if (webCamTexture.didUpdateThisFrame)
        {
#endif

            Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

            //flip to correct direction.
            if (webCamDevice.isFrontFacing)
            {
                if (webCamTexture.videoRotationAngle == 0)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
                else if (webCamTexture.videoRotationAngle == 90)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                // FIX: this was a bare "if" that restarted the chain; made it
                // "else if" so exactly one branch of the chain runs per frame
                // (behavior is unchanged since the angle is a single value, but the
                // chain is now consistent with the rear-facing branch below).
                else if (webCamTexture.videoRotationAngle == 180)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
            else
            {
                if (webCamTexture.videoRotationAngle == 180)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }

            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Start from the striped background.
            bgMat.copyTo(dstMat);

            Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);

            grayMat.get(0, 0, grayPixels);

            // Posterize: dark -> black, mid -> gray, bright -> white. The mask marks
            // everything except the mid band, which keeps showing the stripes.
            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels [i] = 0;

                if (grayPixels [i] < 70)
                {
                    grayPixels [i] = 0;

                    maskPixels [i] = 1;
                }
                else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                {
                    grayPixels [i] = 100;
                }
                else
                {
                    grayPixels [i] = 255;

                    maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);

            maskMat.put(0, 0, maskPixels);

            grayMat.copyTo(dstMat, maskMat);

            // Overlay Canny edges as black lines.
            Imgproc.Canny(lineMat, lineMat, 20, 120);

            lineMat.copyTo(maskMat);

            Core.bitwise_not(lineMat, lineMat);

            lineMat.copyTo(dstMat, maskMat);

            Utils.matToTexture2D(dstMat, texture, colors);
        }
    }

    void OnDisable()
    {
        // Guard: OnDisable can run before init() has created the camera texture
        // (e.g. when the component is disabled immediately after being added),
        // in which case webCamTexture is still null.
        if (webCamTexture != null)
        {
            webCamTexture.Stop();
        }
    }

    // Draws the immediate-mode debug menu: a "back" button that returns to the
    // sample list and a "change camera" button that toggles the requested facing
    // and restarts initialization.
    void OnGUI()
    {
        // The layout is authored against a 240px-tall virtual screen; scale it up
        // to the real screen height.
        float uiScale = Screen.height / 240.0f;
        GUI.matrix = Matrix4x4.Scale(new Vector3(uiScale, uiScale, uiScale));

        GUILayout.BeginVertical();

        if (GUILayout.Button("back"))
        {
            Application.LoadLevel("OpenCVForUnitySample");
        }

        if (GUILayout.Button("change camera"))
        {
            shouldUseFrontFacing = !shouldUseFrontFacing;
            StartCoroutine(init());
        }

        GUILayout.EndVertical();
    }
}
コード例 #28
0
    // Update is called once per frame.
    // Converts the latest webcam frame into a "comic" look: a three-level
    // posterized grayscale image combined with Canny line art, drawn over the
    // striped background mat, then pushed to the display texture.
    void Update()
    {
        // Bail out until init() has finished allocating mats and buffers.
        if (!initDone)
        {
            return;
        }

        // Nothing to do until the camera delivers a fresh frame.
        if (!webCamTexture.didUpdateThisFrame)
        {
            return;
        }

        // Grab the frame directly as single-channel grayscale.
        Utils.WebCamTextureToMat(webCamTexture, grayMat, colors);

        // Start each frame from the striped background canvas.
        bgMat.copyTo(dstMat);

        // Blurred copy used later for edge detection.
        Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);

        grayMat.get(0, 0, grayPixels);

        // Posterize: dark -> black, mid -> gray, bright -> white. The mask marks
        // every pixel except the mid band, so the mid band keeps showing the
        // striped background when the masked copy is applied below.
        for (int idx = 0; idx < grayPixels.Length; idx++)
        {
            if (grayPixels [idx] < 70)
            {
                grayPixels [idx] = 0;
                maskPixels [idx] = 1;
            }
            else if (grayPixels [idx] < 120)
            {
                grayPixels [idx] = 100;
                maskPixels [idx] = 0;
            }
            else
            {
                grayPixels [idx] = 255;
                maskPixels [idx] = 1;
            }
        }

        grayMat.put(0, 0, grayPixels);
        maskMat.put(0, 0, maskPixels);
        grayMat.copyTo(dstMat, maskMat);

        // Overlay Canny edges as black lines: the edge map selects where to draw,
        // and its inversion supplies the black line color.
        Imgproc.Canny(lineMat, lineMat, 20, 120);
        lineMat.copyTo(maskMat);
        Core.bitwise_not(lineMat, lineMat);
        lineMat.copyTo(dstMat, maskMat);

        Utils.matToTexture2D(dstMat, texture, colors);
    }
コード例 #29
0
        // Update is called once per frame.
        //
        // Document-scanner pipeline: finds the largest 4-point contour in the
        // camera frame, perspective-corrects it into the output display area,
        // and draws contour/corner debug overlays when isDebugMode is enabled.
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // Change the color space to YUV (via RGB, since the source is RGBA).
                Imgproc.cvtColor(rgbaMat, yuvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(yuvMat, yuvMat, Imgproc.COLOR_RGB2YUV);
                // Grab only the Y (luminance) component.
                Core.extractChannel(yuvMat, yMat, 0);

                // Blur the image to reduce high-frequency noise.
                Imgproc.GaussianBlur(yMat, yMat, new Size(3, 3), 0);
                // Find edges in the image.
                Imgproc.Canny(yMat, yMat, 50, 200, 3);

                // Find candidate four-point contours in the edge image.
                List <MatOfPoint> contours = new List <MatOfPoint>();
                Find4PointContours(yMat, contours);

                // Pick the contour of the largest area and rearrange its points
                // into a consistent corner order.
                MatOfPoint maxAreaContour = GetMaxAreaContour(contours);
                maxAreaContour = OrderCornerPoints(maxAreaContour);

                // A non-empty point mat means a usable quadrilateral was found.
                bool found = (maxAreaContour.size().area() > 0);
                if (found)
                {
                    // Transform the perspective of the original image.
                    using (Mat transformedMat = PerspectiveTransform(rgbaMat, maxAreaContour))
                    {
                        // Clear the output area to opaque black first.
                        outputDisplayAreaMat.setTo(new Scalar(0, 0, 0, 255));

                        // Only blit results that fit inside the display area and are
                        // not degenerately small (at least 1/16 of its pixel count).
                        if (transformedMat.width() <= outputDisplayAreaMat.width() && transformedMat.height() <= outputDisplayAreaMat.height() &&
                            transformedMat.total() >= outputDisplayAreaMat.total() / 16)
                        {
                            // Center the transformed image inside the display area.
                            int x = outputDisplayAreaMat.width() / 2 - transformedMat.width() / 2;
                            int y = outputDisplayAreaMat.height() / 2 - transformedMat.height() / 2;
                            using (Mat dstAreaMat = new Mat(outputDisplayAreaMat, new OpenCVForUnity.CoreModule.Rect(x, y, transformedMat.width(), transformedMat.height())))
                            {
                                transformedMat.copyTo(dstAreaMat);
                            }
                        }
                    }
                }

                if (isDebugMode)
                {
                    // Draw the edge image in place of the camera image.
                    Imgproc.cvtColor(yMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);

                    // Draw all found contours.
                    Imgproc.drawContours(rgbaMat, contours, -1, DEBUG_CONTOUR_COLOR, 1);
                }

                if (found)
                {
                    // Draw the max-area contour.
                    Imgproc.drawContours(rgbaMat, new List <MatOfPoint> {
                        maxAreaContour
                    }, -1, CONTOUR_COLOR, 2);

                    if (isDebugMode)
                    {
                        // Draw each corner's index number at its position.
                        for (int i = 0; i < maxAreaContour.toArray().Length; i++)
                        {
                            var pt = maxAreaContour.get(i, 0);
                            Imgproc.putText(rgbaMat, i.ToString(), new Point(pt[0], pt[1]), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, DEBUG_CORNER_NUMBER_COLOR, 1, Imgproc.LINE_AA, false);
                        }
                    }
                }

                rgbaMat.copyTo(inputDisplayAreaMat);

                // NOTE(review): displayMat is defined outside this view — presumably
                // it aggregates inputDisplayAreaMat and outputDisplayAreaMat as
                // sub-regions; confirm against the field declarations.
                Utils.fastMatToTexture2D(displayMat, texture, true, 0, true);
            }
        }
コード例 #30
0
    // Update is called once per frame.
    //
    // Rubik's-cube facelet detection: grabs the current webcam frame, finds
    // rectangular contours (candidate facelets), and once nine have been
    // collected, samples the average color at the center of each one and
    // hands the nine classified colors to ColorTracker. The annotated frame
    // is then shown on this GameObject's renderer.
    void Update()
    {
        // NOTE(review): a new Texture2D is allocated every frame and the previous
        // one is never destroyed; UnloadUnusedAssets papers over that leak.
        // Consider reusing a single texture instead.
        Resources.UnloadUnusedAssets();

        // Get a new picture from the camera.
        imgTexture = new Texture2D(webcamTexture.width, webcamTexture.height);
        imgTexture.SetPixels(webcamTexture.GetPixels());
        imgTexture.Apply();

        // Dispose all per-frame Mats deterministically; they hold native memory
        // that the GC does not account for.
        using (Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3))
        using (Mat grayMat = new Mat())
        using (Mat edgesOfPicture = new Mat())
        using (Mat hierarchy = new Mat())
        using (MatOfPoint2f matOfPoint2f = new MatOfPoint2f())
        using (MatOfPoint2f approxCurve = new MatOfPoint2f())
        {
            Utils.texture2DToMat(imgTexture, imgMat);

            // NOTE(review): a 1x1 rectangular structuring element makes this
            // dilation a no-op; kept for behavioral parity — confirm the
            // intended kernel size.
            Imgproc.dilate(imgMat, imgMat, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(1, 1)));

            // Grayscale, denoise and normalize contrast before edge detection.
            Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGB2GRAY);
            Imgproc.GaussianBlur(grayMat, grayMat, new Size(3, 3), 1);
            Imgproc.equalizeHist(grayMat, grayMat);

            // Find edges, then the external contours of the edge image.
            Imgproc.Canny(grayMat, edgesOfPicture, 75, 225);

            List<MatOfPoint> contours = new List<MatOfPoint>();
            Imgproc.findContours(edgesOfPicture, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            List<Rect> rectPre = new List<Rect>();

            try
            {
                // Walk the top level of the contour hierarchy (next-sibling links).
                for (int idx = 0; idx >= 0; idx = (int)hierarchy.get(0, idx)[0])
                {
                    MatOfPoint contour = contours[idx];
                    Rect rect = Imgproc.boundingRect(contour);
                    matOfPoint2f.fromList(contour.toList());

                    // Approximate the contour with a polygon (2% arc-length tolerance).
                    Imgproc.approxPolyDP(matOfPoint2f, approxCurve, Imgproc.arcLength(matOfPoint2f, true) * 0.02, true);
                    long total = approxCurve.total();

                    if (total > 0)
                    {
                        // Collect the cosine of every corner angle of the polygon.
                        ArrayList cos = new ArrayList();
                        Point[] points = approxCurve.toArray();

                        for (int j = 2; j < total + 1; j++)
                        {
                            cos.Add(angle(points[(int)(j % total)], points[j - 2], points[j - 1]));
                        }

                        cos.Sort();
                        Double minCos = (Double)cos[0];
                        Double maxCos = (Double)cos[cos.Count - 1];

                        // A "rectangle" is a quad whose corner angles are all near 90°.
                        bool isRect = total == 4 && minCos >= -0.1 && maxCos <= 0.3;

                        if (isRect)
                        {
                            // Ignore tiny rectangles (noise).
                            if (rect.width > 20)
                            {
                                rectPre.Add(rect);
                            }

                            List<Color> Colors = new List<Color>();

                            // Once all nine facelets are collected, label them and
                            // sample their colors.
                            // NOTE(review): this inner work re-runs for every further
                            // rectangle found in the frame and re-samples all nine
                            // facelets each pass; kept for behavioral parity.
                            for (int op = 0; op < 9; op++)
                            {
                                if (rectPre.Count == 9)
                                {
                                    // Label the facelet with its index.
                                    Imgproc.putText(imgMat, op.ToString(), new Point(rectPre[op].x + 20, rectPre[op].y + 30), Core.FONT_HERSHEY_DUPLEX, 3, new Scalar(200));

                                    // Average a block of pixels at the center of each
                                    // facelet and classify its color.
                                    Colors.Clear();
                                    for (int q = 0; q < rectPre.Count; q++)
                                    {
                                        Color[] blockOfColour = imgTexture.GetPixels(rectPre[q].x + (rectPre[q].width / 2), rectPre[q].y + (rectPre[q].height / 2), rectPre[q].width / 3, rectPre[q].height / 3, 0);

                                        float r = 0, g = 0, b = 0;
                                        foreach (Color pixelBlock in blockOfColour)
                                        {
                                            r += pixelBlock.r;
                                            g += pixelBlock.g;
                                            b += pixelBlock.b;
                                        }
                                        r = r / blockOfColour.Length;
                                        g = g / blockOfColour.Length;
                                        b = b / blockOfColour.Length;

                                        // Unity colors are 0..1; the detector expects 0..255.
                                        var eColor = _colorDetection.ColorEnumFromScalarColor(new double[] { r * 255, g * 255, b * 255 });
                                        var color = ColorDetection.UnityColorFromEnum(eColor);
                                        Colors.Add(color);
                                    }

                                    if (Colors.Count == 9)
                                    {
                                        ColorTracker.Instance.addToTemp(Colors);
                                    }
                                }
                            }
                            Imgproc.drawContours(imgMat, contours, idx, new Scalar(255, 100, 155), 4);
                        }
                    }
                }
            }
            catch (ArgumentOutOfRangeException)
            {
                // A malformed hierarchy entry can index past the contour list;
                // abandon the rest of this frame rather than crash. The next
                // frame starts over from a fresh capture.
            }

            // Push the annotated frame to the renderer.
            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(imgMat, texture);
            gameObject.GetComponent<Renderer>().material.mainTexture = texture;
        }
    }