Example #1
0
    // Per-frame hand detection: convert the camera frame to YCrCb, threshold
    // the Cr channel into a skin mask, pick the hand contour, then crop its
    // bounding box for display in the UI.
    void Update()
    {
        mycap.Play();                                               // ensure the camera is running
        Utils.webCamTextureToMat(mycap, tu);                        // copy the current camera frame into a Mat
        // 37 == Imgproc.COLOR_RGB2YCrCb; the original comment claimed "YUV",
        // but code 37 actually converts RGB to YCrCb.
        Imgproc.cvtColor(tu, a, Imgproc.COLOR_RGB2YCrCb);
        Core.split(a, c);                                           // split channels into c[0..2]
        // BUGFIX: the previous "b = new Mat(480, 640, CV_8UC1)" was allocated
        // and immediately overwritten (leaked native memory every frame).
        b = c[1];                                                   // Cr channel (skin tones respond strongly here)
        Imgproc.threshold(b, b, fa, 255, Imgproc.THRESH_BINARY);    // binarize with the tunable threshold fa
        //Imgproc.threshold(b, b, fa, 255, Imgproc.THRESH_BINARY_INV);
        b.copyTo(b1);                                               // keep a copy for drawing (findContours mutates its input)
        List <MatOfPoint> contours = new List <MatOfPoint>();       // storage for detected contours

        // Named constants replace the magic 0 / 2 of the original call.
        Imgproc.findContours(b, contours, hier, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        s = 0;                                                      // contour area
        x = 0; y = 480;                                             // extreme-point coords (y starts at frame height)
        List <MatOfPoint> selectcol = new List <MatOfPoint>();      // candidate hand contours

        foreach (MatOfPoint i in contours)                          // keep only contours with area > 5000 px
        {
            if (Mathf.Abs((float)Imgproc.contourArea(i)) > 5000)
            {
                selectcol.Add(i);
            }
        }
        if (selectcol.Count == 1)                                   // exactly one candidate: treat it as the hand
        {
            s     = Mathf.Abs((float)Imgproc.contourArea(selectcol[0]));
            rect1 = Imgproc.boundingRect(selectcol[0]);             // bounding box of the hand contour
            Point[] dian = selectcol[0].toArray();
            foreach (Point dian1 in dian)
            {
                // Track the contour point with the minimum y (topmost in image
                // coordinates; the original comment called this the "lowest" point).
                if (dian1.y < y)
                {
                    y = dian1.y;
                    x = dian1.x;
                }
            }
        }
        else if (selectcol.Count == 2)                              // two candidates: pick by the first point's y
        {
            if (selectcol[0].toArray()[0].y < selectcol[1].toArray()[0].y)
            {
                s     = Mathf.Abs((float)Imgproc.contourArea(selectcol[0]));
                rect1 = Imgproc.boundingRect(selectcol[0]);
                Point[] dian = selectcol[0].toArray();
                foreach (Point dian1 in dian)
                {
                    if (dian1.y < y)                                // extreme point, as above
                    {
                        y = dian1.y;
                        x = dian1.x;
                    }
                }
            }
            else
            {
                s     = Mathf.Abs((float)Imgproc.contourArea(selectcol[1]));
                rect1 = Imgproc.boundingRect(selectcol[1]);
                Point[] dian = selectcol[1].toArray();
                foreach (Point dian1 in dian)
                {
                    if (dian1.y < y)
                    {
                        y = dian1.y;
                        x = dian1.x;
                    }
                }
            }
        }
        else                                                        // zero or 3+ candidates: fixed fallback point
        {
            x = 320; y = 200;
        }
        //    Debug.Log(selectcol.Count);
        Imgproc.rectangle(b1, rect1.tl(), rect1.br(), color);       // draw the bounding box on b1
        Mat jie = new Mat(b1, rect1);                               // crop the bounding-box region from b1

        Imgproc.resize(jie, shishi, size);                          // resize the crop for display in the UI
    }
        // Update is called once per frame.
        // CamShift tracking driven by a stored touch/click: input is captured
        // here, mapped to texture coordinates, and handed to OnTouch(); once
        // shouldStartCamShift is set, the ROI hue histogram is (re)built and
        // tracked via back-projection + CamShift.
        void Update()
        {
            #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
                {
                    storedTouchPoint = new Point(t.position.x, t.position.y);
                }
            }
            #else
            //Mouse
            if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
            {
                storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
            }
            #endif

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                if (storedTouchPoint != null)
                {
                    // Map the screen-space point into texture coordinates in place.
                    ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbaMat.cols(), rgbaMat.rows());
                    OnTouch(rgbaMat, storedTouchPoint);
                    storedTouchPoint = null;
                }

                Point[] points = roiPointList.ToArray();

                if (shouldStartCamShift)
                {
                    shouldStartCamShift = false;

                    using (MatOfPoint roiPointMat = new MatOfPoint(points)) {
                        roiRect = Imgproc.boundingRect(roiPointMat);
                    }

                    if (roiHistMat != null)
                    {
                        roiHistMat.Dispose();
                        roiHistMat = null;
                    }
                    roiHistMat = new Mat();

                    // BUGFIX: the MatOfInt/MatOfFloat arguments were created
                    // inline without disposal, leaking native memory on every
                    // (re)start; they are now wrapped in using blocks.
                    using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                    using (Mat maskMat = new Mat())
                    using (MatOfInt channels = new MatOfInt(0))
                    using (MatOfInt histSize = new MatOfInt(16))
                    using (MatOfFloat ranges = new MatOfFloat(0, 180))
                    {
                        Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), channels, maskMat, roiHistMat, histSize, ranges);
                        Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                    }
                }
                else if (points.Length == 4)
                {
                    // Same leak fix for the per-frame back-projection temporaries.
                    using (Mat backProj = new Mat())
                    using (MatOfInt channels = new MatOfInt(0))
                    using (MatOfFloat ranges = new MatOfFloat(0, 180))
                    {
                        Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), channels, roiHistMat, backProj, ranges, 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }
                }

                if (points.Length < 4)
                {
                    // Still selecting: mark each chosen point.
                    for (int i = 0; i < points.Length; i++)
                    {
                        Imgproc.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    // Tracking: draw the rotated CamShift quad and the axis-aligned ROI.
                    for (int i = 0; i < 4; i++)
                    {
                        Imgproc.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }

                    Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
Example #3
0
        // Blot segmentation demo: grayscale -> invert -> Otsu threshold ->
        // connected components; annotates centroids/bounding boxes and shows
        // the result in a RawImage.
        // NOTE(review): this all runs from Update() although the input image
        // never changes — strong candidate for moving to Start() and caching.
        void Update()
        {
            Texture2D imgTexture = Resources.Load("vcl_kd_WITH_LOADING_CONTROL") as Texture2D;
            Mat       imgMat     = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, imgMat);

            // Output Mats are left empty: cvtColor/threshold/bitwise_* allocate
            // the correct size/type themselves. (The originals pre-allocated
            // CV_8UC3 buffers that OpenCV immediately reallocated as CV_8UC1.)
            Mat grayMat = new Mat();
            Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGB2GRAY);              //grayscale

            Mat gray2Mat = new Mat();
            Core.bitwise_not(grayMat, gray2Mat);

            Mat gray3Mat = new Mat();
            Imgproc.threshold(gray2Mat, gray3Mat, thresh, maxval, Imgproc.THRESH_BINARY_INV | Imgproc.THRESH_OTSU);
            //Imgproc.threshold (gray2Mat, gray2Mat, thresh, maxval, Imgproc.THRESH_BINARY);

            Core.bitwise_not(gray3Mat, gray3Mat);

            Mat procMat = new Mat();
            Core.bitwise_and(gray2Mat, gray3Mat, procMat);             // zero background, original values in foreground

            Mat labels    = new Mat();
            Mat stats     = new Mat();
            Mat centroids = new Mat();
            int total     = Imgproc.connectedComponentsWithStats(gray3Mat, labels, stats, centroids);

            // Label 0 is the background component, so start at 1.
            for (int i = 1; i < total; ++i)
            {
                int xx = (int)centroids.get(i, 0) [0];
                int yy = (int)centroids.get(i, 1) [0];

                Imgproc.circle(procMat, new Point(xx, yy), 3, new Scalar(255, 255, 0), -1);

                int x      = (int)stats.get(i, Imgproc.CC_STAT_LEFT) [0];
                int y      = (int)stats.get(i, Imgproc.CC_STAT_TOP) [0];
                int height = (int)stats.get(i, Imgproc.CC_STAT_HEIGHT) [0];
                int width  = (int)stats.get(i, Imgproc.CC_STAT_WIDTH) [0];
                int area   = (int)stats.get(i, Imgproc.CC_STAT_AREA) [0];

                // Ignore tiny specks.
                if (area > 30)
                {
                    OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(x, y, width, height);
                    Imgproc.rectangle(procMat, rect.tl(), rect.br(), new Scalar(255, 255, 0), 2);

                    // BUGFIX: the submat view is now disposed (it previously
                    // leaked a native Mat header per component per frame).
                    using (Mat blotMat = imgMat.submat(rect))
                    {
                        Scalar meanVa = Core.mean(blotMat);
                        int    meanV  = (int)meanVa.val [0];
                        Imgproc.putText(procMat, "I" + i + "/" + (area * meanV), new Point(x, y - 50), Core.FONT_HERSHEY_PLAIN, 1, new Scalar(255, 122, 0), 2);
                    }
                }
            }

            // NOTE(review): these contours are computed but never consumed;
            // kept because findContours also modifies gray3Mat in place in
            // older OpenCV builds — confirm before deleting.
            Mat hierarchy = new Mat();
            List <MatOfPoint> blotContours = new List <MatOfPoint>();
            Imgproc.findContours(gray3Mat, blotContours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(procMat, texture);
            GetComponent <RawImage> ().texture = texture;

            // BUGFIX: release native Mat memory; each of these leaked per frame.
            imgMat.Dispose();
            grayMat.Dispose();
            gray2Mat.Dispose();
            gray3Mat.Dispose();
            procMat.Dispose();
            labels.Dispose();
            stats.Dispose();
            centroids.Dispose();
            hierarchy.Dispose();
        }
Example #4
0
        // Update is called once per frame.
        // Legacy (old plugin API: Core.circle/line/rectangle/putText) CamShift
        // sample: the user picks four points, a hue histogram of that ROI is
        // built, and each frame is back-projected so CamShift can follow it.
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                Point[] points = roiPointList.ToArray();

                if (roiPointList.Count == 4)
                {
                    // BUGFIX: the inline MatOfInt/MatOfFloat arguments leaked
                    // native memory every frame; now disposed via using blocks.
                    using (Mat backProj = new Mat())
                    using (MatOfInt channels = new MatOfInt(0))
                    using (MatOfFloat ranges = new MatOfFloat(0, 180))
                    {
                        Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), channels, roiHistMat, backProj, ranges, 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }

#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch: a tap clears the tracked ROI.
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        if (Input.GetTouch(0).phase == TouchPhase.Ended)
                        {
                            roiPointList.Clear();
                        }
                    }
#else
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Clear();
                    }
#endif
                }

                if (roiPointList.Count < 4)
                {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        Touch t = Input.GetTouch(0);
                        if (t.phase == TouchPhase.Ended)
                        {
                            roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));

                            // Reject taps outside the camera image.
                            if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                            {
                                roiPointList.RemoveAt(roiPointList.Count - 1);
                            }
                        }
                    }
#else
                    //Mouse
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));

                        // Reject clicks outside the camera image.
                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }
#endif

                    if (roiPointList.Count == 4)
                    {
                        // All four points collected: build the hue histogram of the ROI.
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray())) {
                            roiRect = Imgproc.boundingRect(roiPointMat);
                        }

                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        // Same disposal fix as the back-projection above.
                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                        using (Mat maskMat = new Mat())
                        using (MatOfInt channels = new MatOfInt(0))
                        using (MatOfInt histSize = new MatOfInt(16))
                        using (MatOfFloat ranges = new MatOfFloat(0, 180))
                        {
                            Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), channels, maskMat, roiHistMat, histSize, ranges);
                            Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                        }
                    }
                }

                // Draw selection markers or the tracked quad + bounding box.
                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Core.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Core.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }

                    Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
        // Update is called once per frame.
        // Legacy (Unity 4/5-era) CamShift sample: reads the raw WebCamTexture,
        // corrects its orientation, then runs the four-point ROI selection and
        // CamShift tracking loop, finally rendering onto the quad's material.
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            // Re-layout when the device rotates.
            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            // Workaround for these iOS/Unity combinations where
            // didUpdateThisFrame is unreliable: use a size sanity check instead.
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                // Flip to the correct orientation.
                // BUGFIX: the 180-degree case was a bare "if" that broke the
                // else-if chain; all four angles are now one mutually
                // exclusive chain.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                Point[] points = roiPointList.ToArray();

                if (roiPointList.Count == 4)
                {
                    // BUGFIX: inline MatOfInt/MatOfFloat arguments leaked
                    // native memory every frame; now disposed via using.
                    using (Mat backProj = new Mat())
                    using (MatOfInt channels = new MatOfInt(0))
                    using (MatOfFloat ranges = new MatOfFloat(0, 180))
                    {
                        Imgproc.calcBackProject(new List <Mat>(new Mat[] { hsvMat }), channels, roiHistMat, backProj, ranges, 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }

#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch: a tap clears the current ROI.
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        if (Input.GetTouch(0).phase == TouchPhase.Ended)
                        {
                            roiPointList.Clear();
                        }
                    }
#else
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Clear();
                    }
#endif
                }

                if (roiPointList.Count < 4)
                {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        Touch t = Input.GetTouch(0);
                        if (t.phase == TouchPhase.Ended)
                        {
                            roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));

                            // Reject taps outside the camera image.
                            if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                            {
                                roiPointList.RemoveAt(roiPointList.Count - 1);
                            }
                        }
                    }
#else
                    //Mouse
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));

                        // Reject clicks outside the camera image.
                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }
#endif

                    if (roiPointList.Count == 4)
                    {
                        // All four points collected: build the hue histogram of the ROI.
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray()))
                        {
                            roiRect = Imgproc.boundingRect(roiPointMat);
                        }

                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        // Same disposal fix as the back-projection above.
                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                        using (Mat maskMat = new Mat())
                        using (MatOfInt channels = new MatOfInt(0))
                        using (MatOfInt histSize = new MatOfInt(16))
                        using (MatOfFloat ranges = new MatOfFloat(0, 180))
                        {
                            Imgproc.calcHist(new List <Mat>(new Mat[] { roiHSVMat }), channels, maskMat, roiHistMat, histSize, ranges);
                            Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                        }
                    }
                }

                // Draw selection markers or the tracked quad + bounding box.
                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Imgproc.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Imgproc.line(rgbaMat, points[i], points[(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }

                    Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Imgproc.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer>().material.mainTexture = texture;
            }
        }

        // Unity callback: stop the hardware camera feed when this component is
        // disabled (also runs on scene unload / object destruction).
        void OnDisable()
        {
            webCamTexture.Stop();
        }

        // Legacy IMGUI overlay: a "back" button that returns to the sample
        // menu scene and a "change camera" button that toggles the facing
        // direction and re-initializes the capture pipeline.
        void OnGUI()
        {
            // Scale the whole IMGUI layout relative to a 240px-high reference screen.
            float scale = Screen.height / 240.0f;
            GUI.matrix = Matrix4x4.Scale(new Vector3(scale, scale, scale));

            GUILayout.BeginVertical();

            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("OpenCVForUnitySample");
            }

            if (GUILayout.Button("change camera"))
            {
                shouldUseFrontFacing = !shouldUseFrontFacing;
                StartCoroutine(init());
            }

            GUILayout.EndVertical();
        }
Example #6
0
    // One processing pass over the current camera frame: build a binary mask
    // from Cr/S/V thresholds, find candidate food regions, texture the best
    // candidates, and push the composited frame to the output quad.
    void _Process()
    {
        // Rebuilt on every call so the current inspector threshold values apply.
        binaryMatCreator = new BinaryMatCreator();
        binaryMatCreator.setCrUpper(cr_threshold_upper);
        binaryMatCreator.setCrLower(cr_threshold_lower);
        binaryMatCreator.setSUpper(s_threshold_upper);
        binaryMatCreator.setSLower(s_threshold_lower);
        binaryMatCreator.setVUpper(v_threshold_upper);
        binaryMatCreator.setVLower(v_threshold_lower);

        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);
        Mat cameraMat   = rgbMat;
        int imageWidth  = cameraMat.width();
        int imageHeight = cameraMat.height();

        // NOTE(review): these channel splits are only referenced by the
        // commented-out refinement code below — confirm before removing.
        var hSVChannels   = ARUtil.getHSVChannels(cameraMat);
        var yCrCbChannels = ARUtil.getYCrCbChannels(cameraMat);

        // Search region covers the whole frame.
        OpenCVForUnity.Rect searchRect = new OpenCVForUnity.Rect(0, 0, cameraMat.cols(), cameraMat.rows());


        // Scorer used to rank candidate regions.
        scorer = new MaguroScorer(searchRect);


        try
        {
            // Build a searchRect-sized binary image from the camera input.
            Mat binaryROIMat = binaryMatCreator.createBinaryMat(cameraMat, searchRect);

            // Contour search over the binary image within the search rectangle.
            var contours = ARUtil.findContours(binaryROIMat, searchRect.tl());

            // Build candidate set -> score each candidate -> sort by score.
            var regionSet = new RegionCandidateSet(contours)
                            .elliminateByArea(searchRect, 0.01, 0.9)
                            .score(scorer)
                            .sort();

            if (regionSet.candidates.Count == 0)
            {
                print("first candidates is 0");
                goto show;   // nothing detected: still display the camera frame
            }

            // Keep at most the top three scored candidates.
            var count   = 0;
            var regions = new List <Region>();
            foreach (var candidate in regionSet.candidates)
            {
                if (count > 2)
                {
                    break;
                }
                if (candidate == null)
                {
                    print("candite is null");
                    break;
                }
                // Build a region for this candidate.
                foodRegion = new Region(candidate, cameraMat);
                regions.Add(foodRegion);
                count++;
            }


            //var regions = new List<Region>();
            //var cr_refined_threshold = 125;

            //foreach (var candidate in regionSet.candidates)
            //{

            //    // Build a region, then refine it using the Cr channel.
            //    var region = new Region(candidate, cameraMat);
            //    var refinedRegion = region.createRefienedRegion(yCrCbChannels[1], cr_refined_threshold);
            //    if (refinedRegion != null)
            //    {
            //        regions.Add(refinedRegion);
            //    }
            //}

            //var filteredRegions = Region.elliminateByInclusionRect(regions);


            // Create the texture to paste onto the food regions.
            var texture = _textureCreator.create(cameraMat, regions);

            if (texture == null)
            {
                print("regions is empty");
                goto show;
            }

            // Union of all region masks: used as the alpha mask for blending.
            Mat alphaMask = Mat.zeros(cameraMat.size(), CvType.CV_8U);
            foreach (var region in regions)
            {
                Core.add(region.mask, alphaMask, alphaMask);
            }

            if (willChange)
            {
                _textureCreator.alphaBlend(cameraMat, texture, alphaMask, alpha);
            }

            if (shouldDrawRect)
            {
                // foodRegion.drawRect(matForDeveloper);
                Imgproc.rectangle(cameraMat, searchRect.tl(), searchRect.br(), new Scalar(0, 0, 255), 3);
            }
        }
        catch (System.Exception e)
        {
            print(e);
            goto show;   // on any failure, fall through and display the raw frame
        }

show:
        outputScreenQuad.setMat(cameraMat);
    }
Example #7
0
    // Locates answer-sheet squares on an aligned OMR image: edge-detect,
    // collect convex hulls of roughly square size, then for each deduplicated
    // square scan its 20 answer rows via the getAnswerNumber(Mat, Rect)
    // overload and annotate the detected numbers on the image.
    // Side effect: clears nowDetected once all four answer blocks are found.
    public void getAnswerNumber(Mat align)
    {
        Mat align_gray = new Mat(), align_edges = new Mat();

        Imgproc.cvtColor(align, align_gray, Imgproc.COLOR_RGB2GRAY);
        Imgproc.Canny(align_gray, align_edges, 50, 50);
        // 3x3 elliptical kernel anchored at its centre; dilation closes gaps
        // in the Canny edges before contour extraction.
        Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2 + 1, 2 + 1), new Point(1, 1));

        Imgproc.dilate(align_edges, align_edges, element);


        //Shape detection
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(align_edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        List <MatOfPoint> hulls = new List <MatOfPoint>();

        for (int i = 0; i < contours.Count; i++)
        {
            // Convex hull is returned as contour-point indices; map them back
            // to actual points.
            MatOfInt hull_temp = new MatOfInt();
            Imgproc.convexHull(contours[i], hull_temp);
            int[]   arrIndex   = hull_temp.toArray();
            Point[] arrContour = contours[i].toArray();
            Point[] arrPoints  = new Point[arrIndex.Length];

            for (int k = 0; k < arrIndex.Length; k++)
            {
                arrPoints[k] = arrContour[arrIndex[k]];
            }

            MatOfPoint temp = new MatOfPoint();
            temp.fromArray(arrPoints);

            //Filter outliers: keep only hulls near the expected square area.
            if (Imgproc.contourArea(temp) > 90000 && Imgproc.contourArea(temp) < 110000)
            {
                hulls.Add(temp);
            }
        }

        List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();

        for (int i = 0; i < hulls.Count; i++)
        {
            MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
            hull2f.Add(newPoint);
        }

        List <Rect> rects = new List <Rect>();

        for (int i = 0; i < hulls.Count; i++)
        {
            //Approximate polygon
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
            List <Point> approx_polygon = approx.toList();
            approx_polygon = Scannerproc.filterPolygon(approx_polygon);

            if (Scannerproc.isSquare(approx_polygon))
            {
                Rect r         = Imgproc.boundingRect(new MatOfPoint(approx_polygon.ToArray()));
                // Deduplicate: skip squares whose top-left corner is within
                // 100 px of a square we already kept.
                bool isContain = false;
                for (int k = 0; k < rects.Count; k++)
                {
                    if (Scannerproc.distanceTwoPoints(rects[k].tl(), r.tl()) < 100)
                    {
                        isContain = true;
                    }
                }

                if (!isContain)
                {
                    rects.Add(r);

                    // Scan the 20 answer rows inside this square.
                    for (int j = 1; j < 21; j++)
                    {
                        Rect roi = new Rect((int)r.tl().x + (int)((r.width * 1.3) / 6), (int)r.tl().y + (r.height / 21) * j, (int)((r.width * 4.7) / 6), r.height / 21);
                        int  num = getAnswerNumber(align, roi);
                        if (num != 0)
                        {
                            Imgproc.putText(align, " " + num, new Point(roi.x - 40, roi.y + 25), 1, 2, new Scalar(255, 0, 0, 255), 3, Core.LINE_AA, false);
                            Imgproc.rectangle(align, roi.tl(), roi.br(), new Scalar(0, 255, 0, 255), 2);
                        }
                    }
                }
            }

            //Center of mass (currently only used by the commented-out debug draw).
            //BUGFIX: guard the division — if filterPolygon removed every point,
            //this previously threw DivideByZeroException.
            if (approx_polygon.Count > 0)
            {
                int cx = 0,
                    cy = 0;
                for (int k = 0; k < approx_polygon.Count; k++)
                {
                    cx += (int)approx_polygon[k].x;
                    cy += (int)approx_polygon[k].y;
                }
                cx /= approx_polygon.Count;
                cy /= approx_polygon.Count;

                // Imgproc.circle(roi, new Point(cx, cy), 5, new Scalar(255), -1);
            }
        }

        // All four answer blocks located: stop further detection.
        if (rects.Count == 4)
        {
            nowDetected = false;
        }
    }
Example #8
0
        /// <summary>
        /// Per-frame camera callback: runs face detection (dlib or Haar cascade) inside
        /// the processing sub-area, draws the landmarks/rects there, blanks the rest of
        /// the frame, and pushes the result to the display quad on the app thread.
        /// </summary>
        /// <param name="bgraMat">Full BGRA camera frame; drawn on here and disposed on the app thread.</param>
        /// <param name="projectionMatrix">Camera projection matrix (not used in this method).</param>
        /// <param name="cameraToWorldMatrix">Camera pose; used to position/orient the display quad.</param>
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            // ROI view into bgraMat — shares pixel memory, so later draws on this Mat
            // land inside the processing area of the full frame.
            Mat bgraMatClipROI = new Mat(bgraMat, processingAreaRect);

            // Snapshot the ROI pixels before the full frame is blanked below.
            bgraMatClipROI.copyTo(processingAreaMat);


            // fill all black.
            Imgproc.rectangle(bgraMat, new Point(0, 0), new Point(bgraMat.width(), bgraMat.height()), new Scalar(0, 0, 0, 0), -1);


            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, processingAreaMat);

            // detect faces.
            List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();

            if (useDlibFaceDetecter)
            {
                // Dlib detector path: convert its UnityEngine.Rect results to OpenCV rects.
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                // convert image to greyscale.
                Imgproc.cvtColor(processingAreaMat, grayMat, Imgproc.COLOR_BGRA2GRAY);

                Imgproc.equalizeHist(grayMat, grayMat);

                cascade.detectMultiScale(grayMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(grayMat.cols() * 0.15, grayMat.cols() * 0.15), new Size());

                detectResult = faces.toList();

                // Adjust to Dilb's result.
                // (Shift cascade rects down 10% so landmark fitting matches dlib-style boxes.)
                foreach (OpenCVForUnity.Rect r in detectResult)
                {
                    r.y += (int)(r.height * 0.1f);
                }
            }


            foreach (var rect in detectResult)
            {
                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                //draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(bgraMatClipROI, points, new Scalar(0, 255, 0, 255), 2);

                //draw face rect
                OpenCVForUnityUtils.DrawFaceRect(bgraMatClipROI, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), new Scalar(0, 0, 255, 255), 2);
            }

            Imgproc.putText(bgraMatClipROI, "W:" + bgraMatClipROI.width() + " H:" + bgraMatClipROI.height(), new Point(5, bgraMatClipROI.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 255, 255), 1, Imgproc.LINE_AA, false);

            // Red border around the whole frame.
            Imgproc.rectangle(bgraMat, new Point(0, 0), new Point(bgraMat.width(), bgraMat.height()), new Scalar(0, 0, 255, 255), 2);

            // Draw prosessing area rectangle.
            Imgproc.rectangle(bgraMat, processingAreaRect.tl(), processingAreaRect.br(), new Scalar(0, 255, 255, 255), 2);

            // Release the ROI header (the underlying bgraMat buffer is still needed below).
            bgraMatClipROI.Dispose();


            // Texture upload and transform updates must run on the Unity app thread.
            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                OpenCVForUnity.Utils.fastMatToTexture2D(bgraMat, texture);
                bgraMat.Dispose();

                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }, false);
        }
        /// <summary>
        /// Hands the pose estimation process.
        /// Finds the largest color-blob contour in rgbaMat, draws its bounding box and
        /// convex hull, and estimates a finger count from the convexity defects.
        /// </summary>
        /// <param name="rgbaMat">RGBA camera frame; blurred and annotated in place.</param>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            // NOTE(review): getContours() is read before process() is called, so the
            // contours used below come from the previously processed frame — confirm intended.
            List <MatOfPoint> contours = detector.getContours();

            detector.process(rgbaMat);

            if (contours.Count <= 0)
            {
                return;
            }

            // Pick the contour whose minimum-area rectangle is largest.
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours [boundPos].toArray()));
            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            // Cutoff line at 70% of the box height: defects below it (wrist/palm area)
            // are ignored when counting fingers.
            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            // Simplify the winning contour before hull/defect analysis.
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours [boundPos].toArray()), pointMat, 3, true);
            contours [boundPos] = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours [boundPos].toArray()), hull);

            // convexityDefects needs at least a triangle of hull points.
            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contours [boundPos].toArray()), hull, convexDefect);

            // Cache the native-to-managed conversions once. The original called
            // toList() inside every loop iteration; each call copies the whole Mat,
            // turning the loops below into accidental O(n^2) work.
            List <Point> contourPoints = contours [boundPos].toList();
            List <int>   hullIndices   = hull.toList();
            List <int>   defectIndices = convexDefect.toList();

            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();
            List <Point>      listPo     = new List <Point> ();

            for (int j = 0; j < hullIndices.Count; j++)
            {
                listPo.Add(contourPoints [hullIndices [j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <MatOfPoint> defectPoints = new List <MatOfPoint> ();
            List <Point>      listPoDefect = new List <Point> ();

            // Each defect is encoded as 4 ints: start idx, end idx, farthest-point idx, depth.
            for (int j = 0; j < defectIndices.Count; j = j + 4)
            {
                Point farPoint = contourPoints [defectIndices [j + 2]];
                int   depth    = defectIndices [j + 3];
                // Keep only deep defects above the wrist cutoff line.
                if (depth > threasholdSlider.value && farPoint.y < a)
                {
                    listPoDefect.Add(farPoint);
                }
            }

            MatOfPoint e2 = new MatOfPoint();

            // Fix: copy the defect points; the original copied listPo (the hull points)
            // here. defectPoints is not consumed afterwards, so drawing output is unchanged.
            e2.fromList(listPoDefect);
            defectPoints.Add(e2);

            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            // Roughly one defect between each pair of extended fingers; clamp to 5.
            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            numberOfFingersText.text = numberOfFingers.ToString();

            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
Example #10
0
    /// <summary>
    /// Applies MOG2 background subtraction to rgbMat, cleans the foreground mask with
    /// dilate/erode + threshold, and returns the bounding rect of the moving blob the
    /// user is clicking on (left mouse button held); otherwise returns null.
    /// </summary>
    /// <returns>Bounding rect of the clicked blob, or null when none is selected.</returns>
    private OpenCVForUnity.Rect BgSub()
    {
        backgroundSubstractorMOG2.apply(rgbMat, fgmaskMat);

        roiRect      = null;
        // NOTE(review): this aliases fgmaskMat rather than copying it, so the "Roi"
        // pipeline below reads the same pixels — confirm a clone() was not intended.
        fgmaskMatRoi = fgmaskMat;

        // All-white rectangular structuring elements.
        Mat kernelD = new Mat(40, 40, CvType.CV_8UC1, new Scalar(255, 255, 255));
        Mat kernelE = new Mat(20, 20, CvType.CV_8UC1, new Scalar(255, 255, 255));

        Mat kernelDRoi = new Mat(1, 1, CvType.CV_8UC1, new Scalar(255, 255, 255));
        Mat kernelERoi = new Mat(1, 1, CvType.CV_8UC1, new Scalar(255, 255, 255));

        // Close holes in the masks (dilate then erode).
        Imgproc.dilate(fgmaskMat, fgmaskMatDilate, kernelD);
        Imgproc.erode(fgmaskMatDilate, fgmaskMatDilate, kernelE);

        Imgproc.dilate(fgmaskMatRoi, fgmaskMatDilateRoi, kernelDRoi);
        Imgproc.erode(fgmaskMatDilateRoi, fgmaskMatDilateRoi, kernelERoi);

        // Fix: release the kernels' native memory; the original leaked four Mats
        // on every call (Mat wraps unmanaged OpenCV memory).
        kernelD.Dispose();
        kernelE.Dispose();
        kernelDRoi.Dispose();
        kernelERoi.Dispose();

        mask_binary    = new OpenCVForUnity.Mat();
        mask_binaryRoi = new OpenCVForUnity.Mat();

        Imgproc.threshold(fgmaskMatDilate, mask_binary, 123, 255, Imgproc.THRESH_BINARY);
        Imgproc.threshold(fgmaskMatDilateRoi, mask_binaryRoi, 123, 255, Imgproc.THRESH_BINARY);

        List <MatOfPoint> contours = new List <MatOfPoint>();

        OpenCVForUnity.Mat hierarchy = new OpenCVForUnity.Mat();

        Imgproc.findContours(mask_binary, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
        // Fix: the hierarchy output is never used; release it instead of leaking it.
        hierarchy.Dispose();

        if (contours.Count == 0)
        {
            return(null);
        }

        int i = 0;
        color1 = new Color(0.8f, 0.8f, 0.95f, 0.25f);
        color2 = new Color(0.8f, 0.8f, 0.95f);
        foreach (MatOfPoint contour in contours)
        {
            // Bounding rect of this candidate blob.
            MatOfPoint new_mat1 = new MatOfPoint(contour.toArray());
            output = Imgproc.boundingRect(new_mat1);
            // Fix: release the temporary point Mat (leaked once per contour before).
            new_mat1.Dispose();
            rgbMat.copyTo(dest, mask_binaryRoi);
            Imgproc.rectangle(rgbMat, output.tl(), output.br(), new Scalar(255, 0, 0), 2);
            output_ar.Add(output);
            // Convert to screen coordinates (Unity's y axis points up).
            Vector3          top_left_pos     = new Vector3(output.x, Screen.height - output.y);
            Vector3          bottom_right_pos = new Vector3(output.x + output.width, Screen.height - (output.y + output.height));
            UnityEngine.Rect check_pos        = GetScreenRect(top_left_pos, bottom_right_pos);
            i++;
            // Return the blob under the cursor while the left button is held.
            if (Input.GetMouseButton(0) && check_pos.Contains(new Vector2(Input.mousePosition.x, Screen.height - Input.mousePosition.y)))
            {
                Debug.Log("take it!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
                Debug.Log(output);
                return(output);
            }
        }
        return(null);
    }
        // Use this for initialization
        /// <summary>
        /// Loads the "matchshapes" sample image, labels its connected components, and
        /// renders each component with a random color, its bounding box, centroid, and
        /// label index onto this GameObject's material.
        /// </summary>
        void Start()
        {
            // Load the sample image into a single-channel source Mat.
            Texture2D imgTexture = Resources.Load("matchshapes") as Texture2D;

            Mat srcMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC1);

            Utils.texture2DToMat(imgTexture, srcMat);
            Debug.Log("srcMat.ToString() " + srcMat.ToString());

            Mat dstMat = new Mat(srcMat.size(), CvType.CV_8UC3);

            // Connected-component labeling with per-component statistics.
            Mat labels    = new Mat();
            Mat stats     = new Mat();
            Mat centroids = new Mat();
            int total     = Imgproc.connectedComponentsWithStats(srcMat, labels, stats, centroids);

            Debug.Log("labels.ToString() " + labels.ToString());
            Debug.Log("stats.ToString() " + stats.ToString());
            Debug.Log("centroids.ToString() " + centroids.ToString());
            Debug.Log("total " + total);

            // One random color per label; label 0 (background) stays black.
            List <Scalar> palette = new List <Scalar> (total);

            palette.Add(new Scalar(0, 0, 0));
            for (int label = 1; label < total; ++label)
            {
                palette.Add(new Scalar(Random.Range(0, 255), Random.Range(0, 255), Random.Range(0, 255)));
            }

            // Paint every pixel with the color of its component label.
            for (int row = 0; row < dstMat.rows(); ++row)
            {
                for (int col = 0; col < dstMat.cols(); ++col)
                {
                    Scalar c = palette [(int)labels.get(row, col) [0]];
                    dstMat.put(row, col, c.val [0], c.val [1], c.val [2]);
                }
            }

            // Outline each component's bounding box in green.
            for (int label = 1; label < total; ++label)
            {
                int left = (int)stats.get(label, Imgproc.CC_STAT_LEFT) [0];
                int top  = (int)stats.get(label, Imgproc.CC_STAT_TOP) [0];
                int h    = (int)stats.get(label, Imgproc.CC_STAT_HEIGHT) [0];
                int w    = (int)stats.get(label, Imgproc.CC_STAT_WIDTH) [0];

                OpenCVForUnity.Rect box = new OpenCVForUnity.Rect(left, top, w, h);

                Imgproc.rectangle(dstMat, box.tl(), box.br(), new Scalar(0, 255, 0), 2);
            }

            // Mark each component's centroid with a red dot.
            for (int label = 1; label < total; ++label)
            {
                int cx = (int)centroids.get(label, 0) [0];
                int cy = (int)centroids.get(label, 1) [0];

                Imgproc.circle(dstMat, new Point(cx, cy), 3, new Scalar(255, 0, 0), -1);
            }

            // Print each label index near its component's top-left corner.
            for (int label = 1; label < total; ++label)
            {
                int left = (int)stats.get(label, Imgproc.CC_STAT_LEFT) [0];
                int top  = (int)stats.get(label, Imgproc.CC_STAT_TOP) [0];

                Imgproc.putText(dstMat, "" + label, new Point(left + 5, top + 15), Core.FONT_HERSHEY_COMPLEX, 0.5, new Scalar(255, 255, 0), 2);
            }

            // Publish the annotated image on this GameObject's material.
            Texture2D texture = new Texture2D(dstMat.cols(), dstMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(dstMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
Example #12
0
    /// <summary>
    /// Hands the pose estimation process.
    /// Finds the largest color-blob contour in rgbaMat, publishes its bounding-box
    /// corners, draws its convex hull, and estimates a finger count from convexity defects.
    /// </summary>
    /// <param name="rgbaMat">RGBA camera frame; blurred and annotated in place.</param>
    public void handPoseEstimationProcess(Mat rgbaMat)
    {
        //Imgproc.blur(mRgba, mRgba, new Size(5,5));
        Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
        //Imgproc.medianBlur(mRgba, mRgba, 3);

        if (!isColorSelected)
        {
            return;
        }

        // NOTE(review): getContours() is read before process() is called, so the
        // contours used below come from the previously processed frame — confirm intended.
        List <MatOfPoint> contours = detector.getContours();

        detector.process(rgbaMat);

        if (contours.Count <= 0)
        {
            return;
        }

        // Pick the contour whose minimum-area rectangle is largest.
        RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

        double boundWidth  = rect.size.width;
        double boundHeight = rect.size.height;
        int    boundPos    = 0;

        for (int i = 1; i < contours.Count; i++)
        {
            rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
            if (rect.size.width * rect.size.height > boundWidth * boundHeight)
            {
                boundWidth  = rect.size.width;
                boundHeight = rect.size.height;
                boundPos    = i;
            }
        }

        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
        Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
        Imgproc.circle(rgbaMat, boundRect.tl(), 6, new Scalar(0, 255, 0, 255), -1);
        Imgproc.circle(rgbaMat, boundRect.br(), 6, new Scalar(0, 255, 0, 255), -1);

        // Publish the box corners: (pointaX, pointaY) = top-left, (pointbX, pointbY) = bottom-right.
        pointbX = boundRect.br().x;
        pointbY = boundRect.br().y;
        pointaX = boundRect.x;
        // Fix: was "pointbY = boundRect.y;", which clobbered the bottom-right y
        // assigned just above and left pointaY unset.
        pointaY = boundRect.y;

        // Cutoff line at 70% of the box height: defects below it (wrist/palm area)
        // are ignored when counting fingers.
        double a = boundRect.br().y - boundRect.tl().y;

        a = a * 0.7;
        a = boundRect.tl().y + a;
        Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

        // Simplify the winning contour before hull/defect analysis.
        MatOfPoint2f pointMat = new MatOfPoint2f();

        Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
        contours[boundPos] = new MatOfPoint(pointMat.toArray());
        MatOfInt  hull         = new MatOfInt();
        MatOfInt4 convexDefect = new MatOfInt4();

        Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
        // convexityDefects needs at least a triangle of hull points.
        if (hull.toArray().Length < 3)
        {
            return;
        }
        Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

        // Cache the native-to-managed conversions once. The original called toList()
        // inside every loop iteration; each call copies the whole Mat, turning the
        // loops below into accidental O(n^2) work.
        List <Point> contourPoints = contours[boundPos].toList();
        List <int>   hullIndices   = hull.toList();
        List <int>   defectIndices = convexDefect.toList();

        List <MatOfPoint> hullPoints = new List <MatOfPoint>();
        List <Point>      listPo     = new List <Point>();

        for (int j = 0; j < hullIndices.Count; j++)
        {
            listPo.Add(contourPoints[hullIndices[j]]);
        }
        MatOfPoint e = new MatOfPoint();

        e.fromList(listPo);
        hullPoints.Add(e);
        List <MatOfPoint> defectPoints = new List <MatOfPoint>();
        List <Point>      listPoDefect = new List <Point>();

        // Each defect is encoded as 4 ints: start idx, end idx, farthest-point idx, depth.
        for (int j = 0; j < defectIndices.Count; j = j + 4)
        {
            Point farPoint = contourPoints[defectIndices[j + 2]];
            int   depth    = defectIndices[j + 3];
            // Keep only deep defects above the wrist cutoff line.
            if (depth > 8700 && farPoint.y < a)
            {
                listPoDefect.Add(farPoint);
            }
        }

        MatOfPoint e2 = new MatOfPoint();

        // Fix: copy the defect points; the original copied listPo (the hull points)
        // here. defectPoints is not consumed afterwards, so drawing output is unchanged.
        e2.fromList(listPoDefect);
        defectPoints.Add(e2);
        Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
        // Roughly one defect between each pair of extended fingers; clamp to 5.
        this.numberOfFingers = listPoDefect.Count;
        if (this.numberOfFingers > 5)
        {
            this.numberOfFingers = 5;
        }
        foreach (Point p in listPoDefect)
        {
            Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
        }
    }
Example #13
0
        // Update is called once per frame
        /// <summary>
        /// Per-frame loop: updates the FPS estimate, then tracks a user-selected
        /// 4-point ROI with CamShift smoothed by a constant-acceleration Kalman filter.
        /// State layout (from the initialization below): xe/xp = [x vx ax y vy ay s vs as],
        /// so xp[0] = x, xp[3] = y, xp[6] = window size.
        /// </summary>
        void Update()
        {
            // Sample rate update
            ++frames;
            float timeNow = Time.realtimeSinceStartup;

            if (timeNow > lastInterval + updateInterval)
            {
                fps          = (float)(frames / (timeNow - lastInterval));
                frames       = 0;
                lastInterval = timeNow;
            }

            // Time since last update
            float dt = 1 / fps;

            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();
                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);


                Point[] points = roiPointList.ToArray();

                if (roiPointList.Count == 4)
                {
                    using (Mat backProj = new Mat())
                    {
                        // Hue-histogram back projection used as the CamShift likelihood map.
                        Imgproc.calcBackProject(new List <Mat>(new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);


                        ///////////////////////////////////////////////////////
                        // Kalman Filter Start
                        ///////////////////////////////////////////////////////

                        // Process noise matrix, Q
                        Matrix Q2 = (Mathf.Pow(pn, 2) / dt) * Matrix.Build.DenseOfArray(new float[, ]
                        {
                            { Mathf.Pow(dt, 4) / 4, Mathf.Pow(dt, 3) / 2f, Mathf.Pow(dt, 2) / 2f },
                            { Mathf.Pow(dt, 3) / 2f, Mathf.Pow(dt, 2) / 2f, dt },
                            { Mathf.Pow(dt, 2) / 2f, dt, 1 }
                        });

                        Q = (Mathf.Pow(pn, 2) / dt) * Matrix.Build.DenseOfMatrixArray(new Matrix[, ]
                        {
                            { Q2, zero3x3, zero3x3 },
                            { zero3x3, Q2, zero3x3 },
                            { zero3x3, zero3x3, Q2 }
                        });

                        // Measurement noise matrix, R
                        R = Mathf.Pow(mn, 2) * Matrix.Build.DenseIdentity(3);

                        // Build transition and control matrices
                        Matrix F2 = Matrix.Build.DenseOfArray(new float[, ] {
                            { 1, dt, Mathf.Pow(dt, 2) / 2f }, { 0, 1, dt }, { 0, 0, 1 }
                        });
                        Matrix F = Matrix.Build.DenseOfMatrixArray(new Matrix[, ]
                        {
                            { F2, zero3x3, zero3x3 },
                            { zero3x3, F2, zero3x3 },
                            { zero3x3, zero3x3, F2 },
                        });

                        // Prediction
                        Pp = F * Pe * F.Transpose() + Q;
                        xp = F * xe;

                        roiPred = new OpenCVForUnity.Rect((int)(xp[0] - 0.5f * xp[6]), (int)(xp[3] - 0.5f * xp[6]), (int)xp[6], (int)xp[6]);
                        roiRect = new OpenCVForUnity.Rect((int)(xp[0] - 0.5f * roiSearch.width), (int)(xp[3] - 0.5f * roiSearch.height), roiSearch.width, roiSearch.height);
                        // roiRect = roiPred.clone();

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        ObjectFound = (roiRect.height > 0 && roiRect.width > 0);

                        // Innovation
                        Vector nu;
                        if (ObjectFound)
                        {
                            // Innovation
                            Vector zk = Vector.Build.DenseOfArray(new float[] { (int)(roiRect.x + 0.5f * roiRect.width),
                                                                                (int)(roiRect.y + 0.5f * roiRect.height),
                                                                                (int)(0.5 * (roiRect.width + roiRect.height)) });
                            nu = zk - H * xp;

                            // Search window update
                            roiSearch = r.boundingRect().clone();

                            // Debug
                            SpeedAtFailure = -1f;
                        }
                        else
                        {
                            // Track lost: fall back to the prediction.
                            roiRect = roiPred.clone();

                            // Fix: the original tested "xp[0] < 0 || xp[0] < 0" — the
                            // duplicated term should bound y (xp[3]), matching the upper
                            // bounds xp[0] > 640 and xp[3] > 480 on the same line.
                            if (xp[0] < 0 || xp[3] < 0 || xp[0] > 640 || xp[3] > 480)
                            {
                                // Reset the state to the frame center with a default window.
                                xp[0] = 320f; xp[1] = 0; xp[2] = 0;
                                xp[3] = 240f; xp[4] = 0; xp[5] = 0;
                                xp[6] = 40f; xp[7] = 0; xp[8] = 0;

                                roiRect.x      = (int)(320 - 0.5f * 40);
                                roiRect.y      = (int)(240 - 0.5f * 40);
                                roiRect.height = 40;
                                roiRect.width  = 40;

                                roiPred = roiRect.clone();
                            }

                            // Innovation
                            Vector zk = Vector.Build.DenseOfArray(new float[] { (float)(roiRect.x + 0.5f * roiRect.width),
                                                                                (float)(roiRect.y + 0.5f * roiRect.height),
                                                                                (float)(0.5 * (roiRect.width + roiRect.height)) });

                            nu        = zk - H * xp;
                            roiSearch = roiPred.clone();
                        }

                        // Kalman gain
                        // NOTE(review): the textbook gain is Pp*H^T*(H*Pp*H^T + R)^-1;
                        // this uses R.Transpose() instead — confirm intended approximation.
                        Matrix K = Pp * H.Transpose() * R.Transpose();


                        // Innovation gain
                        Vector gain = K * nu;

                        // State update
                        xe = xp + gain;

                        // Covariance update
                        Pe = (Pp.Inverse() + H.Transpose() * R.Transpose() * H).Inverse();

                        // Display results to console
                        StateArray      = xe.ToArray();
                        InnovationGain  = gain.ToArray();
                        CovarianceTrace = Pe.Diagonal().ToArray();

                        ///////////////////////////////////////////////////////
                        // Kalman Filter End
                        ///////////////////////////////////////////////////////

                        r.points(points);
                    }

                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Clear();
                    }
                }


                if (roiPointList.Count < 4)
                {
                    // ROI selection phase: collect up to 4 touch/click points.
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));

                        // Discard points that fall outside the image.
                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }

                    if (roiPointList.Count == 4)
                    {
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray()))
                        {
                            roiRect   = Imgproc.boundingRect(roiPointMat);
                            roiPred   = roiRect.clone();
                            roiSearch = roiRect.clone();
                        }

                        ///////////////////////////////////////////////////////
                        // Kalman Filter Initialize
                        ///////////////////////////////////////////////////////
                        Pe = Matrix.Build.DenseIdentity(9, 9);
                        Vector z1 = roi2center(roiRect);
                        xe = Vector.Build.DenseOfArray(new float[] { z1[0], 0, 0, z1[1], 0, 0, (roiRect.width + roiRect.height) / 2, 0, 0 });

                        ///////////////////////////////////////////////////////
                        // End Kalman Filter Initialize
                        ///////////////////////////////////////////////////////

                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        // Build the hue histogram of the selected ROI for back projection.
                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                            using (Mat maskMat = new Mat())
                            {
                                Imgproc.calcHist(new List <Mat>(new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                            }
                    }
                }

                if (points.Length < 4)
                {
                    // Still selecting: show the points chosen so far.
                    for (int i = 0; i < points.Length; i++)
                    {
                        Core.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    // Tracking: measured (green), predicted (red), and search (blue) windows.
                    Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                    Core.rectangle(rgbaMat, roiPred.tl(), roiPred.br(), new Scalar(0, 0, 255, 255), 2);
                    Core.rectangle(rgbaMat, roiSearch.tl(), roiSearch.br(), new Scalar(255, 0, 0, 255), 2);
                }

                Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
        /// <summary>
        /// Per-frame callback: renders a sketch-style (posterized gray + Canny line art)
        /// version of the camera frame into the display quad's texture and positions
        /// the quad in front of the camera.
        /// </summary>
        /// <param name="bgraMat">Camera frame in BGRA. Ownership passes to this method; it is disposed on the app thread.</param>
        /// <param name="projectionMatrix">Camera projection matrix (not used here).</param>
        /// <param name="cameraToWorldMatrix">Camera-to-world transform used to place and orient the quad.</param>
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            // Restrict processing to the configured sub-rectangle of the frame.
            Mat bgraMatClipROI = new Mat(bgraMat, processingAreaRect);

            Imgproc.cvtColor(bgraMatClipROI, grayMat, Imgproc.COLOR_BGRA2GRAY);

            // Start from the background mat; posterized gray and line art are composited on top.
            bgMat.copyTo(dstMatClippingROI);

            // Blurred copy feeds the Canny edge pass below.
            Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);


            grayMat.get(0, 0, grayPixels);

            // Posterize into three bands: dark -> 0, mid -> 100, bright -> 255.
            // maskPixels marks the dark/bright pixels that get copied into the destination.
            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels [i] = 0;

                if (grayPixels [i] < 70)
                {
                    grayPixels [i] = 0;
                    maskPixels [i] = 1;
                }
                else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                {
                    grayPixels [i] = 100;
                }
                else
                {
                    grayPixels [i] = 255;
                    maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);
            maskMat.put(0, 0, maskPixels);
            grayMat.copyTo(dstMatClippingROI, maskMat);

            // Extract edges from the blurred gray image and stamp them (inverted,
            // i.e. black lines) into the destination using the edges as the mask.
            Imgproc.Canny(lineMat, lineMat, 20, 120);

            lineMat.copyTo(maskMat);

            Core.bitwise_not(lineMat, lineMat);

            lineMat.copyTo(dstMatClippingROI, maskMat);


            //Imgproc.putText (dstMat, "W:" + dstMat.width () + " H:" + dstMat.height () + " SO:" + Screen.orientation, new Point (5, dstMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (0), 2, Imgproc.LINE_AA, false);

            Imgproc.cvtColor(dstMat, bgraMat, Imgproc.COLOR_GRAY2BGRA);

            // Debug overlay: outline the full frame and the processing area.
            Imgproc.rectangle(bgraMat, new Point(0, 0), new Point(bgraMat.width(), bgraMat.height()), new Scalar(0, 0, 255, 255), 4);
            Imgproc.rectangle(bgraMat, processingAreaRect.tl(), processingAreaRect.br(), new Scalar(0, 0, 255, 255), 4);

            bgraMatClipROI.Dispose();


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    // Fix: the original returned here without disposing bgraMat,
                    // leaking the native frame whenever playback had stopped.
                    bgraMat.Dispose();
                    return;
                }

                Utils.fastMatToTexture2D(bgraMat, texture);
                bgraMat.Dispose();

                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);
                position        *= 1.2f;

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }, false);
        }
Example #15
0
        /*=============================================*
        * From per-contour vertices to hand-gesture detection
        *=============================================*/
        /// <summary>
        /// Contours to hand gesture: draws the bounding box, arm/palm/hand
        /// outlines and finger segments derived from a hand contour.
        /// </summary>
        /// <param name="rgbaMat">Rgba mat the annotations are drawn into.</param>
        /// <param name="contour">Contour assumed to contain a hand (including the arm).</param>
        private static void _contourToHandGesture(Mat rgbaMat, MatOfPoint contour)
        {
            try
            {
                // Prepare vertex inspection for this contour.
                _pointOfVertices(rgbaMat, contour);

                // Bounding rectangle of the reference contour (drawn for reference).
                OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
                Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(ContourRangeColor), 2, 8, 0);

                /*=============================================*
                 * Extent of the hand including the arm
                 **=============================================*/
                MatOfInt hull = new MatOfInt();
                Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

                // Fix: hoist the toList() conversions out of the loops — each call
                // re-marshals the whole native Mat, making the loops accidentally O(n^2).
                List<Point> contourList = contour.toList();
                List<int>   hullList    = hull.toList();

                // Collect hull vertices, merging points closer than 1/10 of the
                // bounding box's short side (treated as jitter).
                List<Point> armPointList = new List<Point>();
                for (int j = 0; j < hullList.Count; j++)
                {
                    Point armPoint = contourList[hullList[j]];
                    bool  addFlag  = true;
                    foreach (Point point in armPointList)
                    {
                        double distance = Mathf.Sqrt((float)((armPoint.x - point.x) * (armPoint.x - point.x) + (armPoint.y - point.y) * (armPoint.y - point.y)));
                        if (distance <= Mathf.Min((float)boundRect.width, (float)boundRect.height) / 10)
                        {
                            addFlag = false;
                            break;
                        }
                    }
                    if (addFlag)
                    {
                        armPointList.Add(armPoint);
                    }
                }

                MatOfPoint armMatOfPoint = new MatOfPoint();
                armMatOfPoint.fromList(armPointList);
                List<MatOfPoint> armPoints = new List<MatOfPoint>();
                armPoints.Add(armMatOfPoint);

                // Draw the arm+hand outline.
                Imgproc.drawContours(rgbaMat, armPoints, -1, HGColorSpuiter.ColorToScalar(ArmRangeColor), 3);

                // A (near-)triangular hull cannot be classified any further.
                if (hull.toArray().Length < 3)
                {
                    return;
                }

                /*=============================================*
                 * Extent of the palm
                 **=============================================*/
                // Convexity defects (concave points between hull vertices) locate the palm.
                MatOfInt4 convexDefect = new MatOfInt4();
                Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

                List<int> defectList = convexDefect.toList();

                // Each defect is 4 ints: start idx, end idx, farthest-point idx, depth.
                // Keep only defects that are deep enough and above the box's bottom edge.
                List<Point> palmPointList = new List<Point>();
                for (int j = 0; j + 3 < defectList.Count; j += 4)
                {
                    Point farPoint = contourList[defectList[j + 2]];
                    int   depth    = defectList[j + 3];
                    if (depth > depthThreashold && farPoint.y < boundRect.br().y - boundRect.tl().y)
                    {
                        palmPointList.Add(farPoint);
                    }
                }

                MatOfPoint palmMatOfPoint = new MatOfPoint();
                palmMatOfPoint.fromList(palmPointList);
                List<MatOfPoint> palmPoints = new List<MatOfPoint>();
                palmPoints.Add(palmMatOfPoint);

                // Draw the palm outline.
                Imgproc.drawContours(rgbaMat, palmPoints, -1, HGColorSpuiter.ColorToScalar(PalmRangeColor), 3);

                // Fix: the original indexed palmPointList[0] unconditionally and relied
                // on the broad catch when no defect passed the filter; bail out cleanly.
                if (palmPointList.Count == 0)
                {
                    return;
                }

                /*=============================================*
                 * Palm + fingertips (wrist excluded)
                 **=============================================*/
                // Replace the outermost arm points with the first/last palm points
                // so the outline stops at the wrist.
                List<Point> handPointList = new List<Point>(armPointList);
                handPointList.Reverse();
                handPointList.RemoveAt(0);
                handPointList.Insert(0, palmPointList[0]);
                handPointList.RemoveAt(handPointList.Count - 1);
                handPointList.Add(palmPointList[palmPointList.Count - 1]);

                MatOfPoint handMatOfPoint = new MatOfPoint();
                handMatOfPoint.fromList(handPointList);
                List<MatOfPoint> handPoints = new List<MatOfPoint>();
                handPoints.Add(handMatOfPoint);

                Imgproc.drawContours(rgbaMat, handPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

                /*=============================================*
                 * Fingertip positions
                 **=============================================*/
                // Midpoints of consecutive palm points (wrapping around at the end).
                List<Point> palmCenterPoints = new List<Point>();
                for (int i = 0; i < palmPointList.Count; i++)
                {
                    Point palmPoint     = palmPointList[i];
                    Point palmPointNext = palmPointList[(i + 1) % palmPointList.Count];

                    Point palmCenterPoint = new Point((palmPoint.x + palmPointNext.x) / 2, (palmPoint.y + palmPointNext.y) / 2);
                    palmCenterPoints.Add(palmCenterPoint);
                }

                // Pair each palm midpoint with the nearest hand vertex (at most 5 fingers).
                for (int i = 0; i < palmCenterPoints.Count && i + 1 < handPointList.Count && i < 5; i++)
                {
                    List<Point> fingerList = new List<Point>();
                    fingerList.Add(palmCenterPoints[i]);
                    fingerList.Add(handPointList[i + 1]);

                    MatOfPoint fingerPoint = new MatOfPoint();
                    fingerPoint.fromList(fingerList);

                    List<MatOfPoint> fingerPoints = new List<MatOfPoint>();
                    fingerPoints.Add(fingerPoint);

                    Imgproc.drawContours(rgbaMat, fingerPoints, -1, HGColorSpuiter.ColorToScalar(FingerRangeColor), 3);
                }

//				Imgproc.putText(rgbaMat, "", new Point(2, rgbaMat.rows()-30), Core.FONT_HERSHEY_SIMPLEX, 1.0, HGColorSpuiter.ColorToScalar(Color.black), 2, Imgproc.LINE_AA, false);
            }
            catch (System.Exception e)
            {
                Debug.Log(e.Message);
            }
        }
Example #16
0
    /// <summary>
    /// Warps the whole of <paramref name="src"/> into the region <paramref name="roi"/>
    /// of <paramref name="dst"/> using an affine transform with bilinear sampling.
    /// </summary>
    /// <param name="src">Source image to warp.</param>
    /// <param name="dst">Destination image; its size determines the warp canvas.</param>
    /// <param name="roi">Target rectangle inside dst that src is mapped onto.</param>
    public static void affineTransform(Mat src, Mat dst, OpenCVForUnity.Rect roi)
    {
        // Three corner correspondences (top-left, top-right, bottom-right)
        // fully determine the 2x3 affine matrix.
        double right  = src.cols() - 1;
        double bottom = src.rows() - 1;
        var fromCorners = new MatOfPoint2f(new Point(0.0, 0.0), new Point(right, 0.0), new Point(right, bottom));
        var toCorners   = new MatOfPoint2f(roi.tl(), new Point(roi.x + roi.width, roi.y), roi.br());

        // Solve for the matrix, then apply it.
        Mat affine = Imgproc.getAffineTransform(fromCorners, toCorners);
        Imgproc.warpAffine(src, dst, affine, dst.size(), Imgproc.INTER_LINEAR);
    }