        // Use this for initialization
        void Start ()
        {
            // Create a black 500x500 RGB image to draw on
            Mat imgMat = new Mat (500, 500, CvType.CV_8UC3, new Scalar (0, 0, 0));
            Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

            // Generate random points in the range [100, 400)
            int rand_num = 50;
            MatOfPoint pointsMat = new MatOfPoint ();
            pointsMat.alloc (rand_num);

            Core.randu (pointsMat, 100, 400);

            Point[] points = pointsMat.toArray ();
            for (int i = 0; i < rand_num; ++i)
            {
                Core.circle (imgMat, points [i], 2, new Scalar (255, 255, 255), -1);
            }

            // Compute the convex hull; the result is a list of indices into pointsMat
            MatOfInt hullInt = new MatOfInt ();
            Imgproc.convexHull (pointsMat, hullInt);

            // Map the hull indices back to the actual points
            List<Point> pointMatList = pointsMat.toList ();
            List<int> hullIntList = hullInt.toList ();
            List<Point> hullPointList = new List<Point> ();

            for (int j = 0; j < hullIntList.Count; j++)
            {
                hullPointList.Add (pointMatList [hullIntList [j]]);
            }

            MatOfPoint hullPointMat = new MatOfPoint ();
            hullPointMat.fromList (hullPointList);

            List<MatOfPoint> hullPoints = new List<MatOfPoint> ();
            hullPoints.Add (hullPointMat);

            // Draw the hull as a closed green contour
            Imgproc.drawContours (imgMat, hullPoints, -1, new Scalar (0, 255, 0), 2);

            Imgproc.cvtColor (imgMat, imgMat, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
            Utils.matToTexture2D (imgMat, texture);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
        }
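
The index-to-point mapping above is easy to get wrong. A minimal helper that turns the indices returned by Imgproc.convexHull into a drawable MatOfPoint might look like this (the helper name and placement are illustrative, not part of the sample):

        // Illustrative helper: convert hull indices (as produced by
        // Imgproc.convexHull) into the corresponding contour points.
        static MatOfPoint HullIndicesToPoints (MatOfPoint contour, MatOfInt hullIndices)
        {
            List<Point> contourPoints = contour.toList ();
            List<Point> hullPoints = new List<Point> ();
            foreach (int idx in hullIndices.toList ())
            {
                hullPoints.Add (contourPoints [idx]);
            }

            MatOfPoint hullMat = new MatOfPoint ();
            hullMat.fromList (hullPoints);
            return hullMat;
        }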
Example #2
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == true, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IPHONE && !UNITY_EDITOR
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
#else
                if (webCamTexture.didUpdateThisFrame)
                {
#endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
#endif
//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


//										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
//										if (webCamTexture.videoRotationAngle == 270)
//												scaleY = -1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    Camera.main.orthographicSize = webCamTexture.width / 2;
#else
                    Camera.main.orthographicSize = webCamTexture.height / 2;
#endif

                    initDone = true;

                    break;
                }
                else
                {
                    yield return null;
                }
            }
        }
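
The platform-dependent readiness check above recurs in every Update method below. It can be factored into a single helper; a minimal sketch, assuming the same compilation defines and fields:

        // A sketch of factoring out the platform quirk handled above: on some
        // Unity/iOS versions width/height report 16 until the first real frame.
        bool IsFrameReady ()
        {
#if UNITY_IPHONE && !UNITY_EDITOR
            return webCamTexture.width > 16 && webCamTexture.height > 16;
#else
            return webCamTexture.didUpdateThisFrame;
#endif
        }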

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

#if UNITY_IPHONE && !UNITY_EDITOR
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 * Parameters:
                 * prevImg first 8-bit input image
                 * nextImg second input image
                 * prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 * nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 * status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 * err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // iterate over every tracked point; the original
                    // "Count - 1" bound skipped the last entry
                    for (int x = 0; x < byteStatus.Count; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        void OnGUI()
        {
            float     screenScale  = Screen.width / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("OpenCVForUnitySample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            GUILayout.EndVertical();
        }
    }
}
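
The per-angle flip cases in the Update method above can be collapsed into a lookup. A compact sketch, assuming Core.flip's usual flip-code convention (0 = around the x-axis, positive = around the y-axis, negative = both):

        // Illustrative helper: pick a Core.flip code for a front-facing camera
        // frame from videoRotationAngle; returns null when no flip is needed.
        static int? FrontFacingFlipCode (int videoRotationAngle)
        {
            switch (videoRotationAngle)
            {
            case 0:
                return 1;   // mirror around the y-axis
            case 90:
                return 0;   // mirror around the x-axis
            case 270:
                return 1;   // mirror around the y-axis
            default:
                return null;
            }
        }

Called as, e.g., int? code = FrontFacingFlipCode (webCamTexture.videoRotationAngle); if (code.HasValue) Core.flip (rgbaMat, rgbaMat, code.Value);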
Example #3
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 *  Parameters:
                 *      prevImg first 8-bit input image
                 *      nextImg second input image
                 *      prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 *      nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 *      status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 *      err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // iterate over every tracked point; the original
                    // "Count - 1" bound skipped the last entry
                    for (int x = 0; x < byteStatus.Count; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Imgproc.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Imgproc.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }

//              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
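
Every optical-flow sample in this listing repeats the same prev/this bookkeeping. Stripped to its core, the per-frame step is the following sketch (over the same fields and constants used above; the gray input stands in for the cvtColor result):

        // Sketch of the per-frame buffer rotation used above: the previous
        // "this" image becomes "prev", the new frame becomes "this", and the
        // safe copy preserves last frame's corners so calcOpticalFlowPyrLK
        // receives prev points that match the prev image.
        void StepOpticalFlowBuffers (Mat gray)
        {
            matOpFlowThis.copyTo (matOpFlowPrev);              // this -> prev
            gray.copyTo (matOpFlowThis);                       // new frame -> this
            Imgproc.goodFeaturesToTrack (matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsThis.fromArray (MOPcorners.toArray ());
            mMOP2fptsSafe.copyTo (mMOP2fptsPrev);              // last frame's corners -> prev
            mMOP2fptsThis.copyTo (mMOP2fptsSafe);              // keep this frame's corners for next time
        }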
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                //flip to correct direction.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 * Parameters:
                 * prevImg first 8-bit input image
                 * nextImg second input image
                 * prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 * nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 * status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 * err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // iterate over every tracked point; the original
                    // "Count - 1" bound skipped the last entry
                    for (int x = 0; x < byteStatus.Count; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            // GetContours() is called before Process(); this appears to rely on
            // the detector handing back a live reference to its internal contour
            // list, which Process() then clears and refills.
            List <MatOfPoint> contours = detector.GetContours();

            detector.Process(rgbaMat);

            //                      Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours [boundPos];

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //                      Debug.Log (
            //                      " Row start [" +
            //                              (int)boundRect.tl ().y + "] row end [" +
            //                              (int)boundRect.br ().y + "] Col start [" +
            //                              (int)boundRect.tl ().x + "] Col end [" +
            //                              (int)boundRect.br ().x + "]");


            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            //                      Debug.Log (
            //                      " A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();
            List <Point>      listPo     = new List <Point> ();

            List <int>   hullIndexList = hull.toList();
            List <Point> contourPtList = contour.toList();

            for (int j = 0; j < hullIndexList.Count; j++)
            {
                listPo.Add(contourPtList [hullIndexList [j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <Point> listPoDefect = new List <Point> ();

            if (convexDefect.rows() > 0)
            {
                List <int>   convexDefectList = convexDefect.toList();
                List <Point> contourList      = contour.toList();
                for (int j = 0; j < convexDefectList.Count; j = j + 4)
                {
                    Point farPoint = contourList [convexDefectList [j + 2]];
                    int   depth    = convexDefectList [j + 3];
                    if (depth > threasholdSlider.value && farPoint.y < a)
                    {
                        listPoDefect.Add(contourList [convexDefectList [j + 2]]);
                    }
                    //                              Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
                }
            }


            //                      Debug.Log ("hull: " + hull.toList ());
            //                      if (convexDefect.rows () > 0) {
            //                          Debug.Log ("defects: " + convexDefect.toList ());
            //                      }

            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            //                      int defectsTotal = (int)convexDefect.total();
            //                      Debug.Log ("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            //                      Debug.Log ("numberOfFingers " + numberOfFingers);

            //                      Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
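
Imgproc.convexityDefects flattens its output into int quadruples (start index, end index, farthest-point index, depth), and OpenCV stores the depth in fixed point, multiplied by 256, so the raw value compared against the slider above is not in pixels. A self-contained sketch of the unpacking (helper name and the pixel threshold are illustrative):

        // Illustrative helper: collect the "far" points of all convexity
        // defects deeper than minDepthPx pixels. convexityDefects returns a
        // flat list of (start, end, far, depth*256) quadruples.
        static List<Point> FarDefectPoints (MatOfPoint contour, MatOfInt4 defects, double minDepthPx)
        {
            List<int> d = defects.toList ();
            List<Point> pts = contour.toList ();
            List<Point> farPoints = new List<Point> ();

            for (int j = 0; j + 3 < d.Count; j += 4)
            {
                double depthPx = d [j + 3] / 256.0;
                if (depthPx > minDepthPx)
                {
                    farPoints.Add (pts [d [j + 2]]);
                }
            }
            return farPoints;
        }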
 public static Vector2[] MatOfPointToVector2Array(MatOfPoint matOfPoint)
 {
     return(PointArrayToVector2Array(matOfPoint.toArray()));
 }
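
PointArrayToVector2Array itself is not shown in this listing; a plausible implementation is sketched below. This is hypothetical: the real project's version may differ, for instance by flipping y to match Unity's bottom-left origin.

 // Hypothetical counterpart to the wrapper above; a straight per-component copy.
 public static Vector2[] PointArrayToVector2Array(Point[] points)
 {
     Vector2[] result = new Vector2[points.Length];
     for (int i = 0; i < points.Length; i++)
     {
         result[i] = new Vector2((float)points[i].x, (float)points[i].y);
     }
     return result;
 }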
    //Optical flow
    IEnumerator OpticalFlow()
    {
        Scalar tempHue;
        Scalar tempSpeed;

        int iCountTrackedPoints = 0;
        int vecCount            = 0;

        if (mMOP2fptsPrev.rows() == 0)
        {
            // first time through the loop so we need prev and this mats
            Imgproc.cvtColor(openCVCreateMat.rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            // copy that to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            //if (blurImage == true){
            //Gaussian filter of the MOG2 images
            //Imgproc.GaussianBlur(matOpFlowPrev, matOpFlowPrev, kernelSize, sigmaX, sigmaY);//Gauss filter
            //}
            // get prev corners
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, qLevel, minDistCorners);             //SLIDER input
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // get safe copy of this corners
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        }
        else
        {
            // we've been through before so
            // this mat is valid. Copy it to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get this mat
            Imgproc.cvtColor(openCVCreateMat.rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            //if (blurImage == true){
            //Gaussian filter of the MOG2 images
            //Imgproc.GaussianBlur(matOpFlowThis, matOpFlowThis, kernelSize, sigmaX, sigmaY);//Gauss filter
            //}
            // get the corners for this mat
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, qLevel, minDistCorners);             // SLIDER input
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // retrieve the corners from the prev mat (saves calculating them again)
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

            // and save this corners for next time through
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }

        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

        if (mMOBStatus.rows() > 0)
        {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte>  byteStatus  = mMOBStatus.toList();

            double absX;
            double absY;             // displacement components, used below for the polar-coordinate (angle) calculation

            // iterate over every tracked point; the original "Count - 1"
            // bound skipped the last entry
            for (int x = 0; x < byteStatus.Count; x++)
            {
                if (byteStatus [x] == 1)
                {
                    Point pt  = cornersThis [x];
                    Point pt2 = cornersPrev [x];

                    //if (pt != pt2) {  // this IF could arguably be removed, as pt and pt2 should always be different

                    float mySpeed = CalculateSpeedFloat(pt, pt2);

                    absX = pt.x - pt2.x;
                    absY = pt.y - pt2.y;
                    float angle = Mathf.Atan2((float)absX, (float)absY) * Mathf.Rad2Deg;
                    angle = Mathf.RoundToInt(angle);

                    //Get Hue based on Angle
                    tempHue   = GetHueColor((int)angle);
                    tempSpeed = GetSpeedColor((int)mySpeed);

                    //Store so we can add tracers
                    if (mySpeed > maxSpeed)                               //|| CalculateSpeedFloat (pt, pt2) <= 1
                    {
                        yield return(null);
                    }
                    else
                    {
                        tracerPoints.AddTracersToStorage(pt, pt2, tempHue, tempSpeed, videoPlayer.frame, angle, mySpeed);
                        speedVec = speedVec + mySpeed;
                        angleVec = angleVec + angle;
                        vecCount++;
                        //tracerPoints2.AddTracersToStorage (pt, pt2, tempSpeed, videoPlayer.frame, angle, mySpeed);

                        //ADD STORING SPEEDS TO VECTOR
                        //CSVDataEmail.AddDataTrack (speed,angle.ToString ());
                    }
                    iCountTrackedPoints++;
                }
            }
        }

        // guard against division by zero when no points were tracked this frame
        if (vecCount > 0)
        {
            meanSpeed = (int)(speedVec / vecCount);
            meanAngle = (int)(angleVec / vecCount);
        }
        //sTrackingLogger = "Video frame: " + videoPlayer.frame.ToString() + "    Points: " + iCountTrackedPoints.ToString() + "";
        sTrackingLogger = "Speed: " + meanSpeed.ToString() + " Angle: " + meanAngle.ToString() + "";

        textTrackedPoints.text = sTrackingLogger;
        yield return(null);
    }
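
CalculateSpeedFloat is referenced above but not included in this listing. A plausible definition, assuming "speed" means pixels moved between a corner's matched positions in consecutive frames (hypothetical; the project's version may differ):

    // Hypothetical helper matching the call above: Euclidean distance between
    // a corner's current and previous position (pixels per frame).
    float CalculateSpeedFloat(Point pt, Point pt2)
    {
        double dx = pt.x - pt2.x;
        double dy = pt.y - pt2.y;
        return (float)System.Math.Sqrt(dx * dx + dy * dy);
    }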
        public void ProcessSkin(Mat rgbaImage)
        {
            Imgproc.pyrDown(rgbaImage, mPyrDownMat);
            Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
            Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
            Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);

            Core.inRange(mHsvMat, mLowerBoundHSV, mUpperBoundHSV, mMaskHSV);
            Core.inRange(mPyrDownMat, mLowerBoundRGB, mUpperBoundRGB, mMaskRGB);
            Core.inRange(mYCrCbMat, mLowerBoundYCrCb, mUpperBoundYCrCb, mMaskYCrCb);

            mMask = mMaskYCrCb & mMaskHSV & mMaskRGB;

            Imgproc.dilate(mMask, mDilatedMask, new Mat());

            List <MatOfPoint> contours = new List <MatOfPoint>();

            Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            if (contours.Count == 0)
            {
                return;
            }

            // Find max contour area
            double     maxArea              = 0;
            double     secondMaxArea        = 0;
            MatOfPoint biggestContour       = null;
            MatOfPoint secondBiggestContour = null;

            foreach (MatOfPoint each in contours)
            {
                MatOfPoint wrapper = each;
                double     area    = Imgproc.contourArea(wrapper);
                if (area > maxArea)
                {
                    secondMaxArea        = maxArea;
                    secondBiggestContour = biggestContour;

                    maxArea        = area;
                    biggestContour = each;
                }
                else if (area > secondMaxArea)
                {
                    secondMaxArea        = area;
                    secondBiggestContour = each;
                }
            }

            // biggestContour can stay null if every contour has zero area
            if (biggestContour == null)
            {
                return;
            }

            handContourSize = maxArea;

            if ((biggestContour != null) && (secondBiggestContour != null) && (ComputeAVGXForContour(biggestContour) >= ComputeAVGXForContour(secondBiggestContour)) && (secondMaxArea >= 0.3 * maxArea))
            {
                biggestContour  = secondBiggestContour;
                handContourSize = secondMaxArea;
            }


            MatOfPoint2f contours_res2f = new MatOfPoint2f();

            MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());

            Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
            handContour = new MatOfPoint(contours_res2f.toArray());
            contours_res2f.Dispose();
            biggestContour2f.Dispose();

            if (Imgproc.contourArea(handContour) > mMinContourArea * maxArea)
            {
                Core.multiply(handContour, new Scalar(4, 4), handContour);
            }

            // Filter contours by area and resize to fit the original image size
            mContours.Clear();

            foreach (MatOfPoint each in contours)
            {
                MatOfPoint contour = each;
                if (Imgproc.contourArea(contour) > mMinContourArea * maxArea)
                {
                    Core.multiply(contour, new Scalar(4, 4), contour);
                    mContours.Add(contour);
                }
            }
        }
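
The mask combination "mMask = mMaskYCrCb & mMaskHSV & mMaskRGB" above relies on the Mat & operator overload. An explicit equivalent using Core.bitwise_and, in case a given OpenCVForUnity version lacks the overload (a sketch; mMask is created by the first call if not preallocated):

            // Explicit form of the combined skin mask used above.
            Core.bitwise_and(mMaskYCrCb, mMaskHSV, mMask);
            Core.bitwise_and(mMask, mMaskRGB, mMask);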
    // Update is called once per frame
    void Update()
    {
        if (!IsStarted)
        {
            return;
        }
        Mat grayMat = webCamTextureToMat.GetMat();

        Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        //Debug.Log("mMOP2fptsPrev.rows() : " + mMOP2fptsPrev.rows().ToString());

        //Debug.Log("rgbaMat.rows() : " + rgbaMat.rows().ToString());
        //Debug.Log("matOpFlowThis.rows() : " + matOpFlowThis.rows().ToString());

        if (mMOP2fptsPrev.rows() == 0)
        {
            // first time through the loop so we need prev and this mats
            // plus prev points
            // get this mat
            //rgbaMat.copyTo(matOpFlowThis);

            grayMat.copyTo(matOpFlowThis);

            // copy that to prev mat (note: the original also called
            // matOpFlowPrev.empty(), but Mat.empty() is a query, not a
            // mutator, so that call was a no-op and has been dropped)
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get prev corners
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.1, 100);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // get safe copy of this corners
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
            //Debug.Log("opencv optical flow --- 1 ");
        }
        else
        {
            // we've been through before so
            // this mat is valid. Copy it to prev mat
            //rgbaMat.copyTo(matOpFlowThis);
            //matOpFlowPrev = new Mat(rgbaMat.size(), rgbaMat.type());
            matOpFlowThis.copyTo(matOpFlowPrev);

            //matOpFlowThis = new Mat(rgbaMat.size(), rgbaMat.type());

            // get this mat
            grayMat.copyTo(matOpFlowThis);
            //matOpFlowThis = rgbaMat;
            //Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            // get the corners for this mat
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.1, 100);
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // retrieve the corners from the prev mat
            // (saves calculating them again)
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

            // and save this corners for next time through

            mMOP2fptsThis.copyTo(mMOP2fptsSafe);

            //Debug.Log("opencv optical flow --- 2 ");
        }


        /*
         *  Parameters:
         *      prevImg first 8-bit input image
         *      nextImg second input image
         *      prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
         *      nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
         *      status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
         *      err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
         */
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

        if (!mMOBStatus.empty())
        {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte>  byteStatus  = mMOBStatus.toList();

            int num_distance = 0;

            // iterate over every tracked point; the original "Count - 1"
            // bound skipped the last entry
            for (int x = 0; x < byteStatus.Count; x++)
            {
                if (byteStatus[x] == 1)
                {
                    Point pt  = cornersThis[x];
                    Point pt2 = cornersPrev[x];

                    Imgproc.circle(grayMat, pt, 5, colorRed, iLineThickness - 1);

                    Imgproc.line(grayMat, pt, pt2, colorRed, iLineThickness);
                    double distance = System.Math.Sqrt(System.Math.Pow((pt2.x - pt.x), 2.0) + System.Math.Pow((pt2.y - pt.y), 2.0));
                    if (distance > 20)
                    {
                        num_distance++;
                    }

                    //Utilities.Debug("Distance[" + x + "] : " + distance);
                    //Debug.Log("Distance[" + x + "] : " + distance);
                }
            }
            Debug.Log("Num of Distance : " + num_distance);
            if (num_distance > 0)
            {
                Debug.Log("Movement Detected !!");
            }
        }

        //              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        //this.GetComponent<CVCore>().Add(0, rgbaMat);
        Utils.matToTexture2D(grayMat, texture, colors);
        gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    }
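
All the calcOpticalFlowPyrLK calls in this listing use the short overload, which falls back on the default search window and pyramid depth. If those need tuning, the bindings also expose a longer overload (signature assumed to mirror standard OpenCV's Java API); a sketch:

        // Tunable variant (a sketch): 21x21 search window, 3 pyramid levels.
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis,
            mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr,
            new Size(21, 21), 3);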
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            List <MatOfPoint> contours = detector.GetContours();

            detector.Process(rgbaMat);

            //Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));

            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //(int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");

            Point bottomLeft  = new Point(boundRect.x, boundRect.y + boundRect.height);
            Point topLeft     = new Point(boundRect.x, boundRect.y);
            Point bottomRight = new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height);
            Point topRight    = new Point(boundRect.x + boundRect.width, boundRect.y);

            rectPoints = new MatOfPoint2f(new Point(boundRect.x, boundRect.y),                                      //topleft
                                          new Point(boundRect.x + boundRect.width, boundRect.y),                    //Top Right
                                          new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height), //Bottom Right
                                          new Point(boundRect.x, boundRect.y + boundRect.height)                    //Bottom Left
                                          );

            //double a = boundRect.br ().y - boundRect.tl ().y;
            //a = a * 0.7;
            //a = boundRect.tl ().y + a;

            //Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

            List <Point3> m_markerCorners3dList = new List <Point3>();

            m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
            m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
            m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
            m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
            m_markerCorners3d.fromList(m_markerCorners3dList);

            //estimate pose
            Mat Rvec = new Mat();
            Mat Tvec = new Mat();
            Mat raux = new Mat();
            Mat taux = new Mat();

            Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);

            raux.convertTo(Rvec, CvType.CV_32F);
            taux.convertTo(Tvec, CvType.CV_32F);

            rotMat = new Mat(3, 3, CvType.CV_64FC1);
            Calib3d.Rodrigues(Rvec, rotMat);

            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            //Debug.Log ("transformationM " + transformationM.ToString ());

            Rvec.Dispose();
            Tvec.Dispose();
            raux.Dispose();
            taux.Dispose();
            rotMat.Dispose();

            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
            //Debug.Log("arM " + ARM.ToString());

            if (ARGameObject != null)
            {
                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                if (deactivateCoroutine == null)
                {
                    deactivateCoroutine = StartCoroutine(Wait(10.0f));
                }
                ARGameObject.SetActive(true);
            }

            //end pose estimation

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            List <int>   hullIndexList = hull.toList();
            List <Point> contourPtList = contour.toList();

            for (int j = 0; j < hullIndexList.Count; j++)
            {
                listPo.Add(contourPtList[hullIndexList[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            // NOTE: with the depth filter below commented out, listPoDefect stays
            // empty, so numberOfFingers is always 0 in this variant of the sample.
            List <Point> listPoDefect = new List <Point>();

            if (convexDefect.rows() > 0)
            {
                List <int>   convexDefectList = convexDefect.toList();
                List <Point> contourList      = contour.toList();
                for (int j = 0; j < convexDefectList.Count; j = j + 4)
                {
                    Point farPoint = contourList[convexDefectList[j + 2]];
                    int   depth    = convexDefectList[j + 3];
                    //if (depth > threasholdSlider.value && farPoint.y < a)
                    //{
                    //    listPoDefect.Add(contourList[convexDefectList[j + 2]]);
                    //}
                    //Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
                }
            }


            Debug.Log("hull: " + hull.toList());
            if (convexDefect.rows() > 0)
            {
                Debug.Log("defects: " + convexDefect.toList());
            }

            //use these contours to do heart detection
            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            int defectsTotal = (int)convexDefect.total();

            Debug.Log("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            Debug.Log("numberOfFingers " + numberOfFingers);

            Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
Example #11
    void Start()
    {
        Mat mainMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
        Mat grayMat = new Mat();

        sourceRawImage.texture = baseTexture;

        Utils.texture2DToMat(baseTexture, mainMat);
        mainMat.copyTo(grayMat);

        Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
        Imgproc.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);
        Imgproc.threshold(grayMat, grayMat, 110, 255, Imgproc.THRESH_BINARY);  // max value 255 (the original "225" looked like a typo)
        Imgproc.Canny(grayMat, grayMat, 20, 190);

        List <MatOfPoint> contours = new List <MatOfPoint>();

        Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        int num = 0;
        List <MatOfPoint> contours_list = new List <MatOfPoint>();

        //  new logic
        // List<MatOfPoint> contours_list = new List<MatOfPoint>();
        for (int i = 0; i < contours.Count; i++)
        {
            MatOfPoint   cp     = contours[i];
            MatOfPoint2f cn     = new MatOfPoint2f(cp.toArray());
            double       p      = Imgproc.arcLength(cn, true);
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(cn, approx, 0.01 * p, true);
            double area = Imgproc.contourArea(contours[i]);
            if ((area > 30 && area < 100) && approx.toArray().Length > 8)
            {
                // Imgproc.drawContours(mainMat, contours, -1, new Scalar(0, 255, 0), 4);
                contours_list.Add(contours[i]);
                num = num + 1;
                Debug.Log(area);
            }
        }



        // previously working
        // for(int i =0; i< contours.Count ; i++){
        //     MatOfPoint cp = contours[i];
        //     MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
        //     double p = Imgproc.arcLength(cn,true);

        //     //  fron akshay file
        //     double area = Imgproc.contourArea(contours[i]);
        //     if(area > 50){
        //         Imgproc.drawContours(mainMat, contours, -1, new Scalar(0, 255, 0), 4);
        //         num = num + 1;
        //         Debug.Log(area);
        //     }

        // }



        // for(int i =0; i< contours.Count ; i++){
        //     double area = Imgproc.contourArea(contours[i]);
        //     if(area > 50){
        //         // Imgproc.drawContours(mainMat, contours, -1, new Scalar(0, 255, 0), 4);
        //         contours_list.Add(contours[i]);
        //         num = num + 1;
        //     }
        // }

        // drawContours with contourIdx -1 draws every contour in the list, so a
        // single call replaces the original loop that redrew them all each pass
        Imgproc.drawContours(mainMat, contours_list, -1, new Scalar(0, 255, 0), 4);

        Debug.Log("Number : " + num);
        info.text += (num - 1).ToString();

        Texture2D finaltexture = new Texture2D(grayMat.cols(), grayMat.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(grayMat, finaltexture);
        sourceRawImage.texture = finaltexture;
    }
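
The area-plus-vertex-count filter above is one way to select round blobs. An alternative roundness test (a sketch, not part of the sample) compares contour area to the area of a circle with the same perimeter, giving values near 1 for circles:

    // Illustrative roundness metric: 4*pi*area / perimeter^2, ~1 for circles.
    static double Circularity(MatOfPoint contour)
    {
        MatOfPoint2f c2f = new MatOfPoint2f(contour.toArray());
        double p = Imgproc.arcLength(c2f, true);
        double area = Imgproc.contourArea(contour);
        return (p > 0) ? 4.0 * System.Math.PI * area / (p * p) : 0.0;
    }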
Example #12
    void FormatImageSquare()
    {
        Mat mainMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
        Mat grayMat = new Mat();

        //Convert Texture2d to Matrix
        Utils.texture2DToMat(baseTexture, mainMat);
        //copy main matrix to grayMat
        mainMat.copyTo(grayMat);

        //Convert color to gray
        Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        //Blur
        Imgproc.GaussianBlur(grayMat, grayMat, new Size(25, 25), 0);

        //contrast
        Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);
        //extract edge
        Imgproc.Canny(grayMat, grayMat, 50, 50);

        //prepare for the finding contours
        List <MatOfPoint> contours = new List <MatOfPoint>();

        //find the contour from canny edge image
        Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        List <MatOfPoint> tempTargets = new List <MatOfPoint>();

        for (int i = 0; i < contours.Count; i++)
        {
            MatOfPoint   cp = contours[i];
            MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
            double       p  = Imgproc.arcLength(cn, true);

            MatOfPoint2f approx = new MatOfPoint2f();
            //convret to polygon
            Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);


            //find a contour with four points and large area
            int minContourArea = 10000;
            if (approx.toArray().Length == 4 && Imgproc.contourArea(approx) > minContourArea)
            {
                MatOfPoint approxPt = new MatOfPoint();
                approx.convertTo(approxPt, CvType.CV_32S);

                float maxCosine = 0;  // despite the name, this holds the largest corner angle in degrees
                for (int j = 2; j < 5; j++)
                {
                    Vector2 v1 = new Vector2((float)(approx.toArray()[j % 4].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j % 4].y - approx.toArray()[j - 1].y));
                    Vector2 v2 = new Vector2((float)(approx.toArray()[j - 2].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j - 2].y - approx.toArray()[j - 1].y));

                    float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                    maxCosine = Mathf.Max(maxCosine, angle);
                }
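                // despite the name, maxCosine holds the largest corner angle in degrees
                // (Vector2.Angle returns degrees); a clean rectangle keeps every corner
                // near 90 degrees, hence the 135-degree cutoff below.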

                if (maxCosine < 135f)
                {
                    tempTargets.Add(approxPt);
                }
            }
        }

        if (tempTargets.Count > 0)
        {
            //Get the first contour
            MatOfPoint approxPt = tempTargets[0];
            //Making Source Mat
            Mat srcPointMat = Converters.vector_Point_to_Mat(approxPt.toList(), CvType.CV_32F);

            //Making Destination Mat
            /*change these values*/
            List <Point> dstPoints = new List <Point>();
            dstPoints.Add(new Point(512, 0));
            dstPoints.Add(new Point(0, 0));
            dstPoints.Add(new Point(0, 512));
            dstPoints.Add(new Point(512, 512));

            Mat dstPointMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);

            //Make perspective transform
            Mat M         = Imgproc.getPerspectiveTransform(srcPointMat, dstPointMat);
            Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
            //Crop and warp the image
            Imgproc.warpPerspective(mainMat, warpedMat, M, new Size(512, 512), Imgproc.INTER_LINEAR);
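            // the homography maps the detected quadrilateral to a 512x512 top-down view.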
            warpedMat.convertTo(warpedMat, CvType.CV_8UC3);



            //Convert color to gray
            Imgproc.cvtColor(warpedMat, warpedMat, Imgproc.COLOR_BGR2GRAY);
            ////Blur
            //Imgproc.GaussianBlur(warpedMat, warpedMat, new Size(25, 25), 0);

            //contrast
            Imgproc.threshold(warpedMat, warpedMat, 0, 255, Imgproc.THRESH_OTSU);
            //resize
            Imgproc.resize(warpedMat, warpedMat, new Size(28, 28));
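            // 28x28 is the canonical MNIST input size; presumably the resized texture
            // feeds a digit classifier downstream (an assumption, not shown here).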
            //Create an empty final texture
            finalTexture = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
            //Convert the Mat back to a Texture2D
            Utils.matToTexture2D(warpedMat, finalTexture);
            targetRawImage.texture = finalTexture;
        }
    }
Example #13
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            // rgbaMat.copyTo(mRgba);
            float DOWNSCALE_RATIO = 1.0f;

            if (enableDownScale)
            {
                mRgba           = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                // mRgba = rgbaMat;
                rgbaMat.copyTo(mRgba);
                DOWNSCALE_RATIO = 1.0f;
            }

            // Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(mRgba, mRgba, new Size(3, 3), 1, 1);
            // Imgproc.medianBlur(mRgba, mRgba, 3);


            if (!isColorSelected)
            {
                return;
            }

            // run the detector first so the contour list reflects the current frame
            detector.Process(mRgba);

            List <MatOfPoint> contours = detector.GetContours();

            //            Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
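            // minAreaRect returns the smallest rotated rectangle enclosing the contour;
            // the loop below keeps the contour whose rotated rectangle has the largest area.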

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
            Imgproc.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //                    (int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");


            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;
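            // 'a' is now the y-coordinate 70% of the way down the bounding box; the
            // (commented-out) defect filter below uses it to reject points near the wrist.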

            //            Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            // Imgproc.rectangle(mRgba, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);
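            // convexHull returns indices into the contour (a MatOfInt), not point coordinates;
            // the check below requires at least 3 hull points.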

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);
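            // each convexity defect is four ints: start index, end index,
            // farthest-point index, and depth (fixed-point; divide by 256 for pixels).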

            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contour.toList()[hull.toList()[j]] * DOWNSCALE_RATIO);
            }
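            // listPo now holds the hull points mapped back to full-resolution
            // coordinates whenever the frame was processed downscaled.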

            /*
             * MatOfPoint e = new MatOfPoint();
             * e.fromList(listPo);
             * hullPoints.Add(e);
             *
             * List<Point> listPoDefect = new List<Point>();
             *
             * if (convexDefect.rows() > 0)
             * {
             *  List<int> convexDefectList = convexDefect.toList();
             *  List<Point> contourList = contour.toList();
             *  for (int j = 0; j < convexDefectList.Count; j = j + 4)
             *  {
             *      Point farPoint = contourList[convexDefectList[j + 2]];
             *      int depth = convexDefectList[j + 3];
             *      if (depth > threshholdDetect && farPoint.y < a)
             *      {
             *          listPoDefect.Add(contourList[convexDefectList[j + 2]]);
             *          Imgproc.line(rgbaMat, farPoint, listPo[convexDefectList[j + 2]], new Scalar(255, 0, 0, 255),1,1);
             *      }
             *      //                    Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
             *  }
             * }*/


            //            Debug.Log ("hull: " + hull.toList ());
            //            if (convexDefect.rows () > 0) {
            //                Debug.Log ("defects: " + convexDefect.toList ());
            //            }

            //Imgproc.drawContours (rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            for (int p = 0; p < listPo.Count; p++)
            {
                if (p % 2 == 0)
                {
                    Imgproc.circle(rgbaMat, listPo[p], 6, new Scalar(255, 0, 0, 255), -1);
                    // Imgproc.putText(rgbaMat,p.ToString(),listPo[p],1,1,new Scalar(255,0,0,255));

                    // check if close

                    List <Point> fLMscaled = OpenCVForUnityUtils.ConvertVector2ListToPointList(facePoints);

                    for (int q = 0; q < fLMscaled.Count; q++)
                    {
                        if (ifLessThanDPoint(listPo[p], fLMscaled[q], 8))
                        {
                            //Point1 = listPo[p];
                            //Point2 = fLMscaled[q];
                            handPoint = p;
                            facePoint = q;
                            print(Point1 + " " + Point2);
                        }
                    }

                    if (p == handPoint && facePoint != 0)
                    {
                        Point1 = listPo[p];
                        Point2 = fLMscaled[facePoint];
                        Imgproc.line(rgbaMat, Point1, Point2, new Scalar(255, 255, 255, 255));
                    }
                }
            }



            //            int defectsTotal = (int)convexDefect.total();
            //            Debug.Log ("Defect total " + defectsTotal);

            /*  numberOfFingers = listPoDefect.Count;
             * if (numberOfFingers > 5)
             *    numberOfFingers = 5;/
             *
             * //            Debug.Log ("numberOfFingers " + numberOfFingers);
             *
             * //            Imgproc.putText (rgbaMat, "" + numberOfFingers, new Point (rgbaMat.cols () / 2, rgbaMat.rows () / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
             *
             *
             * /*   foreach (Point p in listPoDefect) {
             *
             *    Point tempp = GetNearestL(p, listPo);
             *    tempp = ConvertDownscale(tempp, DOWNSCALE_RATIO);
             *    Point p2 = ConvertDownscale(p, DOWNSCALE_RATIO);
             *
             *    Imgproc.circle (rgbaMat, tempp, 6, new Scalar (0, 0, 255, 255), -1);
             *    Imgproc.circle(rgbaMat, p2, 6, new Scalar(255, 0, 255, 255), -1);
             * }*/
        }
Example #14
        private Mat findPaper(Mat mainMat)
        {
            Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
            // blur image
            Imgproc.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);


            grayMat.get(0, 0, grayPixels);
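            // grayPixels holds the whole gray plane; the loop below quantizes it into
            // three levels (0 / 100 / 255) to flatten uneven lighting before Otsu binarization.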

            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels[i] = 0;

                if (grayPixels[i] < 70)
                {
                    grayPixels[i] = 0;

                    //maskPixels [i] = 1;
                }
                else if (70 <= grayPixels[i] && grayPixels[i] < 120)
                {
                    grayPixels[i] = 100;
                }
                else
                {
                    grayPixels[i] = 255;
                    //maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);

            //thresholding makes the image black and white
            Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);

            //extract the edge image
            Imgproc.Canny(grayMat, grayMat, 50, 50);


            //prepare for finding contours
            List <MatOfPoint> contours = new List <MatOfPoint>();

            Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            List <MatOfPoint> tmpTargets = new List <MatOfPoint>();


            for (int i = 0; i < contours.Count; i++)
            {
                MatOfPoint   cp = contours[i];
                MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
                double       p  = Imgproc.arcLength(cn, true);

                MatOfPoint2f approx = new MatOfPoint2f();

                // a larger epsilon (0.03 * perimeter) tolerates greater skew
                //approximate the contour with a simpler polygon
                Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);

                //find contours with 4 points
                if (approx.toArray().Length == 4)
                {
                    MatOfPoint approxPt = new MatOfPoint();
                    approx.convertTo(approxPt, CvType.CV_32S);
                    float maxCosine  = 0;
                    float rate       = 0;
                    float min_length = float.MaxValue;


                    for (int j = 2; j < 5; j++)
                    {
                        Vector2 v1 = new Vector2((float)(approx.toArray()[j % 4].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j % 4].y - approx.toArray()[j - 1].y));
                        Vector2 v2 = new Vector2((float)(approx.toArray()[j - 2].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j - 2].y - approx.toArray()[j - 1].y));

                        float v1_length = Mathf.Sqrt(v1.x * v1.x + v1.y * v1.y);
                        float v2_length = Mathf.Sqrt(v2.x * v2.x + v2.y * v2.y);

                        min_length = Mathf.Min(Mathf.Min((float)(v1_length), (float)v2_length), min_length);
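                        // min_length tracks the shortest side of the quadrilateral;
                        // quads with a side under 100 px are rejected below.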


                        if (v1_length > v2_length)
                        {
                            rate = v2_length / v1_length;
                        }
                        else
                        {
                            rate = v1_length / v2_length;
                        }



                        float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                        maxCosine = Mathf.Max(maxCosine, angle);
                    }


                    if (min_length > 100 && maxCosine < 135f)//  && rate >= 0.6  maxCosine < 135f &&
                    {
                        tmpTargets.Add(approxPt);
                        //Debug.Log("Length -----------" + min_length);

                        //Debug.Log("------------rate" + rate + "---------------");
                    }
                }
            }
            if (tmpTargets.Count > 0)
            {
                // -----------------------DRAW RECTANGLE---------------------------
                //MatOfPoint2f approxCurve = new MatOfPoint2f();

                //for (int i = 0; i < tmpTargets.Count; i++)
                //{
                //    //Convert contours(i) from MatOfPoint to MatOfPoint2f
                //    MatOfPoint2f contour2f = new MatOfPoint2f(tmpTargets[i].toArray());
                //    //Processing on mMOP2f1 which is in type MatOfPoint2f
                //    double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
                //    Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);

                //    //Convert back to MatOfPoint
                //    MatOfPoint points = new MatOfPoint(approxCurve.toArray());

                //    // Get bounding rect of contour
                //    OpenCVForUnity.Rect rect = Imgproc.boundingRect(points);

                //    // draw enclosing rectangle (all same color, but you could use variable i to make them unique)
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 130, 255), 3);
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y), new Point(rect.x + 5, rect.y + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x + rect.width, rect.y), new Point(rect.x + +rect.width + 5, rect.y + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x + rect.width, rect.y + rect.height), new Point(rect.x + +rect.width + 5, rect.y + rect.height + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y + rect.height), new Point(rect.x + 5, rect.y + rect.height + 5), new Scalar(0, 0, 255), 5);

                //}
                // -----------------------DRAW RECTANGLE---------------------------



                // get the first contours

                int largestPaper = findLargestContour(tmpTargets);
                //Debug.Log(largestPaper);
                // using the largest one
                paperCornerMatOfPoint = tmpTargets[largestPaper];


                // draw boundary
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[1], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[2], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[1], paperCornerMatOfPoint.toList()[2], new Scalar(0, 255, 0), 3);

                // extract target from the frame and adjust some angle....
                Mat srcPointsMat = Converters.vector_Point_to_Mat(paperCornerMatOfPoint.toList(), CvType.CV_32F);

                List <Point> dstPoints = new List <Point>();
                dstPoints.Add(new Point(0, 0));
                dstPoints.Add(new Point(0, 300));
                dstPoints.Add(new Point(200, 300));
                dstPoints.Add(new Point(200, 0));

                Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);
                //Make perspective transform
                Mat m         = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
                Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
                Imgproc.warpPerspective(mainMat, warpedMat, m, new Size(200, 300), Imgproc.INTER_LINEAR);
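                // the homography warps the detected paper quad to an upright 200x300 image.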
                warpedMat.convertTo(warpedMat, CvType.CV_8UC3);


                Texture2D finalTargetTextue = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
                Utils.matToTexture2D(warpedMat, finalTargetTextue);

                targetRawImage.texture = finalTargetTextue;
                //Debug.Log(paperCornerMatOfPoint.toList()[0].ToString() + " " + paperCornerMatOfPoint.toList()[1].ToString()+ " " + paperCornerMatOfPoint.toList()[2].ToString()+ " " + paperCornerMatOfPoint.toList()[3].ToString());
            }
            //--------------------------------------------------------


            return(mainMat);
        }
Example #15
 public RegionCandidate(int index, MatOfPoint contour)
 {
     this.index     = index;
     this.contour   = contour;
     this.contour2f = new MatOfPoint2f(contour.toArray());
 }
Example #16
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // change the color space to YUV.
                Imgproc.cvtColor(rgbaMat, yuvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(yuvMat, yuvMat, Imgproc.COLOR_RGB2YUV);
                // grab only the Y (luma) component.
                Core.extractChannel(yuvMat, yMat, 0);

                // blur the image to reduce high frequency noises.
                Imgproc.GaussianBlur(yMat, yMat, new Size(3, 3), 0);
                // find edges in the image.
                Imgproc.Canny(yMat, yMat, 50, 200, 3);
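                // 50/200 are the low/high hysteresis thresholds; 3 is the Sobel aperture size.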

                // find contours.
                List <MatOfPoint> contours = new List <MatOfPoint>();
                Find4PointContours(yMat, contours);

                // pick the contour of the largest area and rearrange the points in a consistent order.
                MatOfPoint maxAreaContour = GetMaxAreaContour(contours);
                maxAreaContour = OrderCornerPoints(maxAreaContour);

                bool found = (maxAreaContour.size().area() > 0);
                if (found)
                {
                    // transform the perspective of the original image.
                    using (Mat transformedMat = PerspectiveTransform(rgbaMat, maxAreaContour))
                    {
                        outputDisplayAreaMat.setTo(new Scalar(0, 0, 0, 255));

                        if (transformedMat.width() <= outputDisplayAreaMat.width() && transformedMat.height() <= outputDisplayAreaMat.height() &&
                            transformedMat.total() >= outputDisplayAreaMat.total() / 16)
                        {
                            int x = outputDisplayAreaMat.width() / 2 - transformedMat.width() / 2;
                            int y = outputDisplayAreaMat.height() / 2 - transformedMat.height() / 2;
                            using (Mat dstAreaMat = new Mat(outputDisplayAreaMat, new OpenCVForUnity.CoreModule.Rect(x, y, transformedMat.width(), transformedMat.height())))
                            {
                                transformedMat.copyTo(dstAreaMat);
                            }
                        }
                    }
                }

                if (isDebugMode)
                {
                    // draw edge image.
                    Imgproc.cvtColor(yMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);

                    // draw all found contours.
                    Imgproc.drawContours(rgbaMat, contours, -1, DEBUG_CONTOUR_COLOR, 1);
                }

                if (found)
                {
                    // draw max area contour.
                    Imgproc.drawContours(rgbaMat, new List <MatOfPoint> {
                        maxAreaContour
                    }, -1, CONTOUR_COLOR, 2);

                    if (isDebugMode)
                    {
                        // draw corner numbers.
                        for (int i = 0; i < maxAreaContour.toArray().Length; i++)
                        {
                            var pt = maxAreaContour.get(i, 0);
                            Imgproc.putText(rgbaMat, i.ToString(), new Point(pt[0], pt[1]), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, DEBUG_CORNER_NUMBER_COLOR, 1, Imgproc.LINE_AA, false);
                        }
                    }
                }

                rgbaMat.copyTo(inputDisplayAreaMat);

                Utils.fastMatToTexture2D(displayMat, texture, true, 0, true);
            }
        }
        /*=============================================*
        * From per-contour vertices to identifying the hand
        *=============================================*/
        /// <summary>
        /// Contours to hand gesture.
        /// </summary>
        /// <param name="rgbaMat">Rgba mat.</param>
        /// <param name="contour">Contour.</param>
        private static void _contourToHandGesture(Mat rgbaMat, MatOfPoint contour)
        {
            try
            {
                //prepare to examine the contour's vertices
                _pointOfVertices(rgbaMat, contour);

                //get the size of the reference contour and draw it (bounding rectangle)
                OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
                Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(ContourRangeColor), 2, 8, 0);

                /*=============================================*
                 * Get the size of the hand including the arm
                 **=============================================*/
                //identify the extent of the hand including the arm
                MatOfInt hull = new MatOfInt();
                Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

                //get the extent of the hand including the arm
                List <Point> armPointList = new List <Point>();
                for (int j = 0; j < hull.toList().Count; j++)
                {
                    Point armPoint = contour.toList()[hull.toList()[j]];
                    bool  addFlag  = true;
                    foreach (Point point in armPointList.ToArray())
                    {
                        //vertices closer than 1/10 of the bounding box are merged as measurement noise
                        double distance = Mathf.Sqrt((float)((armPoint.x - point.x) * (armPoint.x - point.x) + (armPoint.y - point.y) * (armPoint.y - point.y)));
                        if (distance <= Mathf.Min((float)boundRect.width, (float)boundRect.height) / 10)
                        {
                            addFlag = false;
                            break;
                        }
                    }
                    if (addFlag)
                    {
                        armPointList.Add(armPoint);
                    }
                }

                MatOfPoint armMatOfPoint = new MatOfPoint();
                armMatOfPoint.fromList(armPointList);
                List <MatOfPoint> armPoints = new List <MatOfPoint>();
                armPoints.Add(armMatOfPoint);

                //draw the extent of the hand including the arm
                Imgproc.drawContours(rgbaMat, armPoints, -1, HGColorSpuiter.ColorToScalar(ArmRangeColor), 3);

                //if the hand-plus-arm hull is triangular, further classification is impractical
                if (hull.toArray().Length < 3)
                {
                    return;
                }

                /*=============================================*
                 * Get the size of the palm
                 **=============================================*/
                //keep only the concave (defect) points of the convex hull to get the palm region
                MatOfInt4 convexDefect = new MatOfInt4();
                Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

                //filter and collect the concave points
                List <Point> palmPointList = new List <Point>();
                for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
                {
                    Point farPoint = contour.toList()[convexDefect.toList()[j + 2]];
                    int   depth    = convexDefect.toList()[j + 3];
                    if (depth > depthThreashold && farPoint.y < boundRect.br().y - boundRect.tl().y)
                    {
                        palmPointList.Add(contour.toList()[convexDefect.toList()[j + 2]]);
                    }
                }
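                // note: farPoint.y is compared against the box height (br.y - tl.y) rather
                // than a coordinate inside the box; a cutoff like tl.y + 0.7 * height, as in
                // the earlier example, was presumably intended.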

                MatOfPoint palmMatOfPoint = new MatOfPoint();
                palmMatOfPoint.fromList(palmPointList);
                List <MatOfPoint> palmPoints = new List <MatOfPoint>();
                palmPoints.Add(palmMatOfPoint);

                //draw the palm region
                Imgproc.drawContours(rgbaMat, palmPoints, -1, HGColorSpuiter.ColorToScalar(PalmRangeColor), 3);

                /*=============================================*
                 * Get the size of the palm plus the fingertips
                 **=============================================*/
                //use the palm position to get the hand region excluding the wrist
                List <Point> handPointList = new List <Point>();
                handPointList.AddRange(armPointList.ToArray());
                handPointList.Reverse();
                handPointList.RemoveAt(0);
                handPointList.Insert(0, palmPointList.ToArray()[0]);
                handPointList.RemoveAt(handPointList.Count - 1);
                handPointList.Insert(handPointList.Count, palmPointList.ToArray()[palmPointList.Count - 1]);

                MatOfPoint handMatOfPoint = new MatOfPoint();
                handMatOfPoint.fromList(handPointList);
                List <MatOfPoint> handPoints = new List <MatOfPoint>();
                handPoints.Add(handMatOfPoint);

                Imgproc.drawContours(rgbaMat, handPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

                /*=============================================*
                 * Get the fingertip positions
                 **=============================================*/
                //find the midpoint of each pair of adjacent palm vertices
                List <Point> palmCenterPoints = new List <Point>();
                for (int i = 0; i < palmPointList.Count; i++)
                {
                    Point palmPoint     = palmPointList.ToArray()[i];
                    Point palmPointNext = new Point();
                    if (i + 1 < palmPointList.Count)
                    {
                        palmPointNext = palmPointList.ToArray()[i + 1];
                    }
                    else
                    {
                        palmPointNext = palmPointList.ToArray()[0];
                    }

                    Point palmCenterPoint = new Point((palmPoint.x + palmPointNext.x) / 2, (palmPoint.y + palmPointNext.y) / 2);
                    palmCenterPoints.Add(palmCenterPoint);
                }

                //find the hand vertex closest to each palm vertex
                for (int i = 0; i < palmCenterPoints.Count && i + 1 < handPointList.Count && i < 5; i++)
                {
                    Point palmPoint = palmCenterPoints.ToArray()[i];


                    List <Point> fingerList = new List <Point>();
                    fingerList.Add(palmPoint);
                    fingerList.Add(handPointList.ToArray()[i + 1]);

                    MatOfPoint fingerPoint = new MatOfPoint();
                    fingerPoint.fromList(fingerList);

                    List <MatOfPoint> fingerPoints = new List <MatOfPoint>();
                    fingerPoints.Add(fingerPoint);

                    Imgproc.drawContours(rgbaMat, fingerPoints, -1, HGColorSpuiter.ColorToScalar(FingerRangeColor), 3);
                }

//				Imgproc.putText(rgbaMat, "", new Point(2, rgbaMat.rows()-30), Core.FONT_HERSHEY_SIMPLEX, 1.0, HGColorSpuiter.ColorToScalar(Color.black), 2, Imgproc.LINE_AA, false);
            }
            catch (System.Exception e)
            {
                Debug.Log(e.Message);
            }
        }