Example #1
        /// <summary>
        /// Releases the resources allocated during initialization.
        /// </summary>
        public override void Dispose()
        {
            DisposeOpticalFlow();

            if (prevTrackPtsMat != null)
            {
                prevTrackPtsMat.Dispose();
            }
        }
Example #2
        /// <summary>
        /// Raises the web cam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            matOpFlowThis.Dispose();
            matOpFlowPrev.Dispose();
            MOPcorners.Dispose();
            mMOP2fptsThis.Dispose();
            mMOP2fptsPrev.Dispose();
            mMOP2fptsSafe.Dispose();
            mMOBStatus.Dispose();
            mMOFerr.Dispose();
        }
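Unlike Example #5 below, this handler calls Dispose() unconditionally, so it throws a NullReferenceException if the event fires before the Mats were allocated. A minimal helper sketch (hypothetical, not part of OpenCVForUnity or these samples) that makes such teardown null-safe and repeatable:

        // Hypothetical helper: dispose a Mat-derived field only if it exists,
        // then clear the reference so a second teardown pass is a no-op.
        public static class MatDisposeUtil
        {
            public static void SafeDispose<T>(ref T mat) where T : Mat
            {
                if (mat != null)
                {
                    mat.Dispose();
                    mat = null;
                }
            }
        }

With it, each line above becomes MatDisposeUtil.SafeDispose(ref matOpFlowThis); and a repeated call is harmless.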
Example #3
        /// <summary>
        /// Releases the resources allocated during initialization.
        /// </summary>
        public override void Dispose()
        {
            if (src_points != null)
            {
                src_points.Clear();
            }

            DisposeKalmanFilter();

            if (prevTrackPtsMat != null)
            {
                prevTrackPtsMat.Dispose();
            }
        }
Example #4
    //Identify contours
    private bool analysisContoursRect(int index, List <MatOfPoint> contours, Mat result, List <MatchObject> matchObject)
    {
        OpenCVForUnity.Rect _testDepthRect = Imgproc.boundingRect(contours[index]);
        float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (_testDepthRect.area() > minAreaSize)
        {
            //Declare containers for the point data
            MatOfInt          hullInt       = new MatOfInt();
            List <Point>      hullPointList = new List <Point>();
            MatOfPoint        hullPointMat  = new MatOfPoint();
            List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
            MatOfInt4         defects       = new MatOfInt4();
            //Filter the point data
            MatOfPoint2f Temp2f = new MatOfPoint2f();
            //Convert contours[index] from MatOfPoint to MatOfPoint2f
            contours[index].convertTo(Temp2f, CvType.CV_32FC2);
            //Simplify the polygon in place on Temp2f with approxPolyDP (epsilon = 30 px)
            Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
            //Convert back to MatOfPoint and write the simplified points into the contours list
            Temp2f.convertTo(contours[index], CvType.CV_32S);

            //Compute the convex hull around the contour
            Imgproc.convexHull(contours[index], hullInt);
            List <Point> pointMatList = contours[index].toList();
            List <int>   hullIntList  = hullInt.toList();
            for (int j = 0; j < hullIntList.Count; j++)
            {
                hullPointList.Add(pointMatList[hullIntList[j]]);
                hullPointMat.fromList(hullPointList);
                hullPoints.Add(hullPointMat);   // every entry references the same hullPointMat
            }
            if (hullIntList.Count == 4)
            {
                if (!setMatchObject(index, pointMatList, contours, hullPoints, result, matchObject))
                {
                    //Debug.Log("setMatchObject fail");
                }
            }
            //Release the temporary buffers
            defects.Dispose();
            Temp2f.Dispose();
            hullPointList.Clear();
            hullPointMat.Dispose();
            hullInt.Dispose();
            hullPoints.Clear();
            return(true);
        }
        return(false);
    }
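analysisContoursRect expects an index into a contour list produced by findContours. A hypothetical caller sketch; depthMask, resultMat and matches are assumed names, not part of the snippet above:

    List <MatOfPoint> contours = new List <MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(depthMask, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    hierarchy.Dispose();

    List <MatchObject> matches = new List <MatchObject>();
    for (int i = 0; i < contours.Count; i++)
    {
        // returns true when the contour passed the minimum-area test
        analysisContoursRect(i, contours, resultMat, matches);
    }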
Example #5
        /// <summary>
        /// Raises the web cam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            if (texture != null)
            {
                Texture2D.Destroy(texture);
                texture = null;
            }

            if (matOpFlowThis != null)
            {
                matOpFlowThis.Dispose();
            }
            if (matOpFlowPrev != null)
            {
                matOpFlowPrev.Dispose();
            }
            if (MOPcorners != null)
            {
                MOPcorners.Dispose();
            }
            if (mMOP2fptsThis != null)
            {
                mMOP2fptsThis.Dispose();
            }
            if (mMOP2fptsPrev != null)
            {
                mMOP2fptsPrev.Dispose();
            }
            if (mMOP2fptsSafe != null)
            {
                mMOP2fptsSafe.Dispose();
            }
            if (mMOBStatus != null)
            {
                mMOBStatus.Dispose();
            }
            if (mMOFerr != null)
            {
                mMOFerr.Dispose();
            }
        }
Example #6
    void OnDisable()
    {
        Debug.Log("OnDisable");

        if (matOpFlowThis != null)
        {
            matOpFlowThis.Dispose();
        }
        if (matOpFlowPrev != null)
        {
            matOpFlowPrev.Dispose();
        }
        if (MOPcorners != null)
        {
            MOPcorners.Dispose();
        }
        if (mMOP2fptsThis != null)
        {
            mMOP2fptsThis.Dispose();
        }
        if (mMOP2fptsPrev != null)
        {
            mMOP2fptsPrev.Dispose();
        }
        if (mMOP2fptsSafe != null)
        {
            mMOP2fptsSafe.Dispose();
        }
        if (mMOBStatus != null)
        {
            mMOBStatus.Dispose();
        }
        if (mMOFerr != null)
        {
            mMOFerr.Dispose();
        }
        //webCamTextureToMatHelper.Dispose();
    }
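Both handlers above tear down the same fields, so a single shutdown can call Dispose() twice on one Mat. If the hypothetical SafeDispose helper sketched after Example #2 were used, each field is nulled on the first pass and the second pass becomes a no-op:

    void OnDisable()
    {
        Debug.Log("OnDisable");

        MatDisposeUtil.SafeDispose(ref matOpFlowThis);
        MatDisposeUtil.SafeDispose(ref matOpFlowPrev);
        MatDisposeUtil.SafeDispose(ref MOPcorners);
        MatDisposeUtil.SafeDispose(ref mMOP2fptsThis);
        MatDisposeUtil.SafeDispose(ref mMOP2fptsPrev);
        MatDisposeUtil.SafeDispose(ref mMOP2fptsSafe);
        MatDisposeUtil.SafeDispose(ref mMOBStatus);
        MatDisposeUtil.SafeDispose(ref mMOFerr);
    }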
Example #7
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                #if UNITY_IPHONE && !UNITY_EDITOR
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                #else
                if (webCamTexture.didUpdateThisFrame)
                {
                #endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
                    #if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
                    #endif
                    //gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


                    //bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
                    //float scaleX = 1;
                    //float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
                    //if (webCamTexture.videoRotationAngle == 270)
                    //    scaleY = -1.0f;
                    //gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    #if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    Camera.main.orthographicSize = webCamTexture.width / 2;
                    #else
                    Camera.main.orthographicSize = webCamTexture.height / 2;
                    #endif

                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            #if UNITY_IPHONE && !UNITY_EDITOR
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
            #else
            if (webCamTexture.didUpdateThisFrame)
            {
            #endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 * Parameters:
                 * prevImg first 8-bit input image
                 * nextImg second input image
                 * prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 * nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 * status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 * err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // iterate over every tracked feature; the original sample
                    // stopped at Count - 1 and skipped the last point
                    for (int x = 0; x < byteStatus.Count; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        void OnGUI()
        {
            float     screenScale  = Screen.width / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("OpenCVForUnitySample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            GUILayout.EndVertical();
        }
    }
}
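Stripped of the Unity camera plumbing, the optical-flow core of Example #7 fits in a few lines. A minimal sketch, assuming two consecutive grayscale frames prevGray and currGray (hypothetical names), an rgbaMat to draw on, and the 2.x-era API these samples target, where drawing functions live in Core:

        MatOfPoint corners = new MatOfPoint();
        Imgproc.goodFeaturesToTrack(prevGray, corners, 100, 0.05, 20);

        MatOfPoint2f prevPts = new MatOfPoint2f(corners.toArray());
        MatOfPoint2f nextPts = new MatOfPoint2f();
        MatOfByte status = new MatOfByte();
        MatOfFloat err = new MatOfFloat();

        // track the previous frame's corners into the current frame
        Video.calcOpticalFlowPyrLK(prevGray, currGray, prevPts, nextPts, status, err);

        byte[] found = status.toArray();
        Point[] p0 = prevPts.toArray();
        Point[] p1 = nextPts.toArray();
        for (int i = 0; i < found.Length; i++)
        {
            if (found[i] == 1)
            {
                // feature i moved from p0[i] to p1[i]
                Core.line(rgbaMat, p1[i], p0[i], new Scalar(255, 0, 0, 255), 2);
            }
        }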
Example #8
    //Use the depth contours to evaluate the RGB color of each object
    public Mat getContours(Mat srcColorMat, Mat srcDepthMat)
    {
        Mat ColorMat = new Mat();
        Mat DepthMat = new Mat();
        Mat HsvMat   = new Mat();

        srcColorMat.copyTo(ColorMat);
        srcDepthMat.copyTo(DepthMat);
        Imgproc.cvtColor(ColorMat, HsvMat, Imgproc.COLOR_BGR2HSV);

        Mat                  resultMat         = Mat.zeros(DepthMat.height(), DepthMat.width(), CvType.CV_8UC1);   // zero-filled so drawContours draws on a blank mask
        Mat                  hierarchy         = new Mat();
        List <Point>         ConsistP          = new List <Point>();
        List <MatOfPoint>    contours          = new List <MatOfPoint>();
        List <List <Point> > trianglePointList = new List <List <Point> >();

        Imgproc.findContours(DepthMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        int           numObjects    = contours.Count;
        List <Scalar> clickRGB      = new List <Scalar>();
        List <Scalar> clickHSV      = new List <Scalar>();
        List <int>    HullCountList = new List <int>();

        for (int i = 0; i < numObjects; i++)
        {
            Imgproc.drawContours(resultMat, contours, i, new Scalar(255), 1);
        }
        float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (numObjects > 0)
        {
            for (int index = 0; index < numObjects; index++)
            {
                OpenCVForUnity.Rect R0 = Imgproc.boundingRect(contours[index]);

                if (R0.area() > minAreaSize)
                {
                    //Declare containers for the point data
                    MatOfInt          hullInt       = new MatOfInt();
                    List <Point>      hullPointList = new List <Point>();
                    MatOfPoint        hullPointMat  = new MatOfPoint();
                    List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
                    MatOfInt4         defects       = new MatOfInt4();
                    //Filter the point data
                    MatOfPoint2f Temp2f = new MatOfPoint2f();
                    //Convert contours[index] from MatOfPoint to MatOfPoint2f
                    contours[index].convertTo(Temp2f, CvType.CV_32FC2);
                    //Simplify the polygon in place on Temp2f with approxPolyDP (epsilon = 30 px)
                    Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
                    //Convert back to MatOfPoint and write the simplified points into the contours list
                    Temp2f.convertTo(contours[index], CvType.CV_32S);

                    //Compute the convex hull around the contour
                    Imgproc.convexHull(contours[index], hullInt);
                    List <Point> pointMatList = contours[index].toList();
                    List <int>   hullIntList  = hullInt.toList();
                    for (int j = 0; j < hullIntList.Count; j++)
                    {
                        hullPointList.Add(pointMatList[hullIntList[j]]);
                        hullPointMat.fromList(hullPointList);
                        hullPoints.Add(hullPointMat);   // every entry references the same hullPointMat
                    }
                    ConsistP.Add(new Point(R0.x, R0.y));
                    ConsistP.Add(new Point(R0.x + R0.width, R0.y + R0.height));
                    ConsistP.Add(new Point(R0.x + R0.width, R0.y));
                    ConsistP.Add(new Point(R0.x, R0.y + R0.height));
                    clickRGB.Add(clickcolor(ColorMat, R0));
                    clickHSV.Add(clickcolor(HsvMat, R0));
                    HullCountList.Add(hullIntList.Count);
                    trianglePointList.Add(pointMatList);
                    //Release the temporary buffers
                    defects.Dispose();
                    Temp2f.Dispose();
                    hullPointList.Clear();
                    hullPointMat.Dispose();
                    hullInt.Dispose();
                    hullPoints.Clear();


                    //Debug.Log("ID = " +  index + " Color = " + clickcolor(ColorMat, R0));
                }
            }
            //Match objects by their color
            _matchColorObjectList = setColorMatchObject(ConsistP, trianglePointList, clickRGB, clickHSV, resultMat, HullCountList);
        }
        return(resultMat);
    }
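getContours records four bounding-box corners per accepted contour into ConsistP, aligned one-to-one with the entries of clickRGB, clickHSV and HullCountList. A hypothetical consumer sketch; it assumes those lists were returned alongside resultMat, which the snippet above does not do:

    // object k owns ConsistP[4 * k] .. ConsistP[4 * k + 3]
    for (int k = 0; k < clickRGB.Count; k++)
    {
        Point topLeft     = ConsistP[4 * k];
        Point bottomRight = ConsistP[4 * k + 1];
        Debug.Log("object " + k + " RGB " + clickRGB[k] + " hull points " + HullCountList[k] + " box " + topLeft + " ~ " + bottomRight);
    }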
Example #9
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                #else
                if (webCamTexture.didUpdateThisFrame)
                {
                    #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                    while (webCamTexture.width <= 16)
                    {
                        webCamTexture.GetPixels32();
                        yield return(new WaitForEndOfFrame());
                    }
                    #endif
                #endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    updateLayout();

                    screenOrientation = Screen.orientation;
                    initDone          = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }
Example #10
    private void FindDefects(Mat maskImage, ref int cx, ref int cy, int min_defects_count, int max_defects_count)
    {
        int erosion_size = 1;

        Mat element = Imgproc.getStructuringElement(
            Imgproc.MORPH_ELLIPSE,
            new Size(2 * erosion_size + 1, 2 * erosion_size + 1),
            new Point(erosion_size, erosion_size));

        // dilate and erode
        Imgproc.dilate(maskImage, maskImage, element);
        Imgproc.erode(maskImage, maskImage, element);
        element.Dispose();
        //Find contours in the image
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(maskImage, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        hierarchy.Dispose();   // the hierarchy is not used here

        //Loop to find the biggest contour; If no contour is found index=-1
        int    index = -1;
        double area  = 2000;

        for (int i = 0; i < contours.Count; i++)
        {
            var tempsize = Imgproc.contourArea(contours[i]);
            if (tempsize > area)
            {
                area  = tempsize;
                index = i;
            }
        }

        if (index == -1)
        {
            return;
        }
        else
        {
            var points = new MatOfPoint(contours[index].toArray());
            var hull   = new MatOfInt();
            Imgproc.convexHull(points, hull, false);

            var defects = new MatOfInt4();
            Imgproc.convexityDefects(points, hull, defects);

            var start_points = new MatOfPoint2f();
            var far_points   = new MatOfPoint2f();

            for (int i = 0; i < defects.size().height; i++)
            {
                int    ind_start = (int)defects.get(i, 0)[0];
                int    ind_end   = (int)defects.get(i, 0)[1];
                int    ind_far   = (int)defects.get(i, 0)[2];
                double depth     = defects.get(i, 0)[3] / 256;

                // side lengths of the triangle formed by the defect's start, end and far points
                double a = Core.norm(contours[index].row(ind_start) - contours[index].row(ind_end));
                double b = Core.norm(contours[index].row(ind_far) - contours[index].row(ind_start));
                double c = Core.norm(contours[index].row(ind_far) - contours[index].row(ind_end));

                double angle = Math.Acos((b * b + c * c - a * a) / (2 * b * c)) * 180.0 / Math.PI;

                double threshFingerLength = ((double)maskImage.height()) / 8.0;
                double threshAngle        = 80;

                if (angle < threshAngle && depth > threshFingerLength)
                {
                    // keep the start and far points of this defect
                    start_points.push_back(contours[index].row(ind_start));
                    far_points.push_back(contours[index].row(ind_far));
                }
            }

            points.Dispose();
            hull.Dispose();
            defects.Dispose();

            // when no finger found
            if (far_points.size().height < min_defects_count || far_points.size().height > max_defects_count)
            {
                return;
            }

            var cnts = new List <MatOfPoint>();
            cnts.Add(contours[index]);

            Mat mm = new Mat();
            Imgproc.cvtColor(maskImage, mm, Imgproc.COLOR_GRAY2BGR);

            Imgproc.drawContours(mm, cnts, 0, new Scalar(0, 0, 255));
            // OpenCVForUnity.ImgcodecsModule.Imgcodecs.imwrite("D:/tempImg.jpg", mm);
            mm.Dispose();   // debug visualization only; release it

            //var rotatedRect = Imgproc.minAreaRect(far_points);
            var boundingRect = Imgproc.boundingRect(far_points);

            cx = (int)(boundingRect.x + boundingRect.width / 2);
            cy = (int)(boundingRect.y + boundingRect.height / 2);
        }
    }
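A hypothetical usage sketch for FindDefects: given a binary hand mask (handMask is an assumed name), accept between 1 and 5 convexity defects (finger valleys) and read back the palm center; cx and cy keep their previous values when no hand passes the tests:

    int cx = 0, cy = 0;
    FindDefects(handMask, ref cx, ref cy, 1, 5);
    Debug.Log("palm center: (" + cx + ", " + cy + ")");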