Example #1
        public static bool isHeart(List <Point> shape)
        {
            //Check number of vertices
            if (shape.Count < 20)
            {
                return(false);
            }

            MatOfPoint shape_area = new MatOfPoint();

            shape_area.fromList(shape);

            MatOfPoint2f shape_area2f = new MatOfPoint2f(shape_area.toArray());

            //   if (Imgproc.contourArea(shape_area) > 6000)
            //       return false;

            double area  = Imgproc.contourArea(shape_area);
            double perim = Imgproc.arcLength(shape_area2f, true);
            double ratio = area / perim;

            if (ratio < 18 || ratio > 23)
            {
                return(false);
            }

            for (int i = 1; i < shape.Count; i++)
            {
                if (distanceTwoPoints(shape[i - 1], shape[i]) > 20)
                {
                    return(true);
                }
            }
            return(false);
        }
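The distanceTwoPoints helper used above (and by the other shape checks further down this page) is not included in the listing. A minimal sketch, assuming it is the plain Euclidean distance between two OpenCVForUnity Point values:

// Hypothetical helper; requires: using System; using OpenCVForUnity;
public static double distanceTwoPoints(Point a, Point b)
{
    double dx = a.x - b.x;
    double dy = a.y - b.y;
    return Math.Sqrt(dx * dx + dy * dy);
}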
Example #2
        // Use this for initialization
        void Start()
        {
            Mat imgMat = new Mat(500, 500, CvType.CV_8UC3, new Scalar(0, 0, 0));

            Debug.Log("imgMat.ToString() " + imgMat.ToString());


            int        rand_num  = 50;
            MatOfPoint pointsMat = new MatOfPoint();

            pointsMat.alloc(rand_num);

            Core.randu(pointsMat, 100, 400);

            Point[] points = pointsMat.toArray();
            for (int i = 0; i < rand_num; ++i)
            {
                Imgproc.circle(imgMat, points [i], 2, new Scalar(255, 255, 255), -1);
            }


            MatOfInt hullInt = new MatOfInt();

            Imgproc.convexHull(pointsMat, hullInt);


            List <Point> pointMatList  = pointsMat.toList();
            List <int>   hullIntList   = hullInt.toList();
            List <Point> hullPointList = new List <Point> ();

            for (int j = 0; j < hullInt.toList().Count; j++)
            {
                hullPointList.Add(pointMatList [hullIntList [j]]);
            }

            MatOfPoint hullPointMat = new MatOfPoint();

            hullPointMat.fromList(hullPointList);

            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();

            hullPoints.Add(hullPointMat);



            Imgproc.drawContours(imgMat, hullPoints, -1, new Scalar(0, 255, 0), 2);


            Imgproc.cvtColor(imgMat, imgMat, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(imgMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
Example #3
				// Use this for initialization
				void Start ()
				{
	
						Mat imgMat = new Mat (500, 500, CvType.CV_8UC3, new Scalar (0, 0, 0));
						Debug.Log ("imgMat dst ToString " + imgMat.ToString ());


						int rand_num = 50;
						MatOfPoint pointsMat = new MatOfPoint ();
						pointsMat.alloc (rand_num);

						Core.randu (pointsMat, 100, 400);

						Point[] points = pointsMat.toArray ();
						for (int i=0; i<rand_num; ++i) {
						
								Core.circle (imgMat, points [i], 2, new Scalar (255, 255, 255), -1);
						}

	
						MatOfInt hullInt = new MatOfInt ();
						Imgproc.convexHull (pointsMat, hullInt);


						List<Point> pointMatList = pointsMat.toList ();
						List<int> hullIntList = hullInt.toList ();
						List<Point> hullPointList = new List<Point> ();

						for (int j=0; j < hullInt.toList().Count; j++) {
								hullPointList.Add (pointMatList [hullIntList [j]]);
						}

						MatOfPoint hullPointMat = new MatOfPoint ();
		
						hullPointMat.fromList (hullPointList);

						List<MatOfPoint> hullPoints = new List<MatOfPoint> ();

						hullPoints.Add (hullPointMat);
		
		
		
						Imgproc.drawContours (imgMat, hullPoints, -1, new Scalar (0, 255, 0), 2);


						Imgproc.cvtColor (imgMat, imgMat, Imgproc.COLOR_BGR2RGB);

						Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
						Utils.matToTexture2D (imgMat, texture);
		
						gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
				}
Example #4
    // Identify the contours
    private bool analysisContoursRect(int index, List <MatOfPoint> contours, Mat result, List <MatchObject> matchObject)
    {
        OpenCVForUnity.Rect _testDepthRect = Imgproc.boundingRect(contours[index]);
        float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (_testDepthRect.area() > minAreaSize)
        {
            // Declare containers for the point data
            MatOfInt          hullInt       = new MatOfInt();
            List <Point>      hullPointList = new List <Point>();
            MatOfPoint        hullPointMat  = new MatOfPoint();
            List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
            MatOfInt4         defects       = new MatOfInt4();
            // Filter the point data
            MatOfPoint2f Temp2f = new MatOfPoint2f();
            //Convert contours(i) from MatOfPoint to MatOfPoint2f
            contours[index].convertTo(Temp2f, CvType.CV_32FC2);
            //Processing on mMOP2f1 which is in type MatOfPoint2f
            Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
            //Convert back to MatOfPoint and put the new values back into the contours list
            Temp2f.convertTo(contours[index], CvType.CV_32S);

            // Compute the convex hull enclosing the contour
            Imgproc.convexHull(contours[index], hullInt);
            List <Point> pointMatList = contours[index].toList();
            List <int>   hullIntList  = hullInt.toList();
            for (int j = 0; j < hullInt.toList().Count; j++)
            {
                hullPointList.Add(pointMatList[hullIntList[j]]);
                hullPointMat.fromList(hullPointList);
                hullPoints.Add(hullPointMat);
            }
            if (hullInt.toList().Count == 4)
            {
                if (!setMatchObject(index, pointMatList, contours, hullPoints, result, matchObject))
                {
                    //Debug.Log("setMatchObject fail");
                }
            }
            // Free memory
            defects.Dispose();
            hullPointList.Clear();
            hullPointMat.Dispose();
            hullInt.Dispose();
            hullPoints.Clear();
            return(true);
        }
        return(false);
    }
Example #5
        public static List <Point> filterPolygon(List <Point> approx_polygon)
        {
            while (true)
            {
                double max_ar    = 0;
                int    max_ar_id = 0;
                for (int k = 0; k < approx_polygon.Count; k++)
                {
                    List <Point> cur_polygon = new List <Point>();

                    for (int i = 0; i < approx_polygon.Count; i++)
                    {
                        cur_polygon.Add(approx_polygon[i]);
                    }

                    cur_polygon.Remove(cur_polygon[0 + k]);

                    MatOfPoint cur_area    = new MatOfPoint();
                    MatOfPoint approx_area = new MatOfPoint();
                    cur_area.fromList(cur_polygon);
                    approx_area.fromList(approx_polygon);

                    double area_ratio = Imgproc.contourArea(cur_area) / Imgproc.contourArea(approx_area);

                    // Debug.Log("ratio" + area_ratio);

                    if (area_ratio > max_ar)
                    {
                        max_ar    = area_ratio;
                        max_ar_id = k;
                    }
                }

                //If area still large enough remove a vertex
                if (max_ar > 0.8)
                {
                    // Debug.Log("Remove vertex  " + max_ar_id);
                    approx_polygon.Remove(approx_polygon.ToArray()[0 + max_ar_id]);
                }
                else
                {
                    break;
                }
            }

            return(approx_polygon);
        }
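filterPolygon repeatedly removes the vertex whose deletion keeps at least 80% of the polygon's area, and stops once every further removal would shrink the area below that threshold. A hedged usage sketch, assuming contour2f is a MatOfPoint2f holding one contour from findContours and that an epsilon of 3 is acceptable:

// Hypothetical usage; contour2f and the approxPolyDP epsilon are assumptions.
MatOfPoint2f approx2f = new MatOfPoint2f();
Imgproc.approxPolyDP(contour2f, approx2f, 3, true);
List<Point> simplified = filterPolygon(approx2f.toList());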
Example #6
        public static bool isStar(List <Point> shape)
        {
            double[] length = new double[5], angle = new double[5];

            if (shape.Count != 5)
            {
                return(false);
            }

            MatOfPoint shape_area = new MatOfPoint();

            shape_area.fromList(shape);

            if (!(Imgproc.contourArea(shape_area) > 6000 && Imgproc.contourArea(shape_area) < 10000))
            {
                return(false);
            }

            //Calculate side lengths
            length[0] = distanceTwoPoints(shape[0], shape[1]);
            length[1] = distanceTwoPoints(shape[1], shape[2]);
            length[2] = distanceTwoPoints(shape[2], shape[3]);
            length[3] = distanceTwoPoints(shape[3], shape[4]);
            length[4] = distanceTwoPoints(shape[4], shape[0]);

            //Calculate angles
            angle[0] = angleThreePoints(shape[0], shape[1], shape[2]);
            angle[1] = angleThreePoints(shape[1], shape[2], shape[3]);
            angle[2] = angleThreePoints(shape[2], shape[3], shape[4]);
            angle[3] = angleThreePoints(shape[3], shape[4], shape[0]);
            angle[4] = angleThreePoints(shape[4], shape[0], shape[1]);

            //Star check
            if (angle[0] > 98 && angle[0] < 128 &&
                angle[1] > 98 && angle[1] < 128 &&
                angle[2] > 98 && angle[2] < 128 &&
                angle[3] > 98 && angle[3] < 128 &&
                angle[4] > 98 && angle[4] < 128)
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
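The angleThreePoints helper is also not shown. A minimal sketch, assuming it returns the angle in degrees at the middle vertex b of the path a-b-c (consistent with the 50-128 degree thresholds used in these checks):

// Hypothetical helper; requires: using System; using OpenCVForUnity;
public static double angleThreePoints(Point a, Point b, Point c)
{
    double abx = a.x - b.x, aby = a.y - b.y;
    double cbx = c.x - b.x, cby = c.y - b.y;
    double dot   = abx * cbx + aby * cby;
    double cross = abx * cby - aby * cbx;
    return Math.Abs(Math.Atan2(cross, dot)) * 180.0 / Math.PI; // 0..180 degrees
}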
Example #7
        public static bool isSquare(List <Point> shape)
        {
            double [] length = new double[4], angle = new double[4];

            //Check number of vertices
            //cout << "	Vertex Num: " << shape.size() << endl;
            //cout << "	Area: " << contourArea(shape) << endl;

            if (shape.Count != 4)
            {
                return(false);
            }

            MatOfPoint shape_area = new MatOfPoint();

            shape_area.fromList(shape);

            //   if (!(Imgproc.contourArea(shape_area) > 8000 && Imgproc.contourArea(shape_area) < 12000))
            //       return false;
            //
            //Calculate side lengths
            length[0] = distanceTwoPoints(shape[0], shape[1]);
            length[1] = distanceTwoPoints(shape[1], shape[2]);
            length[2] = distanceTwoPoints(shape[2], shape[3]);
            length[3] = distanceTwoPoints(shape[3], shape[0]);

            //Calculate angles
            angle[0] = angleThreePoints(shape[0], shape[1], shape[2]);
            angle[1] = angleThreePoints(shape[1], shape[2], shape[3]);
            angle[2] = angleThreePoints(shape[2], shape[3], shape[0]);
            angle[3] = angleThreePoints(shape[3], shape[0], shape[1]);

            //Square check
            if (angle[0] > 80 && angle[0] < 100 &&
                angle[1] > 80 && angle[1] < 100 &&
                angle[2] > 80 && angle[2] < 100 &&
                angle[3] > 80 && angle[3] < 100)
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #8
        public static bool isTriangle(List <Point> shape)
        {
            //cout << "	Vertex Num: " << shape.size() << endl;
            //cout << "	Area: " << contourArea(shape) << endl;

            //Check number of vertices
            if (shape.Count != 3)
            {
                return(false);
            }

            //Check shape angles
            double [] length = new double[3], angle = new double[3];
            angle[0] = angleThreePoints(shape[0], shape[1], shape[2]);
            angle[1] = angleThreePoints(shape[1], shape[2], shape[0]);
            angle[2] = angleThreePoints(shape[2], shape[0], shape[1]);

            //if (angle[0] > ANGLE90_LOW && angle[0] < ANGLE90_HIGH)
            //	cout << "	Angles:" << angle[0] << " " << angle[1] << " " << angle[2] << endl;

            MatOfPoint shape_area = new MatOfPoint();

            shape_area.fromList(shape);

            if (!(Imgproc.contourArea(shape_area) > 3300 && Imgproc.contourArea(shape_area) < 6000))
            {
                return(false);
            }

            if ((angle[0] > 50 && angle[0] < 70 && angle[1] > 50 && angle[1] < 70 && angle[2] > 50 && angle[2] < 70))
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
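A possible caller tying the predicates from Examples #1 and #6-#8 together; the method name and the approxPolyDP epsilon are assumptions, not part of the original samples:

// Hypothetical dispatcher; epsilon (2% of the perimeter) is a tunable assumption.
public static string classifyShape(MatOfPoint contour)
{
    MatOfPoint2f contour2f = new MatOfPoint2f(contour.toArray());
    MatOfPoint2f approx2f  = new MatOfPoint2f();
    Imgproc.approxPolyDP(contour2f, approx2f, 0.02 * Imgproc.arcLength(contour2f, true), true);
    List<Point> shape = approx2f.toList();

    if (isTriangle(shape)) return "triangle";
    if (isSquare(shape))   return "square";
    if (isStar(shape))     return "star";
    if (isHeart(shape))    return "heart";   // note: isHeart expects a dense polygon (20+ vertices)
    return "unknown";
}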
Example #9
    // Use the depth contours to judge the RGB color
    public Mat getContours(Mat srcColorMat, Mat srcDepthMat)
    {
        Mat ColorMat = new Mat();
        Mat DepthMat = new Mat();
        Mat HsvMat   = new Mat();

        srcColorMat.copyTo(ColorMat);
        srcDepthMat.copyTo(DepthMat);
        Imgproc.cvtColor(ColorMat, HsvMat, Imgproc.COLOR_BGR2HSV);

        List <ColorObject> colorObjects        = new List <ColorObject>();
        Mat                  resultMat         = new Mat(DepthMat.height(), DepthMat.width(), CvType.CV_8UC1);
        Mat                  hierarchy         = new Mat();
        List <Point>         ConsistP          = new List <Point>();
        List <MatOfPoint>    contours          = new List <MatOfPoint>();
        List <List <Point> > trianglePointList = new List <List <Point> >();

        Imgproc.findContours(DepthMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        int           numObjects    = contours.Count;
        List <Scalar> clickRGB      = new List <Scalar>();
        List <Scalar> clickHSV      = new List <Scalar>();
        List <int>    HullCountList = new List <int>();

        for (int i = 0; i < numObjects; i++)
        {
            Imgproc.drawContours(resultMat, contours, i, new Scalar(255), 1);
        }
        double[] GetRGB      = new double[10];
        float    minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (numObjects > 0)
        {
            for (int index = 0; index < numObjects; index++)
            {
                OpenCVForUnity.Rect R0 = Imgproc.boundingRect(contours[index]);

                if (R0.area() > minAreaSize)
                {
                    // Declare containers for the point data
                    MatOfInt          hullInt       = new MatOfInt();
                    List <Point>      hullPointList = new List <Point>();
                    MatOfPoint        hullPointMat  = new MatOfPoint();
                    List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
                    MatOfInt4         defects       = new MatOfInt4();
                    // Filter the point data
                    MatOfPoint2f Temp2f = new MatOfPoint2f();
                    //Convert contours(i) from MatOfPoint to MatOfPoint2f
                    contours[index].convertTo(Temp2f, CvType.CV_32FC2);
                    //Processing on mMOP2f1 which is in type MatOfPoint2f
                    Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
                    //Convert back to MatOfPoint and put the new values back into the contours list
                    Temp2f.convertTo(contours[index], CvType.CV_32S);

                    // Compute the convex hull enclosing the contour
                    Imgproc.convexHull(contours[index], hullInt);
                    List <Point> pointMatList = contours[index].toList();
                    List <int>   hullIntList  = hullInt.toList();
                    for (int j = 0; j < hullInt.toList().Count; j++)
                    {
                        hullPointList.Add(pointMatList[hullIntList[j]]);
                        hullPointMat.fromList(hullPointList);
                        hullPoints.Add(hullPointMat);
                    }
                    ConsistP.Add(new Point(R0.x, R0.y));
                    ConsistP.Add(new Point(R0.x + R0.width, R0.y + R0.height));
                    ConsistP.Add(new Point(R0.x + R0.width, R0.y));
                    ConsistP.Add(new Point(R0.x, R0.y + R0.height));
                    clickRGB.Add(clickcolor(ColorMat, R0));
                    clickHSV.Add(clickcolor(HsvMat, R0));
                    HullCountList.Add(hullIntList.Count);
                    trianglePointList.Add(pointMatList);
                    // Free memory
                    defects.Dispose();
                    hullPointList.Clear();
                    hullPointMat.Dispose();
                    hullInt.Dispose();
                    hullPoints.Clear();


                    //Debug.Log("ID = " +  index + " Color = " + clickcolor(ColorMat, R0));
                }
            }
            // Use color to find the objects
            _matchColorObjectList = setColorMatchObject(ConsistP, trianglePointList, clickRGB, clickHSV, resultMat, HullCountList);
        }
        return(resultMat);
    }
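The clickcolor helper is not defined in this example. A plausible minimal sketch, assuming it returns the mean color inside the bounding rectangle of the given image (BGR or HSV, depending on which Mat is passed in):

// Hypothetical helper; samples the mean color inside the ROI.
private Scalar clickcolor(Mat img, OpenCVForUnity.Rect roi)
{
    Mat sub = new Mat(img, roi);   // ROI view, no pixel copy
    Scalar mean = Core.mean(sub);
    sub.Dispose();
    return mean;
}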
Example #10
        void Run()
        {
            rgbaMat = Imgcodecs.imread(Application.dataPath + "/Resources/changer.jpg", 1);
            Imgproc.cvtColor(rgbaMat, rgbaMat, Imgproc.COLOR_BGR2RGBA);

            //1. dlib face detection
            List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect>();

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

            foreach (UnityEngine.Rect unityRect in result)
            {
                detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
            }
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List <List <Vector2> > landmarkPoints = new List <List <Vector2> >();

            OpenCVForUnity.Rect openCVRect = detectResult[0];
            UnityEngine.Rect    rect       = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
            List <Vector2>      points     = faceLandmarkDetector.DetectLandmark(rect); // extract the landmark points from rect via the detector

            landmarkPoints.Add(points);

            //2. Compute the convex hull
            pointList = new List <Point>();
            for (int i = 0; i < points.Count; i++)
            {
                // draw the points
                //Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 2, new Scalar(255, 255, 255), -1);
                Point pt = new Point(landmarkPoints[0][i].x, landmarkPoints[0][i].y);
                Imgproc.circle(rgbaMat, pt, 0, new Scalar(0, 255, 0, 255), 2, Imgproc.LINE_8, 0); // draw the 68 landmark points
                pointList.Add(pt);
            }
            //Debug.Log(pointList.Count); //68

            //3. Triangulation
            TriangleDivide();

            //4. Affine transform (iterate over the triangles)
            Affine();

            //5. Display
            MatOfPoint pointsMat = new MatOfPoint();

            pointsMat.fromList(pointList);
            MatOfInt hullInt = new MatOfInt();

            Imgproc.convexHull(pointsMat, hullInt);
            List <Point> pointMatList  = pointsMat.toList();
            List <int>   hullIntList   = hullInt.toList();
            List <Point> hullPointList = new List <Point>();

            for (int j = 0; j < hullInt.toList().Count; j++)
            {
                hullPointList.Add(pointMatList[hullIntList[j]]);
            }
            MatOfPoint hullPointMat = new MatOfPoint();

            hullPointMat.fromList(hullPointList); //23*1
            List <MatOfPoint> hullPoints = new List <MatOfPoint>();

            hullPoints.Add(hullPointMat); //1
            Imgproc.drawContours(rgbaMat, hullPoints, -1, new Scalar(255, 255, 0, 255), 2);

            //------------------------------------------------//
            //Try---------------------------------------------//

            dstPointList = new List <Point>();
            for (int i = 0; i < pointList.Count; i++)
            {
                Point pt = pointList[i] + new Point(100, 0);
                dstPointList.Add(pt);
            }
            //MatOfPoint2f srcTri = new MatOfPoint2f(); // must not exceed 3 points
            //MatOfPoint2f dstTri = new MatOfPoint2f(); // must not exceed 3 points
            //srcTri.fromList(pointList);
            //srcTri.fromList(dstPointList);
            for (int j = 0; j < 3; j++)
            {
                //srcTri.push_back(hullPointMat);
                //dstTri.push_back(hull2[corpd.index[j]]);
            }


            Mat        mask = Mat.zeros(rgbaMat.size(), CvType.CV_8UC1);
            Point      p0   = new Point(0, 0);
            Point      p1   = new Point(0, 256);
            Point      p2   = new Point(256, 0);
            Point      p3   = new Point(256, 0);
            Point      p4   = new Point(512, 0);
            Point      p5   = new Point(512, 256);
            Point      p6   = new Point(256, 64);
            MatOfPoint pts1 = new MatOfPoint(new Point[3] {
                p0, p1, p2
            });
            MatOfPoint pts2 = new MatOfPoint(new Point[3] {
                p3, p4, p5
            });
            MatOfPoint2f srcTri = new MatOfPoint2f(new Point[3] {
                p0, p1, p2
            });
            MatOfPoint2f dstTri = new MatOfPoint2f(new Point[3] {
                p0, p1, p6
            });
            List <MatOfPoint> contour = new List <MatOfPoint>()
            {
                pts1
            };

            for (int i = 0; i < contour.Count; i++)
            {
                // extract the contour
                Imgproc.drawContours(mask, contour, i, new Scalar(255), -1); // draw everything onto the mask
            }
            rgbaMat.copyTo(mask, mask);
            Mat warpMat   = Imgproc.getAffineTransform(srcTri, dstTri);
            Mat warpImage = Mat.zeros(mask.size(), mask.type());

            Imgproc.warpAffine(mask, warpImage, warpMat, warpImage.size());

            //------------------------------------------------//

            Texture2D t2d = new Texture2D(rgbaMat.width(), rgbaMat.height(), TextureFormat.RGBA32, false);

            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, t2d);
            Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

            srcImage.sprite         = sp;
            srcImage.preserveAspect = true;

            //warpImage
            Texture2D dst_t2d = new Texture2D(warpImage.width(), warpImage.height(), TextureFormat.RGBA32, false);

            OpenCVForUnity.Utils.matToTexture2D(warpImage, dst_t2d);
            Sprite dst_sp = Sprite.Create(dst_t2d, new UnityEngine.Rect(0, 0, dst_t2d.width, dst_t2d.height), Vector2.zero);

            dstImage.sprite         = dst_sp;
            dstImage.preserveAspect = true;
        }
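TriangleDivide() and Affine() are not shown on this page. A minimal sketch of the triangulation step only, assuming OpenCV's Subdiv2D is used to build a Delaunay triangulation over the landmarks in pointList; where the resulting triangle list is stored is an assumption:

// Hypothetical sketch of the "3. Triangulation" step above.
void TriangleDivide()
{
    Subdiv2D subdiv = new Subdiv2D(new OpenCVForUnity.Rect(0, 0, rgbaMat.cols(), rgbaMat.rows()));
    foreach (Point p in pointList)
        subdiv.insert(p);

    MatOfFloat6 triangles = new MatOfFloat6();
    subdiv.getTriangleList(triangles); // each row: x1, y1, x2, y2, x3, y3
}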
Example #11
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            List <MatOfPoint> contours = detector.GetContours();

            detector.Process(rgbaMat);

            //Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));

            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //(int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");

            Point bottomLeft  = new Point(boundRect.x, boundRect.y + boundRect.height);
            Point topLeft     = new Point(boundRect.x, boundRect.y);
            Point bottomRight = new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height);
            Point topRight    = new Point(boundRect.x + boundRect.width, boundRect.y);

            rectPoints = new MatOfPoint2f(new Point(boundRect.x, boundRect.y),                                      //topleft
                                          new Point(boundRect.x + boundRect.width, boundRect.y),                    //Top Right
                                          new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height), //Bottom Right
                                          new Point(boundRect.x, boundRect.y + boundRect.height)                    //Bottom Left
                                          );

            //double a = boundRect.br ().y - boundRect.tl ().y;
            //a = a * 0.7;
            //a = boundRect.tl ().y + a;

            //Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

            List <Point3> m_markerCorners3dList = new List <Point3>();

            m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
            m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
            m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
            m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
            m_markerCorners3d.fromList(m_markerCorners3dList);

            //estimate pose
            Mat Rvec = new Mat();
            Mat Tvec = new Mat();
            Mat raux = new Mat();
            Mat taux = new Mat();

            Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);

            raux.convertTo(Rvec, CvType.CV_32F);
            taux.convertTo(Tvec, CvType.CV_32F);

            rotMat = new Mat(3, 3, CvType.CV_64FC1);
            Calib3d.Rodrigues(Rvec, rotMat);

            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            //Debug.Log ("transformationM " + transformationM.ToString ());

            Rvec.Dispose();
            Tvec.Dispose();
            raux.Dispose();
            taux.Dispose();
            rotMat.Dispose();

            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
            //Debug.Log("arM " + ARM.ToString());

            if (ARGameObject != null)
            {
                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                if (deactivateCoroutine == null)
                {
                    deactivateCoroutine = StartCoroutine(Wait(10.0f));
                }
                ARGameObject.SetActive(true);
            }

            //end pose estimation

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contour.toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <Point> listPoDefect = new List <Point>();

            if (convexDefect.rows() > 0)
            {
                List <int>   convexDefectList = convexDefect.toList();
                List <Point> contourList      = contour.toList();
                for (int j = 0; j < convexDefectList.Count; j = j + 4)
                {
                    Point farPoint = contourList[convexDefectList[j + 2]];
                    int   depth    = convexDefectList[j + 3];
                    //if (depth > threasholdSlider.value && farPoint.y < a)
                    //{
                    //    listPoDefect.Add(contourList[convexDefectList[j + 2]]);
                    //}
                    //Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
                }
            }


            Debug.Log("hull: " + hull.toList());
            if (convexDefect.rows() > 0)
            {
                Debug.Log("defects: " + convexDefect.toList());
            }

            //use these contours to do heart detection
            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            int defectsTotal = (int)convexDefect.total();

            Debug.Log("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            Debug.Log("numberOfFingers " + numberOfFingers);

            Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
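The Wait coroutine started above is not shown. A minimal sketch, assuming it simply hides ARGameObject again after the given delay; resetting deactivateCoroutine at the end is also an assumption:

// Hypothetical coroutine; requires: using System.Collections; using UnityEngine;
private IEnumerator Wait(float seconds)
{
    yield return new WaitForSeconds(seconds);
    if (ARGameObject != null)
        ARGameObject.SetActive(false);
    deactivateCoroutine = null;
}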
Example #12
	void Process() {
		string imText = "DRAW PATTERN";
		Core.putText(frame_pot, imText, new Point(110, 50), Core.FONT_HERSHEY_COMPLEX, 1.0, new Scalar(255, 0, 0), 2);	

		Mat hierarchy = new Mat ();
		List<MatOfPoint> contours = new List<MatOfPoint> ();
		MatOfPoint maxitem = new MatOfPoint ();
		MatOfInt hullInt = new MatOfInt ();
		
		frameclone = frame_thresh_final.clone ();
		Imgproc.findContours (frameclone, contours, hierarchy, Imgproc.RETR_LIST , Imgproc.CHAIN_APPROX_NONE);
		
		maxitem = contours [0];
		n = 0;
		for(int i=0; i<contours.Count; i++){
			if(contours[i].total() > maxitem.total()){
				maxitem = contours[i];
				n=i;
			}
		}
		
		OpenCVForUnity.Rect bRect = Imgproc.boundingRect (maxitem);
		int bRect_height = bRect.height;
		int bRect_width = bRect.width;
		if (bRect_height < 200 || bRect_width < 200)
			return;
		
		// Drawing Contours on the Frame
		//Imgproc.drawContours (frame_pot, contours, n, new Scalar(0, 255, 0), 2);
		
		Imgproc.convexHull (maxitem, hullInt);

		List<Point> maxitemPointList = maxitem.toList ();
		List<int> hullIntList = hullInt.toList ();
		List<Point> hullPointList = new List<Point> ();
		
		for (int j=0; j < hullInt.toList().Count; j++) {
			hullPointList.Add (maxitemPointList [hullIntList [j]]);
		}
		
		MatOfPoint hullPointMat = new MatOfPoint ();
		hullPointMat.fromList (hullPointList);

		List<MatOfPoint> hullPoints = new List<MatOfPoint> ();
		hullPoints.Add (hullPointMat);
		
		// Drawing Convex Hull on the Frame
		//Imgproc.drawContours (frame_pot, hullPoints, -1, new Scalar (0, 0, 255), 2);
		
		MatOfInt4 convexityDef = new MatOfInt4 ();
		Imgproc.convexityDefects (maxitem, hullInt, convexityDef);
		
		List<int> conDefIntList = convexityDef.toList ();
		List<Point> startpts = new List<Point> ();
		List<Point> farpts = new List<Point> ();
		List<Point> endpts = new List<Point> ();
		
		int tolerance = (int)(bRect_height/6);
		//Debug.Log ("Tolerance: " + tolerance);
		int[] defarray = new int[100];

		int coordX = 10000, coordY = 10000;

		int x1 = (int) sphere1.transform.position.x; 
		int y1 = (int) sphere1.transform.position.y;
		int x2 = (int) sphere2.transform.position.x;
		int y2 = (int) sphere2.transform.position.y;
		int x3 = (int) sphere3.transform.position.x; 
		int y3 = (int) sphere3.transform.position.y;
		int x4 = (int) sphere4.transform.position.x; 
		int y4 = (int) sphere4.transform.position.y;		

		Point pointer = new Point();

		for(int i=0; i < conDefIntList.Count/4 ; i++) {
			startpts.Add(maxitemPointList[conDefIntList[4*i]]);
			endpts.Add(maxitemPointList[conDefIntList[4*i+1]]);
			farpts.Add(maxitemPointList[conDefIntList[4*i+2]]);
			
			Point s = startpts[i];
			Point e = endpts[i];
			Point f = farpts[i];

			if (GetDistance(s,f) > tolerance) {
				//Core.circle(frame_pot, s, 15, new Scalar(255, 225, 0), -1);
				if (s.y < coordY) {
					pointer = s;
					coordY = (int) s.y;
					coordX = (int) s.x;
				}
			}
		}

		Core.circle(frame_pot, pointer, 15, new Scalar(255, 225, 0), -1);

		coordX = coordX - 240;
		coordY = -coordY + 320;

		if (coordX > x1-50 && coordX < x1+50 && coordY > y1-50 && coordY < y1+50) {
			if (previous.Equals('1'))
				return;
			input += "1";
			AddLine(previous, '1');
			previous = '1';
			Material mat1 = sphere1.GetComponent<Renderer>().material;
			mat1.color = Color.yellow;
			StartCoroutine(WaitAndChangeColor("1"));
		} else if (coordX > x2-50 && coordX < x2+50 && coordY > y2-50 && coordY < y2+50) {
			if (previous.Equals('2'))
				return;
			input += "2";
			AddLine(previous, '2');
			previous = '2';
			Material mat2 = sphere2.GetComponent<Renderer>().material;
			mat2.color = Color.yellow;
			StartCoroutine(WaitAndChangeColor("2"));
		} else if (coordX > x3-50 && coordX < x3+50 && coordY > y3-50 && coordY < y3+50) {
			if (previous.Equals('3'))
				return;
			input += "3";
			AddLine(previous, '3');
			previous = '3';
			Material mat3 = sphere3.GetComponent<Renderer>().material;
			mat3.color = Color.yellow;
			StartCoroutine(WaitAndChangeColor("3"));
		} else if (coordX > x4-50 && coordX < x4+50 && coordY > y4-50 && coordY < y4+50) {
			if (previous.Equals('4'))
				return;
			input += "4";
			AddLine(previous, '4');
			previous = '4';
			Material mat4 = sphere4.GetComponent<Renderer>().material;
			mat4.color = Color.yellow;
			StartCoroutine(WaitAndChangeColor("4"));
		}

		if (input.Length == password.Length) {
			auth = true;
			if (input.Equals(password)) {
				correct = true;
			} else {
				correct = false;
			}
		}
	}
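The GetDistance and GetAngle helpers are not included with this sample (they are also used by the similar Process() in Example #14). Minimal sketches, assuming Euclidean distance and the angle in degrees at the middle point f of the path s-f-e:

// Hypothetical helpers; requires: using System; using OpenCVForUnity;
double GetDistance(Point a, Point b)
{
    return Math.Sqrt(Math.Pow(a.x - b.x, 2) + Math.Pow(a.y - b.y, 2));
}

double GetAngle(Point s, Point f, Point e)
{
    double v1x = s.x - f.x, v1y = s.y - f.y;
    double v2x = e.x - f.x, v2y = e.y - f.y;
    double dot   = v1x * v2x + v1y * v2y;
    double cross = v1x * v2y - v1y * v2x;
    return Math.Abs(Math.Atan2(cross, dot)) * 180.0 / Math.PI; // 0..180 degrees
}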
Example #13
        /// <summary>
        /// Processes points by filter.
        /// </summary>
        /// <param name="img">Image mat.</param>
        /// <param name="srcPoints">Input points.</param>
        /// <param name="dstPoints">Output points.</param>
        /// <param name="drawDebugPoints">if true, draws debug points.</param>
        /// <returns>Output points.</returns>
        public override List <Vector2> Process(Mat img, List <Vector2> srcPoints, List <Vector2> dstPoints = null, bool drawDebugPoints = false)
        {
            if (srcPoints != null && srcPoints.Count != numberOfElements)
            {
                throw new ArgumentException("The number of elements is different.");
            }

            if (srcPoints == null)
            {
                return(dstPoints == null ? srcPoints : dstPoints);
            }

            if (!flag)
            {
                if (img.channels() == 4)
                {
                    Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGBA2GRAY);
                }
                else if (img.channels() == 3)
                {
                    Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGB2GRAY);
                }
                else
                {
                    if (prevgray.total() == 0)
                    {
                        prevgray = img.clone();
                    }
                    else
                    {
                        img.copyTo(prevgray);
                    }
                }

                for (int i = 0; i < numberOfElements; i++)
                {
                    prevTrackPts[i] = new Point(srcPoints[i].x, srcPoints[i].y);
                }

                flag = true;
            }

            if (srcPoints != null)
            {
                if (dstPoints == null)
                {
                    dstPoints = new List <Vector2>();
                }
                if (dstPoints != null && dstPoints.Count != numberOfElements)
                {
                    dstPoints.Clear();
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints.Add(new Vector2());
                    }
                }

                if (img.channels() == 4)
                {
                    Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGBA2GRAY);
                }
                else if (img.channels() == 3)
                {
                    Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGB2GRAY);
                }
                else
                {
                    if (gray.total() == 0)
                    {
                        gray = img.clone();
                    }
                    else
                    {
                        img.copyTo(gray);
                    }
                }

                if (prevgray.total() > 0)
                {
                    mOP2fPrevTrackPts.fromList(prevTrackPts);
                    mOP2fNextTrackPts.fromList(nextTrackPts);
                    Video.calcOpticalFlowPyrLK(prevgray, gray, mOP2fPrevTrackPts, mOP2fNextTrackPts, status, err);
                    prevTrackPts = mOP2fPrevTrackPts.toList();
                    nextTrackPts = mOP2fNextTrackPts.toList();

                    // calc diffDlib
                    prevTrackPtsMat.fromList(prevTrackPts);
                    OpenCVForUnity.CoreModule.Rect rect = Imgproc.boundingRect(prevTrackPtsMat);
                    double diffDlib = this.diffDlib * rect.area() / 40000.0 * diffCheckSensitivity;

                    // if the face is moving so fast, use dlib to detect the face
                    double diff = calDistanceDiff(prevTrackPts, nextTrackPts);
                    if (drawDebugPoints)
                    {
                        Debug.Log("variance:" + diff);
                    }
                    if (diff > diffDlib)
                    {
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            nextTrackPts[i].x = srcPoints[i].x;
                            nextTrackPts[i].y = srcPoints[i].y;

                            dstPoints[i] = srcPoints[i];
                        }

                        if (drawDebugPoints)
                        {
                            Debug.Log("DLIB");
                            for (int i = 0; i < numberOfElements; i++)
                            {
                                Imgproc.circle(img, new Point(srcPoints[i].x, srcPoints[i].y), 2, new Scalar(255, 0, 0, 255), -1);
                            }
                        }
                    }
                    else
                    {
                        // In this case, use Optical Flow
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            dstPoints[i] = new Vector2((float)nextTrackPts[i].x, (float)nextTrackPts[i].y);
                        }

                        if (drawDebugPoints)
                        {
                            Debug.Log("Optical Flow");
                            for (int i = 0; i < numberOfElements; i++)
                            {
                                Imgproc.circle(img, nextTrackPts[i], 2, new Scalar(0, 0, 255, 255), -1);
                            }
                        }
                    }
                }
                Swap(ref prevTrackPts, ref nextTrackPts);
                Swap(ref prevgray, ref gray);
            }
            return(dstPoints);
        }
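calDistanceDiff is not included with this filter. A sketch, assuming it returns the variance of the per-point displacement between the two point sets (the debug output above labels the value "variance"):

// Hypothetical helper; requires: using System; using System.Collections.Generic; using OpenCVForUnity;
double calDistanceDiff(List<Point> curPoints, List<Point> lastPoints)
{
    if (curPoints.Count != lastPoints.Count || curPoints.Count == 0)
        return 0.0;

    List<double> diffs = new List<double>();
    double sum = 0.0;
    for (int i = 0; i < curPoints.Count; i++)
    {
        double d = Math.Sqrt(Math.Pow(curPoints[i].x - lastPoints[i].x, 2) +
                             Math.Pow(curPoints[i].y - lastPoints[i].y, 2));
        diffs.Add(d);
        sum += d;
    }
    double mean = sum / diffs.Count;
    double variance = 0.0;
    for (int i = 0; i < diffs.Count; i++)
        variance += Math.Pow(diffs[i] - mean, 2);
    return variance / diffs.Count;
}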
Example #14
	void Process(){
		Mat hierarchy = new Mat ();
		List<MatOfPoint> contours = new List<MatOfPoint> ();
		MatOfPoint maxitem = new MatOfPoint ();
		MatOfInt hullInt = new MatOfInt ();
		
		frameclone = frame_thresh_final.clone ();
		
		Imgproc.findContours (frameclone, contours, hierarchy, Imgproc.RETR_LIST , Imgproc.CHAIN_APPROX_NONE);
		
		maxitem = contours[0];
		n = 0;
		for(int i=0; i<contours.Count; i++){
			
			if(contours[i].total() > maxitem.total()){
				maxitem = contours[i];
				n=i;
			}
		}
		
		OpenCVForUnity.Rect bRect = Imgproc.boundingRect (maxitem);
		int bRect_height =  bRect.height;
		int bRect_width = bRect.width;
		
		Imgproc.drawContours(frame_thresh_final, contours, n, new Scalar(255, 255, 255), -1);
		Imgproc.convexHull( maxitem, hullInt);
		
		List<Point> maxitemPointList = maxitem.toList ();
		List<int> hullIntList = hullInt.toList ();
		List<Point> hullPointList = new List<Point> ();
		
		for (int j=0; j < hullInt.toList().Count; j++) {
			hullPointList.Add (maxitemPointList [hullIntList[j]]);
		}
		
		MatOfPoint hullPointMat = new MatOfPoint ();
		
		hullPointMat.fromList (hullPointList);
		
		List<MatOfPoint> hullPoints = new List<MatOfPoint> ();
		
		hullPoints.Add (hullPointMat);
		
		//Imgproc.drawContours (frame, hullPoints, -1, new Scalar (0, 255, 0), 2);
		
		MatOfInt4 convexityDef = new MatOfInt4 ();
		Imgproc.convexityDefects (maxitem, hullInt, convexityDef);
		
		List<int> conDefIntList = convexityDef.toList ();
		List<Point> startpts = new List<Point> ();
		List<Point> farpts = new List<Point> ();
		List<Point> endpts = new List<Point> ();
		
		double defx1 = 1000, defx2 = 1000;
		int countx1 = 0, countx2 = 0;
		int tolerance = (int)(bRect_height/5.5);
		int count = 0, index = 0;
		//Debug.Log ("Tolerance: " + tolerance);
		double angleTol = 95.0;
		int[] defarray = new int[100];
		//CvFont font = new CvFont (FontFace.Vector0, 1.0, 1.0);
		for(int i=0; i < conDefIntList.Count/4 ; i++){
			startpts.Add(maxitemPointList[conDefIntList[4*i]]);
			endpts.Add(maxitemPointList[conDefIntList[4*i+1]]);
			farpts.Add(maxitemPointList[conDefIntList[4*i+2]]);
			
			Point s = startpts[i];
			Point e = endpts[i];
			Point f = farpts[i];
			
			if( GetAngle(s, f, e) < angleTol && GetDistance(s,f) > tolerance && GetDistance(e,f) > tolerance ){
				//string text = Convert.ToString(count);
				//Debug.Log("Depth1: "+GetDistance(s,f));
				//Debug.Log("Depth2: "+GetDistance(e,f));
				//Core.circle( frame_pot, f, 10, new Scalar(0, 0, 255), -1);
				//Core.circle( frame_pot, s, 10, new Scalar(0, 255, 0), -1);
				//Core.circle( frame_pot, e, 10, new Scalar(255, 0, 0), -1);
				//Core.putText(frame_pot, text, f, Core.FONT_HERSHEY_COMPLEX , 1.0, new Scalar(255, 255, 255)); 
				//frame_pot.PutText(text, f, font, CvColor.White);
				
				if(f.x < defx1){
					defx2 = defx1;
					countx2 = countx1;
					defx1 = f.x;
					countx1 = count;
				}
				else if(f.x < defx2)
				{
					defx2 = f.x;
					countx2 = count;
				}
				defarray[count] = index;
				count++;
			}
			index++;
		}
		//Debug.Log ("Count: " + count);
		//Debug.Log ("Total: " + farpts.Count);
		
		Point point1 = farpts [defarray [countx1]];
		Point point2 = farpts [defarray [countx2]];

		//Core.circle (frame_pot, point1, 15, new Scalar (255, 0, 0), 2);
		//Core.circle (frame_pot, point2, 15, new Scalar (255, 0, 0), 2);

		point1.y -= 5; 
		double posX = (point1.x + point2.x)/2.0;
		double posY = (point1.y + point2.y)/2.0;
		
		posX_new = (float)(posX - 240);
		posY_new = (float)(-posY + 320);

		double dist = Math.Sqrt(Math.Pow(point1.x - point2.x, 2) + Math.Pow(point1.y - point2.y, 2));
		scale1 = dist * 500000 / 640.0; 
		scale2 = dist * 700 / 640.0;
		scale3 = dist * 600 / 640.0;
		scale4 = dist * 15 / 640.0;
		scale5 = dist * 70 / 640.0;

		ringObj[0].transform.position = new Vector3(posX_new, posY_new, 0.0f);
		ringObj[1].transform.position = new Vector3(posX_new, posY_new, 0.0f);
		ringObj[2].transform.position = new Vector3(posX_new, posY_new, 0.0f);
		ringObj[3].transform.position = new Vector3(posX_new, posY_new, 0.0f);
		ringObj[4].transform.position = new Vector3(posX_new, posY_new, 0.0f);

		ringObj[0].transform.localScale = new Vector3((float)scale1, (float)scale1, (float)(scale1*1.5));
		ringObj[1].transform.localScale = new Vector3((float)scale2, (float)scale2, (float)(scale2));
		ringObj[2].transform.localScale = new Vector3((float)scale3, (float)scale3, (float)(scale3));
		ringObj[3].transform.localScale = new Vector3((float)scale4, (float)scale4, (float)(scale4));
		ringObj[4].transform.localScale = new Vector3((float)scale5, (float)scale5, (float)(scale5));
		
		Point point3 = new Point(point1.x, point2.y);
		angle_rot = GetAngle( point1, point2, point3);
		ringObj[0].transform.RotateAround( new Vector3(posX_new, posY_new, 0.0f), Vector3.forward, (float)angle_rot);
		ringObj[1].transform.RotateAround( new Vector3(posX_new, posY_new, 0.0f), Vector3.forward, (float)angle_rot);
		ringObj[2].transform.RotateAround( new Vector3(posX_new, posY_new, 0.0f), Vector3.forward, (float)angle_rot);
		ringObj[3].transform.RotateAround( new Vector3(posX_new, posY_new, 0.0f), Vector3.forward, (float)angle_rot);
		ringObj[4].transform.RotateAround( new Vector3(posX_new, posY_new, 0.0f), Vector3.forward, (float)angle_rot);
	}
Example #15
        /// <summary>
        /// Hands the pose estimation process.
        /// </summary>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
                        Imgproc.GaussianBlur (rgbaMat, rgbaMat, new OpenCVForUnity.Size (3, 3), 1, 1);
                        //Imgproc.medianBlur(mRgba, mRgba, 3);

                        if (!isColorSelected)
                                return;

                        List<MatOfPoint> contours = detector.getContours ();
                        detector.process (rgbaMat);

            //						Debug.Log ("Contours count: " + contours.Count);

                        if (contours.Count <= 0) {
                                return;
                        }

                        RotatedRect rect = Imgproc.minAreaRect (new MatOfPoint2f (contours [0].toArray ()));

                        double boundWidth = rect.size.width;
                        double boundHeight = rect.size.height;
                        int boundPos = 0;

                        for (int i = 1; i < contours.Count; i++) {
                                rect = Imgproc.minAreaRect (new MatOfPoint2f (contours [i].toArray ()));
                                if (rect.size.width * rect.size.height > boundWidth * boundHeight) {
                                        boundWidth = rect.size.width;
                                        boundHeight = rect.size.height;
                                        boundPos = i;
                                }
                        }

                        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect (new MatOfPoint (contours [boundPos].toArray ()));
                        Imgproc.rectangle (rgbaMat, boundRect.tl (), boundRect.br (), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //						Debug.Log (
            //						" Row start [" +
            //								(int)boundRect.tl ().y + "] row end [" +
            //								(int)boundRect.br ().y + "] Col start [" +
            //								(int)boundRect.tl ().x + "] Col end [" +
            //								(int)boundRect.br ().x + "]");

                        double a = boundRect.br ().y - boundRect.tl ().y;
                        a = a * 0.7;
                        a = boundRect.tl ().y + a;

            //						Debug.Log (
            //						" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

                        //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
                        Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

                        MatOfPoint2f pointMat = new MatOfPoint2f ();
                        Imgproc.approxPolyDP (new MatOfPoint2f (contours [boundPos].toArray ()), pointMat, 3, true);
                        contours [boundPos] = new MatOfPoint (pointMat.toArray ());

                        MatOfInt hull = new MatOfInt ();
                        MatOfInt4 convexDefect = new MatOfInt4 ();
                        Imgproc.convexHull (new MatOfPoint (contours [boundPos].toArray ()), hull);

                        if (hull.toArray ().Length < 3)
                                return;

                        Imgproc.convexityDefects (new MatOfPoint (contours [boundPos].toArray ()), hull, convexDefect);

                        List<MatOfPoint> hullPoints = new List<MatOfPoint> ();
                        List<Point> listPo = new List<Point> ();
                        for (int j = 0; j < hull.toList().Count; j++) {
                                listPo.Add (contours [boundPos].toList () [hull.toList () [j]]);
                        }

                        MatOfPoint e = new MatOfPoint ();
                        e.fromList (listPo);
                        hullPoints.Add (e);

                        List<MatOfPoint> defectPoints = new List<MatOfPoint> ();
                        List<Point> listPoDefect = new List<Point> ();
                        for (int j = 0; j < convexDefect.toList().Count; j = j+4) {
                                Point farPoint = contours [boundPos].toList () [convexDefect.toList () [j + 2]];
                                int depth = convexDefect.toList () [j + 3];
                                if (depth > threasholdSlider.value && farPoint.y < a) {
                                        listPoDefect.Add (contours [boundPos].toList () [convexDefect.toList () [j + 2]]);
                                }
            //								Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
                        }

                        MatOfPoint e2 = new MatOfPoint ();
                        e2.fromList (listPo);
                        defectPoints.Add (e2);

            //						Debug.Log ("hull: " + hull.toList ());
            //						Debug.Log ("defects: " + convexDefect.toList ());

                        Imgproc.drawContours (rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            //                      int defectsTotal = (int)convexDefect.total();
            //						Debug.Log ("Defect total " + defectsTotal);

                        this.numberOfFingers = listPoDefect.Count;
                        if (this.numberOfFingers > 5)
                                this.numberOfFingers = 5;

            //						Debug.Log ("numberOfFingers " + numberOfFingers);

            //						Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
                        numberOfFingersText.text = numberOfFingers.ToString ();

                        foreach (Point p in listPoDefect) {
                                Imgproc.circle (rgbaMat, p, 6, new Scalar (255, 0, 255, 255), -1);
                        }
        }
Example #16
        /// <summary>
        /// Hands the pose estimation process.
        /// </summary>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            List <MatOfPoint> contours = detector.getContours();

            detector.process(rgbaMat);

//						Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours [boundPos].toArray()));
            Core.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

//						Debug.Log (
//						" Row start [" +
//								(int)boundRect.tl ().y + "] row end [" +
//								(int)boundRect.br ().y + "] Col start [" +
//								(int)boundRect.tl ().x + "] Col end [" +
//								(int)boundRect.br ().x + "]");


            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

//						Debug.Log (
//						" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
            Core.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours [boundPos].toArray()), pointMat, 3, true);
            contours [boundPos] = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours [boundPos].toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contours [boundPos].toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();
            List <Point>      listPo     = new List <Point> ();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contours [boundPos].toList() [hull.toList() [j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <MatOfPoint> defectPoints = new List <MatOfPoint> ();
            List <Point>      listPoDefect = new List <Point> ();

            for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
            {
                Point farPoint = contours [boundPos].toList() [convexDefect.toList() [j + 2]];
                int   depth    = convexDefect.toList() [j + 3];
                if (depth > threasholdSlider.value && farPoint.y < a)
                {
                    listPoDefect.Add(contours [boundPos].toList() [convexDefect.toList() [j + 2]]);
                }
//								Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
            }

            MatOfPoint e2 = new MatOfPoint();

            e2.fromList(listPo);
            defectPoints.Add(e2);

//						Debug.Log ("hull: " + hull.toList ());
//						Debug.Log ("defects: " + convexDefect.toList ());

            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

//                      int defectsTotal = (int)convexDefect.total();
//						Debug.Log ("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

//						Debug.Log ("numberOfFingers " + numberOfFingers);

//						Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Core.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
Example No. 17
0
    /// <summary>
    /// Runs the hand pose estimation process.
    /// </summary>
    public void handPoseEstimationProcess(Mat rgbaMat)
    {
        //Imgproc.blur(mRgba, mRgba, new Size(5,5));
        Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
        //Imgproc.medianBlur(mRgba, mRgba, 3);

        if (!isColorSelected)
        {
            return;
        }

        List <MatOfPoint> contours = detector.getContours();

        detector.process(rgbaMat);
        //Debug.Log(contours + " | " + contours.Count);
        //string[] output = contours.ToArray();

        for (int i = 0; i < contours.Count; i++)
        {
            //Debug.Log("MatOfPoint2f " + new MatOfPoint2f(contours[i].toArray()) + " | " + i);
            //Debug.Log("MatOfPoint " + contours [i] + " | " + i);
            //Imgproc.circle(rgbaMat, contours[i], 6, new Scalar(0, 255, 0, 255), -1);


            //Debug.Log ("kotka" +  MatOfPoint.ReferenceEquals(x, y));
        }

        if (contours.Count <= 0)
        {
            return;
        }


        RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

        double boundWidth  = rect.size.width;
        double boundHeight = rect.size.height;
        int    boundPos    = 0;

        for (int i = 1; i < contours.Count; i++)
        {
            rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
            if (rect.size.width * rect.size.height > boundWidth * boundHeight)
            {
                boundWidth  = rect.size.width;
                boundHeight = rect.size.height;
                boundPos    = i;
            }
        }

        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
        Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
        //tochkaX = boundRect.tl ().x;
        //tochkaY = boundRect.tl ().y;
        Imgproc.circle(rgbaMat, boundRect.tl(), 6, new Scalar(0, 255, 0, 255), -1);
        Imgproc.circle(rgbaMat, boundRect.br(), 6, new Scalar(0, 255, 0, 255), -1);
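        // note (assumption): 'pointbY = boundRect.y' below overwrites the br().y value just
        // stored and, given the pointaX/pointbX naming, was probably meant to be pointaY.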
        pointbX = boundRect.br().x;
        pointbY = boundRect.br().y;
        pointaX = boundRect.x;
        pointbY = boundRect.y;
        double a = boundRect.br().y - boundRect.tl().y;

        a = a * 0.7;
        a = boundRect.tl().y + a;
        Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);
        MatOfPoint2f pointMat = new MatOfPoint2f();

        Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
        contours[boundPos] = new MatOfPoint(pointMat.toArray());
        MatOfInt  hull         = new MatOfInt();
        MatOfInt4 convexDefect = new MatOfInt4();

        Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
        if (hull.toArray().Length < 3)
        {
            return;
        }
        Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);
        List <MatOfPoint> hullPoints = new List <MatOfPoint>();
        List <Point>      listPo     = new List <Point>();

        for (int j = 0; j < hull.toList().Count; j++)
        {
            listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
        }
        MatOfPoint e = new MatOfPoint();

        e.fromList(listPo);
        hullPoints.Add(e);
        List <MatOfPoint> defectPoints = new List <MatOfPoint>();
        List <Point>      listPoDefect = new List <Point>();

        for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
        {
            Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
            int   depth    = convexDefect.toList()[j + 3];
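            // 8700 is in convexityDefects' fixed-point units (depth * 256), i.e. roughly a 34 px defect depth.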
            if (depth > 8700 && farPoint.y < a)
            {
                listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
            }
        }

        MatOfPoint e2 = new MatOfPoint();

        e2.fromList(listPoDefect); // collect the defect points, not the hull points
        defectPoints.Add(e2);
        Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
        this.numberOfFingers = listPoDefect.Count;
        if (this.numberOfFingers > 5)
        {
            this.numberOfFingers = 5;
        }
        foreach (Point p in listPoDefect)
        {
            Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
        }
    }
Example No. 18
0
        // Detect the hand and draw it onto the image
        private static void _handPoseEstimationProcess(Mat rgbaMat, Color handColor)
        {
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

            // Set the target color on the detector
            detector.setHsvColor(HGColorSpuiter.ColorToScalar(handColor));

            List <MatOfPoint> contours = detector.getContours();

            detector.process(rgbaMat);
            if (contours.Count <= 0)
            {
                return;
            }

            // Create a rotated bounding rectangle aligned with the hand's angle
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
            // Draw the region down to the wrist
            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(WristRangeColor), 2, 8, 0);

            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            // Draw the palm region
            Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), HGColorSpuiter.ColorToScalar(PalmsRangeColor), 2, 8, 0);

            // Approximate the polyline or polygon with one that has fewer vertices, keeping the distance between them within the specified precision
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
            contours[boundPos] = new MatOfPoint(pointMat.toArray());

            // Compute the shortest distance between the points and the polygon
            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
            if (hull.toArray().Length < 3)
            {
                return;
            }
            Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

            // Get the hand region
            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            // Draw the hand region
            Imgproc.drawContours(rgbaMat, hullPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

            // Get the locations recognized as fingers
            List <MatOfPoint> defectPoints = new List <MatOfPoint>();
            List <Point>      listPoDefect = new List <Point>();

            for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
            {
                Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
                int   depth    = convexDefect.toList()[j + 3];
                if (depth > depthThreashold && farPoint.y < a)
                {
                    listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
                }
            }

            MatOfPoint e2 = new MatOfPoint();

            e2.fromList(listPoDefect); // collect the defect points, not the hull points
            defectPoints.Add(e2);

            // Update the number of detected fingers
            numberOfFingers = listPoDefect.Count;
            if (numberOfFingers > 5)
            {
                numberOfFingers = 5;
            }

            // Draw points between the fingers
            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, HGColorSpuiter.ColorToScalar(BetweenFingersColor), -1);
            }
        }
Example No. 19
0
    void Start()
    {
        srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/lena.jpg", 1); // 512x512 source image
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);
        Imgproc.resize(srcMat, srcMat, new Size(512, 512));

        Texture2D src_t2d = new Texture2D(srcMat.width(), srcMat.height());

        Utils.matToTexture2D(srcMat, src_t2d);
        Sprite src_sp = Sprite.Create(src_t2d, new UnityEngine.Rect(0, 0, src_t2d.width, src_t2d.height), Vector2.zero);

        m_srcImage.sprite = src_sp;
        m_srcImage.rectTransform.offsetMin        = new Vector2(0, 0);
        m_srcImage.rectTransform.offsetMax        = new Vector2(srcMat.width(), srcMat.height());
        m_srcImage.rectTransform.anchoredPosition = Vector2.zero;

        //------------------------------------------------//

        // Contour extraction
        Mat        mask = Mat.zeros(srcMat.size(), CvType.CV_8UC3);
        Point      p0   = new Point(0, 0);
        Point      p1   = new Point(0, 256);
        Point      p2   = new Point(256, 0);
        MatOfPoint pts1 = new MatOfPoint(new Point[3] {
            p0, p1, p2
        });
        Point      p3   = new Point(256, 0);
        Point      p4   = new Point(512, 0);
        Point      p5   = new Point(512, 256);
        MatOfPoint pts2 = new MatOfPoint(new Point[3] {
            p3, p4, p5
        });
        List <MatOfPoint> contour = new List <MatOfPoint>()
        {
            pts1, pts2
        };

        for (int i = 0; i < contour.Count; i++)
        {
            Imgproc.drawContours(mask, contour, i, new Scalar(255), -1); // draw each contour filled onto the mask
        }

        //------------------------------------------------//

        MatOfPoint PointArray = new MatOfPoint();

        dstMat = Mat.zeros(srcMat.size(), CvType.CV_8UC3);
        PointArray.fromList(new List <Point>()
        {
            new Point(50, 10),
            new Point(300, 12),
            new Point(350, 250),
            new Point(9, 250),
        });
        Imgproc.fillConvexPoly(dstMat, PointArray, new Scalar(255, 0, 0), 4, 0);

        //------------------------------------------------//

        OpenCVForUnity.Rect r1 = Imgproc.boundingRect(pts1);


        Texture2D dst_t2d = new Texture2D(dstMat.width(), dstMat.height());

        Utils.matToTexture2D(dstMat, dst_t2d);
        Sprite dst_sp = Sprite.Create(dst_t2d, new UnityEngine.Rect(0, 0, dst_t2d.width, dst_t2d.height), Vector2.zero);

        m_roiImage.sprite                         = dst_sp;
        m_roiImage.preserveAspect                 = true;
        m_roiImage.rectTransform.offsetMin        = new Vector2(0, 0);
        m_roiImage.rectTransform.offsetMax        = new Vector2(dstMat.width(), dstMat.height());
        m_roiImage.rectTransform.anchoredPosition = Vector2.zero;
    }
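The triangle mask built above is drawn but never applied to srcMat in this snippet. A minimal sketch of applying it, assuming the srcMat and mask variables from the code above (Core.bitwise_and accepts an 8-bit single-channel mask, so the 3-channel mask is reduced to grayscale first; the grayscale values only need to be non-zero):

        // Reduce the 3-channel mask to a single channel (non-zero = keep pixel).
        Mat maskGray = new Mat();
        Imgproc.cvtColor(mask, maskGray, Imgproc.COLOR_RGB2GRAY);

        // Copy only the masked region of the source image.
        Mat maskedMat = new Mat();
        Core.bitwise_and(srcMat, srcMat, maskedMat, maskGray);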
Example No. 20
0
        /// <summary>
        /// Processes points by filter.
        /// </summary>
        /// <param name="img">Image mat.</param>
        /// <param name="srcPoints">Input points.</param>
        /// <param name="dstPoints">Output points.</param>
        /// <param name="drawDebugPoints">if true, draws debug points.</param>
        /// <returns>Output points.</returns>
        public override List <Vector2> Process(Mat img, List <Vector2> srcPoints, List <Vector2> dstPoints = null, bool drawDebugPoints = false)
        {
            if (srcPoints != null && srcPoints.Count != numberOfElements)
            {
                throw new ArgumentException("The number of elements is different.");
            }

            if (srcPoints != null)
            {
                if (dstPoints == null)
                {
                    dstPoints = new List <Vector2> ();
                }
                if (dstPoints != null && dstPoints.Count != numberOfElements)
                {
                    dstPoints.Clear();
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints.Add(new Vector2());
                    }
                }

                for (int i = 0; i < numberOfElements; i++)
                {
                    src_points [i].x = srcPoints [i].x;
                    src_points [i].y = srcPoints [i].y;
                }

                // calc diffDlib
                prevTrackPtsMat.fromList(src_points);
                OpenCVForUnity.Rect rect = Imgproc.boundingRect(prevTrackPtsMat);
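                // The dlib-vs-Kalman switch threshold appears to scale with the face bounding-box
                // area relative to a 200x200 (40000 px) reference, so larger faces tolerate larger
                // frame-to-frame motion (assumption about the 40000.0 constant).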
                double diffDlib          = this.diffDlib * rect.area() / 40000.0 * diffCheckSensitivity;

                // if the face is moving so fast, use dlib to detect the face
                double diff = calDistanceDiff(src_points, last_points);
                if (drawDebugPoints)
                {
                    Debug.Log("variance:" + diff);
                }
                if (diff > diffDlib)
                {
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints [i] = srcPoints [i];
                    }

                    if (drawDebugPoints)
                    {
                        Debug.Log("DLIB");
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            Imgproc.circle(img, new Point(srcPoints [i].x, srcPoints [i].y), 2, new Scalar(255, 0, 0, 255), -1);
                        }
                    }

                    flag = false;
                }
                else
                {
                    if (!flag)
                    {
                        // Set initial state estimate.
                        Mat     statePreMat = KF.get_statePre();
                        float[] tmpStatePre = new float[statePreMat.total()];
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            tmpStatePre [i * 2]     = (float)srcPoints [i].x;
                            tmpStatePre [i * 2 + 1] = (float)srcPoints [i].y;
                        }
                        statePreMat.put(0, 0, tmpStatePre);
                        Mat     statePostMat = KF.get_statePost();
                        float[] tmpStatePost = new float[statePostMat.total()];
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            tmpStatePost [i * 2]     = (float)srcPoints [i].x;
                            tmpStatePost [i * 2 + 1] = (float)srcPoints [i].y;
                        }
                        statePostMat.put(0, 0, tmpStatePost);

                        flag = true;
                    }

                    // Kalman Prediction
                    KF.predict();

                    // Update Measurement
                    float[] tmpMeasurement = new float[measurement.total()];
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        tmpMeasurement [i * 2]     = (float)srcPoints [i].x;
                        tmpMeasurement [i * 2 + 1] = (float)srcPoints [i].y;
                    }
                    measurement.put(0, 0, tmpMeasurement);

                    // Correct Measurement
                    Mat     estimated    = KF.correct(measurement);
                    float[] tmpEstimated = new float[estimated.total()];
                    estimated.get(0, 0, tmpEstimated);
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        predict_points [i].x = tmpEstimated [i * 2];
                        predict_points [i].y = tmpEstimated [i * 2 + 1];
                    }
                    estimated.Dispose();

                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints [i] = new Vector2((float)predict_points [i].x, (float)predict_points [i].y);
                    }

                    if (drawDebugPoints)
                    {
                        Debug.Log("Kalman Filter");
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            Imgproc.circle(img, predict_points [i], 2, new Scalar(0, 255, 0, 255), -1);
                        }
                    }
                }

                for (int i = 0; i < numberOfElements; i++)
                {
                    last_points [i].x = src_points [i].x;
                    last_points [i].y = src_points [i].y;
                }

                return(dstPoints);
            }
            else
            {
                return(dstPoints == null ? srcPoints : dstPoints);
            }
        }
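A minimal per-frame usage sketch for the filter above, assuming the Process method belongs to a points-filter instance (the names pointsFilter, rgbaMat and rawLandmarks below are illustrative, not from the original source):

            // rawLandmarks: the landmark positions detected this frame (hypothetical input).
            List <Vector2> smoothed = pointsFilter.Process(rgbaMat, rawLandmarks, null, true);
            // smoothed now holds either the raw detections (fast motion) or the
            // Kalman-filtered estimate (slow motion), per the branch above.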
Example No. 21
0
        /*=============================================*
        * From per-contour vertices to identifying the hand
        *=============================================*/
        /// <summary>
        /// Contours to hand gesture.
        /// </summary>
        /// <param name="rgbaMat">Rgba mat.</param>
        /// <param name="contour">Contour.</param>
        private static void _contourToHandGesture(Mat rgbaMat, MatOfPoint contour)
        {
            try
            {
                //Prepare to examine the vertices
                _pointOfVertices(rgbaMat, contour);

                //Get the size of the reference contour and draw it (as a rectangle)
                OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
                Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(ContourRangeColor), 2, 8, 0);

                /*=============================================*
                 * Get the size of the hand including the arm
                 **=============================================*/
                //Identify the size of the hand including the arm
                MatOfInt hull = new MatOfInt();
                Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

                //Get the region of the hand including the arm
                List <Point> armPointList = new List <Point>();
                for (int j = 0; j < hull.toList().Count; j++)
                {
                    Point armPoint = contour.toList()[hull.toList()[j]];
                    bool  addFlag  = true;
                    foreach (Point point in armPointList.ToArray())
                    {
                        //Vertices closer than 1/10 of the bounding rectangle are merged as noise
                        double distance = Mathf.Sqrt((float)((armPoint.x - point.x) * (armPoint.x - point.x) + (armPoint.y - point.y) * (armPoint.y - point.y)));
                        if (distance <= Mathf.Min((float)boundRect.width, (float)boundRect.height) / 10)
                        {
                            addFlag = false;
                            break;
                        }
                    }
                    if (addFlag)
                    {
                        armPointList.Add(armPoint);
                    }
                }

                MatOfPoint armMatOfPoint = new MatOfPoint();
                armMatOfPoint.fromList(armPointList);
                List <MatOfPoint> armPoints = new List <MatOfPoint>();
                armPoints.Add(armMatOfPoint);

                //Draw the region of the hand including the arm
                Imgproc.drawContours(rgbaMat, armPoints, -1, HGColorSpuiter.ColorToScalar(ArmRangeColor), 3);

                //If the hand including the arm is only a triangle, further recognition is difficult
                if (hull.toArray().Length < 3)
                {
                    return;
                }

                /*=============================================*
                 * Get the size of the palm
                 **=============================================*/
                //From the convex hull vertices, keep only the concave (defect) points to obtain the palm region
                MatOfInt4 convexDefect = new MatOfInt4();
                Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

                //Filter and collect the concave points
                List <Point> palmPointList = new List <Point>();
                for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
                {
                    Point farPoint = contour.toList()[convexDefect.toList()[j + 2]];
                    int   depth    = convexDefect.toList()[j + 3];
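                    // note: unlike the earlier variants, the y test below compares farPoint.y with the
                    // bounding-box height rather than an absolute image coordinate; an absolute cutoff
                    // such as boundRect.tl().y + 0.7 * height may have been intended (assumption).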
                    if (depth > depthThreashold && farPoint.y < boundRect.br().y - boundRect.tl().y)
                    {
                        palmPointList.Add(contour.toList()[convexDefect.toList()[j + 2]]);
                    }
                }

                MatOfPoint palmMatOfPoint = new MatOfPoint();
                palmMatOfPoint.fromList(palmPointList);
                List <MatOfPoint> palmPoints = new List <MatOfPoint>();
                palmPoints.Add(palmMatOfPoint);

                //Draw the palm region
                Imgproc.drawContours(rgbaMat, palmPoints, -1, HGColorSpuiter.ColorToScalar(PalmRangeColor), 3);

                /*=============================================*
                 * Get the size of the palm plus the fingertips
                 **=============================================*/
                //Based on the palm position, get the region excluding the wrist
                List <Point> handPointList = new List <Point>();
                handPointList.AddRange(armPointList.ToArray());
                handPointList.Reverse();
                handPointList.RemoveAt(0);
                handPointList.Insert(0, palmPointList.ToArray()[0]);
                handPointList.RemoveAt(handPointList.Count - 1);
                handPointList.Insert(handPointList.Count, palmPointList.ToArray()[palmPointList.Count - 1]);

                MatOfPoint handMatOfPoint = new MatOfPoint();
                handMatOfPoint.fromList(handPointList);
                List <MatOfPoint> handPoints = new List <MatOfPoint>();
                handPoints.Add(handMatOfPoint);

                Imgproc.drawContours(rgbaMat, handPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

                /*=============================================*
                 * Get the fingertip positions
                 **=============================================*/
                //Find the midpoint between each pair of adjacent palm vertices
                List <Point> palmCenterPoints = new List <Point>();
                for (int i = 0; i < palmPointList.Count; i++)
                {
                    Point palmPoint     = palmPointList.ToArray()[i];
                    Point palmPointNext = new Point();
                    if (i + 1 < palmPointList.Count)
                    {
                        palmPointNext = palmPointList.ToArray()[i + 1];
                    }
                    else
                    {
                        palmPointNext = palmPointList.ToArray()[0];
                    }

                    Point palmCenterPoint = new Point((palmPoint.x + palmPointNext.x) / 2, (palmPoint.y + palmPointNext.y) / 2);
                    palmCenterPoints.Add(palmCenterPoint);
                }

                //Find the hand vertex closest to each palm vertex
                for (int i = 0; i < palmCenterPoints.Count && i + 1 < handPointList.Count && i < 5; i++)
                {
                    Point palmPoint = palmCenterPoints.ToArray()[i];


                    List <Point> fingerList = new List <Point>();
                    fingerList.Add(palmPoint);
                    fingerList.Add(handPointList.ToArray()[i + 1]);

                    MatOfPoint fingerPoint = new MatOfPoint();
                    fingerPoint.fromList(fingerList);

                    List <MatOfPoint> fingerPoints = new List <MatOfPoint>();
                    fingerPoints.Add(fingerPoint);

                    Imgproc.drawContours(rgbaMat, fingerPoints, -1, HGColorSpuiter.ColorToScalar(FingerRangeColor), 3);
                }

//				Imgproc.putText(rgbaMat, "", new Point(2, rgbaMat.rows()-30), Core.FONT_HERSHEY_SIMPLEX, 1.0, HGColorSpuiter.ColorToScalar(Color.black), 2, Imgproc.LINE_AA, false);
            }
            catch (System.Exception e)
            {
                Debug.Log(e.Message);
            }
        }