Example #1
        protected void findCenterPoint()
        {
            float x = (float)(minimalBB.tl().x + minimalBB.br().x) / 2;
            float y = (float)(minimalBB.tl().y + minimalBB.br().y) / 2;

            centerPoint = new Vector2(x, y);
        }
Example #2
        private void zoomCropped(ref Mat croppedImage, ref OpenCVForUnity.Rect bb)
        {
            int croppedWidth  = croppedImage.cols();
            int croppedHeight = croppedImage.rows();

            OpenCVForUnity.Rect expandedBB;

            if (croppedWidth > croppedHeight)
            {
                int topMargin = (croppedWidth - croppedHeight) / 2;
                int botMargin = topMargin;

                // Needed because integer division by 2 discards the remainder
                if ((croppedHeight + topMargin * 2) != croppedWidth)
                {
                    botMargin = croppedWidth - croppedHeight - topMargin;
                }

                Core.copyMakeBorder(croppedImage, croppedImage, topMargin, botMargin, 0, 0, Core.BORDER_REPLICATE);
                expandedBB = new OpenCVForUnity.Rect(
                    new Point(bb.tl().x, bb.tl().y - topMargin),
                    new Point(bb.br().x, bb.br().y + botMargin));
            }
            else if (croppedHeight > croppedWidth)
            {
                int lefMargin = (croppedHeight - croppedWidth) / 2;
                int rigMargin = lefMargin;

                // Needed because integer division by 2 discards the remainder
                if ((croppedWidth + lefMargin * 2) != croppedHeight)
                {
                    rigMargin = croppedHeight - croppedWidth - lefMargin;
                }

                Core.copyMakeBorder(croppedImage, croppedImage, 0, 0, lefMargin, rigMargin, Core.BORDER_REPLICATE);
                expandedBB = new OpenCVForUnity.Rect(
                    new Point(bb.tl().x - lefMargin, bb.tl().y),
                    new Point(bb.br().x + rigMargin, bb.br().y));
            }
            else
            {
                expandedBB = bb;
            }

            // Here originPoint & originalSize are in frame coordinates.
            originPoint  = expandedBB.tl();
            originImage  = croppedImage.clone();
            originalSize = expandedBB.size();

            Mat scaleImage = new Mat();

            // Note: OpenCV's Size is (width, height); MODEL_HEIGHT is passed as the
            // width here, which is only safe because the model input is square
            // (224*224, per the comments in this example).
            Imgproc.resize(croppedImage, scaleImage, new Size(Constant.MODEL_HEIGHT, Constant.MODEL_WIDTH));

            // Return croppedImage (224*224*3) and bb (expandedBB, in original frame coordinates)
            croppedImage = scaleImage;
            bb           = expandedBB;
        }
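
A call-site sketch for the method above: modelToFrame is a hypothetical helper (not part of the original code) showing how the stored originPoint/originalSize map a point from the resized 224*224 model space back into the original frame coordinates.

        // Hypothetical helper; assumes originPoint/originalSize were set by
        // zoomCropped() and that Constant.MODEL_WIDTH/MODEL_HEIGHT describe
        // the model input size (224*224 per the comments above).
        private Point modelToFrame(Point modelPoint)
        {
            double scaleX = originalSize.width  / Constant.MODEL_WIDTH;
            double scaleY = originalSize.height / Constant.MODEL_HEIGHT;

            return new Point(originPoint.x + modelPoint.x * scaleX,
                             originPoint.y + modelPoint.y * scaleY);
        }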
Example #3
    // Returns a Rect clamped so that rect's min/max corners lie within mat's bounds
    public static OpenCVForUnity.Rect calcRectWithinMat(OpenCVForUnity.Rect rect, Mat mat)
    {
        int minLimitX = (int)Mathf.Min(Mathf.Max((float)0.0, (float)rect.tl().x), (float)mat.cols());
        int maxLimitX = (int)Mathf.Min(Mathf.Max((float)0.0, (float)rect.br().x), (float)mat.cols());
        int minLimitY = (int)Mathf.Min(Mathf.Max((float)0.0, (float)rect.tl().y), (float)mat.rows());
        int maxLimitY = (int)Mathf.Min(Mathf.Max((float)0.0, (float)rect.br().y), (float)mat.rows());

        return(new OpenCVForUnity.Rect(minLimitX, minLimitY, maxLimitX - minLimitX, maxLimitY - minLimitY));
    }
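
A usage sketch (roi and mat assumed from context): clamp a padded rect before taking a submat, since new Mat(mat, rect) throws when the rect extends outside the image.

    // Pad a detected bounding box by 50 px, then clamp it to the image bounds.
    OpenCVForUnity.Rect padded = new OpenCVForUnity.Rect(roi.x - 50, roi.y - 50, roi.width + 100, roi.height + 100);
    OpenCVForUnity.Rect safe   = calcRectWithinMat(padded, mat);
    Mat cropped = new Mat(mat, safe);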
Example #4
        private Mat cropTexToModelSizeMat(Texture2D sourceTex, List <int> thresList)
        {
            Mat sourceImage = new Mat(sourceTex.height, sourceTex.width, CvType.CV_8UC3);

            Utils.texture2DToMat(sourceTex, sourceImage);

            // BGR to HSV
            Mat hsvImage = new Mat(sourceImage.rows(), sourceImage.cols(), CvType.CV_8UC3);

            Imgproc.cvtColor(sourceImage, hsvImage, Imgproc.COLOR_BGR2HSV);
            // InRange
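            // thresList layout (inferred from the indexing below):
            // { H min, H max, S min, S max, V min, V max }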
            Mat grayImage = new Mat(sourceImage.rows(), sourceImage.cols(), CvType.CV_8UC1);

            Core.inRange(hsvImage,
                         new Scalar(thresList[0], thresList[2], thresList[4]),
                         new Scalar(thresList[1], thresList[3], thresList[5]),
                         grayImage);
            Imgproc.morphologyEx(grayImage, grayImage, Imgproc.MORPH_OPEN,
                                 Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)));

            // Find Contours
            List <MatOfPoint> contours = new List <MatOfPoint>();
            Mat hierarchy = new Mat();

            Imgproc.findContours(grayImage, contours, hierarchy, Imgproc.RETR_EXTERNAL,
                                 Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

            int    maxAreaIndex = 0;
            double maxArea      = 0;

            for (var i = 0; i < contours.Count; i++)
            {
                double area = Imgproc.contourArea(contours[i]);
                if (area > maxArea)
                {
                    maxArea      = area;
                    maxAreaIndex = i;
                }
            }
            // Find Bounding Box
            OpenCVForUnity.Rect roi = Imgproc.boundingRect(contours[maxAreaIndex]);
            OpenCVForUnity.Rect bb  = new OpenCVForUnity.Rect(
                new Point(Math.Max(roi.tl().x - 50.0, 0),
                          Math.Max(roi.tl().y - 50.0, 0)),
                new Point(Math.Min(roi.br().x + 50.0, sourceImage.cols()),
                          Math.Min(roi.br().y + 50.0, sourceImage.rows())));
            Mat croppedImage = new Mat(sourceImage, bb);

            // Zoom to 224*224
            zoomCropped(ref croppedImage, ref bb);

            return(croppedImage);
        }
Example #5
    private void RunCamShift()
    {
        using (Mat backProj = new Mat())
        {
            Imgproc.calcBackProject(new List <Mat>(new Mat[] { hsvMat }), new MatOfInt(1), roiHistMat, backProj, new MatOfFloat(100, 255), 1.0);

            SaveMatToFile("backProjBefore" + ss, backProj);

            Mat kernelD = new Mat(10, 10, CvType.CV_8UC1, new Scalar(255, 255, 255));
            Imgproc.dilate(backProj, backProj, kernelD);
            Mat new_back_proj = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC1, new Scalar(255, 255, 255)) - backProj;
            RotatedRect r = Video.CamShift(backProj, roiRect, termination);
            SaveMatToFile("backProjAfter" + ss, new_back_proj);
            //SaveMatToFile("hsvMat" + ss, hsvMat);
        }
        Imgproc.rectangle(rgbMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
    }
Example #6
    public void perspectiveAlign()
    {
        if (nowDetected)
        {
            Mat align = new Mat();
            orderRectCorners(nowRectPoints);
            Mat srcQuad = Converters.vector_Point_to_Mat(nowRectPoints, CvType.CV_32F);
            Mat dstQuad = Converters.vector_Point_to_Mat(dstRectPoints, CvType.CV_32F);

            Mat M = Imgproc.getPerspectiveTransform(srcQuad, dstQuad);
            Imgproc.warpPerspective(img_orig, align, M, new Size(1120, 860));

            int diffX = 60;
            int diffY = 60;

            Rect lt = new Rect(new Point(0, 0), new Point(diffX, diffY));
            Rect rt = new Rect(new Point(align.width() - 1 - diffX, 0), new Point(align.width() - 1, diffY));
            Rect lb = new Rect(new Point(0, align.height() - 1 - diffY), new Point(diffX, align.height() - 1));
            Rect rb = new Rect(new Point(align.width() - 1 - diffX, align.height() - 1 - diffY), new Point(align.width() - 1, align.height() - 1));

            // left-top
            Imgproc.rectangle(align, lt.tl(), lt.br(), new Scalar(0, 255, 0, 255), 1);
            // right-top
            Imgproc.rectangle(align, rt.tl(), rt.br(), new Scalar(0, 255, 0, 255), 1);
            // left-bottom
            Imgproc.rectangle(align, lb.tl(), lb.br(), new Scalar(0, 255, 0, 255), 1);
            // right-bottom
            Imgproc.rectangle(align, rb.tl(), rb.br(), new Scalar(0, 255, 0, 255), 1);

            getAnswerNumber(align);
            result.GetComponent <Renderer>().material.mainTexture = result_texture;
            result_texture.Resize(align.width(), align.height());
            result.gameObject.transform.localScale = new Vector3(align.width() / 2.5f, align.height() / 2.5f, 3);
            Utils.matToTexture2D(align, result_texture);
        }
    }
Example #7
 public void drawRect(Mat toMat, Scalar color = null, int thickness = 1)
 {
     if (color == null)
     {
         color = new Scalar(0, 255, 0);
     }
     Imgproc.rectangle(toMat, rect.tl(), rect.br(), color, thickness);
 }
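
Scalar is a reference type, so it cannot appear as a compile-time default parameter value; the color = null / null-check pattern above is the usual C# workaround. A call-site sketch (the detection instance is assumed):

     detection.drawRect(rgbaMat);                            // default: green, thickness 1
     detection.drawRect(rgbaMat, new Scalar(255, 0, 0), 3);  // red, thicker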
Example #8
    public static void affineTransform(Mat src, Mat dst, OpenCVForUnity.Rect roi)
    {
        // Get the affine transform matrix
        var srcPoints = new MatOfPoint2f(new Point(0.0, 0.0), new Point(src.cols() - 1, 0.0), new Point(src.cols() - 1, src.rows() - 1));
        var dstPoints = new MatOfPoint2f(roi.tl(), new Point(roi.x + roi.width, roi.y), roi.br());
        Mat transform = Imgproc.getAffineTransform(srcPoints, dstPoints);

        // Apply the affine transform
        Imgproc.warpAffine(src, dst, transform, dst.size(), Imgproc.INTER_LINEAR);
    }
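
The three point pairs map src's top-left, top-right and bottom-right corners onto the matching corners of roi, so the call scales and translates src into that region of dst. A call-site sketch (names assumed); note warpAffine fills pixels outside the warped patch with the border value (black by default):

    // Warp a 100x100 patch so it lands in the region (200, 150)-(300, 250)
    // of a 640x480 destination image.
    Mat canvas = Mat.zeros(480, 640, CvType.CV_8UC3);
    Mat patch  = new Mat(100, 100, CvType.CV_8UC3, new Scalar(0, 255, 0));
    affineTransform(patch, canvas, new OpenCVForUnity.Rect(200, 150, 100, 100));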
Example #9
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                detector.detect(rgbaMat, keypoints);
//                Debug.Log ("keypoints.ToString() " + keypoints.ToString());
                Features2d.drawKeypoints(rgbMat, keypoints, rgbaMat, Scalar.all(-1), Features2d.NOT_DRAW_SINGLE_POINTS);

                Imgproc.rectangle(rgbaMat, patternRect.tl(), patternRect.br(), new Scalar(255, 0, 0, 255), 5);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #10
    public static Mat crop(Mat sourceImage, List <int> thresList)
    {
        Mat hsvImage = new Mat(sourceImage.rows(), sourceImage.cols(), CvType.CV_8UC3);

        Imgproc.cvtColor(sourceImage, hsvImage, Imgproc.COLOR_BGR2HSV);

        Mat grayImage = new Mat(sourceImage.rows(), sourceImage.cols(), CvType.CV_8UC1);

        Core.inRange(hsvImage,
                     new Scalar(thresList[0], thresList[2], thresList[4]),
                     new Scalar(thresList[1], thresList[3], thresList[5]),
                     grayImage);
        Imgproc.morphologyEx(grayImage, grayImage, Imgproc.MORPH_OPEN,
                             Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(MORPH_KERNEL_SIZE, MORPH_KERNEL_SIZE)));

        // Find Contours
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(grayImage, contours, hierarchy, Imgproc.RETR_EXTERNAL,
                             Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        int    maxAreaIndex = 0;
        double maxArea      = 0;

        for (var i = 0; i < contours.Count; i++)
        {
            double area = Imgproc.contourArea(contours[i]);
            Debug.Log("CropImage.cs crop() : contours[" + i + "].Area = " + area);
            if (area > maxArea)
            {
                maxArea      = area;
                maxAreaIndex = i;
            }
        }

        OpenCVForUnity.Rect roi = Imgproc.boundingRect(contours[maxAreaIndex]);
        OpenCVForUnity.Rect bb  = new OpenCVForUnity.Rect(new Point(Math.Max(roi.tl().x - 50.0, 0), Math.Max(roi.tl().y - 50.0, 0)),
                                                          new Point(Math.Min(roi.br().x + 50.0, sourceImage.cols()), Math.Min(roi.br().y + 50.0, sourceImage.rows())));
        Mat croppedImage = new Mat(sourceImage, bb);

        Mat resultImage = zoomCropped(croppedImage);

        return(resultImage);
    }
Example #11
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);


                //flip to correct direction.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }


                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);


                Point[] points = roiPointList.ToArray();

                if (roiPointList.Count == 4)
                {
                    using (Mat backProj = new Mat()) {
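                        // The upper hue bound is 180 because OpenCV stores H/2 in 8-bit HSV images.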
                        Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }

#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        if (Input.GetTouch(0).phase == TouchPhase.Ended)
                        {
                            roiPointList.Clear();
                        }
                    }
#else
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Clear();
                    }
#endif
                }


                if (roiPointList.Count < 4)
                {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        Touch t = Input.GetTouch(0);
                        if (t.phase == TouchPhase.Ended)
                        {
                            roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
//                                  Debug.Log ("touch X " + t.position.x);
//                                  Debug.Log ("touch Y " + t.position.y);

                            if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                            {
                                roiPointList.RemoveAt(roiPointList.Count - 1);
                            }
                        }
                    }
#else
                    //Mouse
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
//                                  Debug.Log ("mouse X " + Input.mousePosition.x);
//                                  Debug.Log ("mouse Y " + Input.mousePosition.y);

                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }
#endif


                    if (roiPointList.Count == 4)
                    {
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray())) {
                            roiRect = Imgproc.boundingRect(roiPointMat);
                        }


                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                            using (Mat maskMat = new Mat()) {
                                Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

//                                  Debug.Log ("roiHist " + roiHistMat.ToString ());
                            }
                    }
                }

                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Core.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Core.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }

                    Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);


                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        void OnGUI()
        {
            float     screenScale  = Screen.height / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("OpenCVForUnitySample");
            }
            if (GUILayout.Button("change camera"))
            {
                shouldUseFrontFacing = !shouldUseFrontFacing;
                StartCoroutine(init());
            }


            GUILayout.EndVertical();
        }
Example #12
        /*=============================================*
        * From contour vertices to hand-gesture detection
        *=============================================*/
        /// <summary>
        /// Contours to hand gesture.
        /// </summary>
        /// <param name="rgbaMat">Rgba mat.</param>
        /// <param name="contour">Contour.</param>
        private static void _contourToHandGesture(Mat rgbaMat, MatOfPoint contour)
        {
            try
            {
                //Prepare to examine the vertices
                _pointOfVertices(rgbaMat, contour);

                //Get and draw the size of the reference contour (bounding rectangle)
                OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
                Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(ContourRangeColor), 2, 8, 0);

                /*=============================================*
                 * Get the size of the hand including the arm
                 **=============================================*/
                //Identify the hand region including the arm
                MatOfInt hull = new MatOfInt();
                Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

                //Collect the region of the hand including the arm
                List <Point> armPointList = new List <Point>();
                for (int j = 0; j < hull.toList().Count; j++)
                {
                    Point armPoint = contour.toList()[hull.toList()[j]];
                    bool  addFlag  = true;
                    foreach (Point point in armPointList.ToArray())
                    {
                        //Merge vertices closer than 1/10 of the bounding box as noise
                        double distance = Mathf.Sqrt((float)((armPoint.x - point.x) * (armPoint.x - point.x) + (armPoint.y - point.y) * (armPoint.y - point.y)));
                        if (distance <= Mathf.Min((float)boundRect.width, (float)boundRect.height) / 10)
                        {
                            addFlag = false;
                            break;
                        }
                    }
                    if (addFlag)
                    {
                        armPointList.Add(armPoint);
                    }
                }

                MatOfPoint armMatOfPoint = new MatOfPoint();
                armMatOfPoint.fromList(armPointList);
                List <MatOfPoint> armPoints = new List <MatOfPoint>();
                armPoints.Add(armMatOfPoint);

                //Draw the region of the hand including the arm
                Imgproc.drawContours(rgbaMat, armPoints, -1, HGColorSpuiter.ColorToScalar(ArmRangeColor), 3);

                //If the hull has fewer than three points (not even a triangle), further classification is impractical
                if (hull.toArray().Length < 3)
                {
                    return;
                }

                /*=============================================*
                 * Get the size of the palm
                 **=============================================*/
                //From the hull vertices, take only the concave (defect) points to get the palm region
                MatOfInt4 convexDefect = new MatOfInt4();
                Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

                //Filter the concave points and collect them
                List <Point> palmPointList = new List <Point>();
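                //Each entry of convexDefect is four ints: (start_index, end_index,
                //farthest_pt_index, fixpt_depth); fixpt_depth is fixed-point (actual
                //depth = fixpt_depth / 256), hence the loop below steps by 4.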
                for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
                {
                    Point farPoint = contour.toList()[convexDefect.toList()[j + 2]];
                    int   depth    = convexDefect.toList()[j + 3];
                    if (depth > depthThreashold && farPoint.y < boundRect.br().y - boundRect.tl().y)
                    {
                        palmPointList.Add(contour.toList()[convexDefect.toList()[j + 2]]);
                    }
                }

                MatOfPoint palmMatOfPoint = new MatOfPoint();
                palmMatOfPoint.fromList(palmPointList);
                List <MatOfPoint> palmPoints = new List <MatOfPoint>();
                palmPoints.Add(palmMatOfPoint);

                //Draw the palm region
                Imgproc.drawContours(rgbaMat, palmPoints, -1, HGColorSpuiter.ColorToScalar(PalmRangeColor), 3);

                /*=============================================*
                 * Get the size of the palm plus the fingertips
                 **=============================================*/
                //Using the palm position, get the region excluding the wrist
                List <Point> handPointList = new List <Point>();
                handPointList.AddRange(armPointList.ToArray());
                handPointList.Reverse();
                handPointList.RemoveAt(0);
                handPointList.Insert(0, palmPointList.ToArray()[0]);
                handPointList.RemoveAt(handPointList.Count - 1);
                handPointList.Insert(handPointList.Count, palmPointList.ToArray()[palmPointList.Count - 1]);

                MatOfPoint handMatOfPoint = new MatOfPoint();
                handMatOfPoint.fromList(handPointList);
                List <MatOfPoint> handPoints = new List <MatOfPoint>();
                handPoints.Add(handMatOfPoint);

                Imgproc.drawContours(rgbaMat, handPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

                /*=============================================*
                 * Get the fingertip positions
                 **=============================================*/
                //Find the midpoint of each adjacent pair of palm vertices
                List <Point> palmCenterPoints = new List <Point>();
                for (int i = 0; i < palmPointList.Count; i++)
                {
                    Point palmPoint     = palmPointList.ToArray()[i];
                    Point palmPointNext = new Point();
                    if (i + 1 < palmPointList.Count)
                    {
                        palmPointNext = palmPointList.ToArray()[i + 1];
                    }
                    else
                    {
                        palmPointNext = palmPointList.ToArray()[0];
                    }

                    Point palmCenterPoint = new Point((palmPoint.x + palmPointNext.x) / 2, (palmPoint.y + palmPointNext.y) / 2);
                    palmCenterPoints.Add(palmCenterPoint);
                }

                //For each palm midpoint, find the nearest hand vertex
                for (int i = 0; i < palmCenterPoints.Count && i + 1 < handPointList.Count && i < 5; i++)
                {
                    Point palmPoint = palmCenterPoints.ToArray()[i];


                    List <Point> fingerList = new List <Point>();
                    fingerList.Add(palmPoint);
                    fingerList.Add(handPointList.ToArray()[i + 1]);

                    MatOfPoint fingerPoint = new MatOfPoint();
                    fingerPoint.fromList(fingerList);

                    List <MatOfPoint> fingerPoints = new List <MatOfPoint>();
                    fingerPoints.Add(fingerPoint);

                    Imgproc.drawContours(rgbaMat, fingerPoints, -1, HGColorSpuiter.ColorToScalar(FingerRangeColor), 3);
                }

//				Imgproc.putText(rgbaMat, "", new Point(2, rgbaMat.rows()-30), Core.FONT_HERSHEY_SIMPLEX, 1.0, HGColorSpuiter.ColorToScalar(Color.black), 2, Imgproc.LINE_AA, false);
            }
            catch (System.Exception e)
            {
                Debug.Log(e.Message);
            }
        }
Example #13
        // Update is called once per frame
        void Update()
        {
            // Sample rate update
            ++frames;
            float timeNow = Time.realtimeSinceStartup;

            if (timeNow > lastInterval + updateInterval)
            {
                fps          = (float)(frames / (timeNow - lastInterval));
                frames       = 0;
                lastInterval = timeNow;
            }

            // Time since last update
            float dt = 1 / fps;

            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();
                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);


                Point[] points = roiPointList.ToArray();

                if (roiPointList.Count == 4)
                {
                    using (Mat backProj = new Mat())
                    {
                        Imgproc.calcBackProject(new List <Mat>(new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);


                        ///////////////////////////////////////////////////////
                        // Kalman Filter Start
                        ///////////////////////////////////////////////////////
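                        // State vector (9x1), inferred from the indexing of xp below:
                        // [x, vx, ax, y, vy, ay, s, vs, as] - ROI centre position,
                        // velocity and acceleration, plus window size s and its derivatives.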

                        // Process noise matrix, Q
                        Matrix Q2 = (Mathf.Pow(pn, 2) / dt) * Matrix.Build.DenseOfArray(new float[, ]
                        {
                            { Mathf.Pow(dt, 4) / 4, Mathf.Pow(dt, 3) / 2f, Mathf.Pow(dt, 2) / 2f },
                            { Mathf.Pow(dt, 3) / 2f, Mathf.Pow(dt, 2) / 2f, dt },
                            { Mathf.Pow(dt, 2) / 2f, dt, 1 }
                        });

                        Q = (Mathf.Pow(pn, 2) / dt) * Matrix.Build.DenseOfMatrixArray(new Matrix[, ]
                        {
                            { Q2, zero3x3, zero3x3 },
                            { zero3x3, Q2, zero3x3 },
                            { zero3x3, zero3x3, Q2 }
                        });

                        // Measurement noise matrix, R
                        R = Mathf.Pow(mn, 2) * Matrix.Build.DenseIdentity(3);

                        // Build transition and control matrices
                        Matrix F2 = Matrix.Build.DenseOfArray(new float[, ] {
                            { 1, dt, Mathf.Pow(dt, 2) / 2f }, { 0, 1, dt }, { 0, 0, 1 }
                        });
                        Matrix F = Matrix.Build.DenseOfMatrixArray(new Matrix[, ]
                        {
                            { F2, zero3x3, zero3x3 },
                            { zero3x3, F2, zero3x3 },
                            { zero3x3, zero3x3, F2 },
                        });

                        // Prediction
                        Pp = F * Pe * F.Transpose() + Q;
                        xp = F * xe;

                        roiPred = new OpenCVForUnity.Rect((int)(xp[0] - 0.5f * xp[6]), (int)(xp[3] - 0.5f * xp[6]), (int)xp[6], (int)xp[6]);
                        roiRect = new OpenCVForUnity.Rect((int)(xp[0] - 0.5f * roiSearch.width), (int)(xp[3] - 0.5f * roiSearch.height), roiSearch.width, roiSearch.height);
                        // roiRect = roiPred.clone();

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        ObjectFound = (roiRect.height > 0 && roiRect.width > 0);

                        // Innovation
                        Vector nu;
                        if (ObjectFound)
                        {
                            // Innovation
                            Vector zk = Vector.Build.DenseOfArray(new float[] { (int)(roiRect.x + 0.5f * roiRect.width),
                                                                                (int)(roiRect.y + 0.5f * roiRect.height),
                                                                                (int)(0.5 * (roiRect.width + roiRect.height)) });
                            nu = zk - H * xp;

                            // Search window update
                            roiSearch = r.boundingRect().clone();

                            // Debug
                            SpeedAtFailure = -1f;
                        }
                        else
                        {
                            roiRect = roiPred.clone();

                            if (xp[0] < 0 || xp[3] < 0 || xp[0] > 640 || xp[3] > 480)
                            {
                                xp[0] = 320f; xp[1] = 0; xp[2] = 0;
                                xp[3] = 240f; xp[4] = 0; xp[5] = 0;
                                xp[6] = 40f; xp[7] = 0; xp[8] = 0;

                                roiRect.x      = (int)(320 - 0.5f * 40);
                                roiRect.y      = (int)(240 - 0.5f * 40);
                                roiRect.height = 40;
                                roiRect.width  = 40;

                                roiPred = roiRect.clone();
                            }

                            // Innovation
                            Vector zk = Vector.Build.DenseOfArray(new float[] { (float)(roiRect.x + 0.5f * roiRect.width),
                                                                                (float)(roiRect.y + 0.5f * roiRect.height),
                                                                                (float)(0.5 * (roiRect.width + roiRect.height)) });

                            nu        = zk - H * xp;
                            roiSearch = roiPred.clone();
                        }

                        // Kalman gain
                        Matrix K = Pp * H.Transpose() * R.Transpose();


                        // Innovation gain
                        Vector gain = K * nu;

                        // State update
                        xe = xp + gain;

                        // Covariance update
                        Pe = (Pp.Inverse() + H.Transpose() * R.Transpose() * H).Inverse();

                        // Display results to console
                        StateArray      = xe.ToArray();
                        InnovationGain  = gain.ToArray();
                        CovarianceTrace = Pe.Diagonal().ToArray();

                        ///////////////////////////////////////////////////////
                        // Kalman Filter End
                        ///////////////////////////////////////////////////////

                        r.points(points);
                    }

                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Clear();
                    }
                }


                if (roiPointList.Count < 4)
                {
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                        //												Debug.Log ("mouse X " + Input.mousePosition.x);
                        //												Debug.Log ("mouse Y " + Input.mousePosition.y);

                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }

                    if (roiPointList.Count == 4)
                    {
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray()))
                        {
                            roiRect   = Imgproc.boundingRect(roiPointMat);
                            roiPred   = roiRect.clone();
                            roiSearch = roiRect.clone();
                        }

                        ///////////////////////////////////////////////////////
                        // Kalman Filter Initialize
                        ///////////////////////////////////////////////////////
                        Pe = Matrix.Build.DenseIdentity(9, 9);
                        Vector z1 = roi2center(roiRect);
                        xe = Vector.Build.DenseOfArray(new float[] { z1[0], 0, 0, z1[1], 0, 0, (roiRect.width + roiRect.height) / 2, 0, 0 });

                        ///////////////////////////////////////////////////////
                        // End Kalman Filter Initialize
                        ///////////////////////////////////////////////////////

                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                            using (Mat maskMat = new Mat())
                            {
                                Imgproc.calcHist(new List <Mat>(new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                                //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                            }
                    }
                }

                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Core.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                    Core.rectangle(rgbaMat, roiPred.tl(), roiPred.br(), new Scalar(0, 0, 255, 255), 2);
                    Core.rectangle(rgbaMat, roiSearch.tl(), roiSearch.br(), new Scalar(255, 0, 0, 255), 2);
                }

                Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                //				Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
Example #14
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            Mat bgraMatClipROI = new Mat(bgraMat, processingAreaRect);

            Imgproc.cvtColor(bgraMatClipROI, grayMat, Imgproc.COLOR_BGRA2GRAY);

            bgMat.copyTo(dstMatClippingROI);

            Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);


            grayMat.get(0, 0, grayPixels);

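            // Posterize into three gray levels: <70 -> 0, 70-119 -> 100, >=120 -> 255.
            // maskPixels marks the dark/bright pixels that later overwrite dstMat.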
            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels [i] = 0;

                if (grayPixels [i] < 70)
                {
                    grayPixels [i] = 0;
                    maskPixels [i] = 1;
                }
                else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                {
                    grayPixels [i] = 100;
                }
                else
                {
                    grayPixels [i] = 255;
                    maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);
            maskMat.put(0, 0, maskPixels);
            grayMat.copyTo(dstMatClippingROI, maskMat);


            Imgproc.Canny(lineMat, lineMat, 20, 120);

            lineMat.copyTo(maskMat);

            Core.bitwise_not(lineMat, lineMat);

            lineMat.copyTo(dstMatClippingROI, maskMat);


            //Imgproc.putText (dstMat, "W:" + dstMat.width () + " H:" + dstMat.height () + " SO:" + Screen.orientation, new Point (5, dstMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (0), 2, Imgproc.LINE_AA, false);

            Imgproc.cvtColor(dstMat, bgraMat, Imgproc.COLOR_GRAY2BGRA);

            // Uncommented to verify that it draws a rectangle
            Imgproc.rectangle(bgraMat, new Point(0, 0), new Point(bgraMat.width(), bgraMat.height()), new Scalar(0, 0, 255, 255), 4);
            Imgproc.rectangle(bgraMat, processingAreaRect.tl(), processingAreaRect.br(), new Scalar(0, 0, 255, 255), 4);

            bgraMatClipROI.Dispose();


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                Utils.fastMatToTexture2D(bgraMat, texture);
                bgraMat.Dispose();

                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);
                position        *= 1.2f;

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }, false);
        }
Example #15
        // Update is called once per frame
        void Update()
        {
            #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
                {
                    storedTouchPoint = new Point(t.position.x, t.position.y);
                    //Debug.Log ("touch X " + t.position.x);
                    //Debug.Log ("touch Y " + t.position.y);
                }
            }
            #else
            //Mouse
            if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
            {
                storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
                //Debug.Log ("mouse X " + Input.mousePosition.x);
                //Debug.Log ("mouse Y " + Input.mousePosition.y);
            }
            #endif

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                if (storedTouchPoint != null)
                {
                    ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbaMat.cols(), rgbaMat.rows());
                    OnTouch(rgbaMat, storedTouchPoint);
                    storedTouchPoint = null;
                }

                Point[] points = roiPointList.ToArray();

                if (shouldStartCamShift)
                {
                    shouldStartCamShift = false;

                    using (MatOfPoint roiPointMat = new MatOfPoint(points)) {
                        roiRect = Imgproc.boundingRect(roiPointMat);
                    }

                    if (roiHistMat != null)
                    {
                        roiHistMat.Dispose();
                        roiHistMat = null;
                    }
                    roiHistMat = new Mat();

                    using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                        using (Mat maskMat = new Mat()) {
                            Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                            Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                            //Debug.Log ("roiHist " + roiHistMat.ToString ());
                        }
                }
                else if (points.Length == 4)
                {
                    using (Mat backProj = new Mat()) {
                        Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }
                }

                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Imgproc.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Imgproc.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }

                    Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Imgproc.putText(rgbaMat, "Please touch the 4 points surrounding the tracking object.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

//              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #16
    void Update()
    {
        mycap.Play();                                               // Start the camera
        Utils.webCamTextureToMat(mycap, tu);                        // Convert the camera frame into Mat image data
        Imgproc.cvtColor(tu, a, 37);                                // Convert RGB to YUV (code 37 = COLOR_RGB2YCrCb)
        Core.split(a, c);                                           // Split the YUV planes into the list c
        b = new Mat(480, 640, CvType.CV_8UC1);                      // Single-channel image variable
        b = c[1];                                                   // Take channel 1, the U (chroma) plane
        Imgproc.threshold(b, b, fa, 255, Imgproc.THRESH_BINARY);    // Binarize b
        //Imgproc.threshold(b, b, fa, 255, Imgproc.THRESH_BINARY_INV);
        b.copyTo(b1);                                               // Keep a copy of the binarized U plane in b1
        List <MatOfPoint> contours = new List <MatOfPoint>();       // Holds the detected contour data

        Imgproc.findContours(b, contours, hier, 0, 2);              // Find the contours of b and store them in contours
        ////0 1 2 3
        s = 0;                                                      // Contour area
        x = 0; y = 480;                                             // Coordinates of the contour's extreme point
        List <MatOfPoint> selectcol = new List <MatOfPoint>();      // Holds the candidate hand contours

        foreach (MatOfPoint i in contours)                          // Keep only contours with area above 5000
        {
            if (Mathf.Abs((float)Imgproc.contourArea(i)) > 5000)
            {
                selectcol.Add(i);                                   // Store contours larger than 5000 in selectcol
            }
        }
        if (selectcol.Count == 1)                                   // Exactly one contour: treat it as the hand
        {
            s     = Mathf.Abs((float)Imgproc.contourArea(selectcol[0]));
            rect1 = Imgproc.boundingRect(selectcol[0]);             // Bounding rectangle of the hand contour
            Point[] dian = selectcol[0].toArray();                  // Extract the contour points
            foreach (Point dian1 in dian)
            {
                if (dian1.y < y)                                    // Track the contour point with the smallest y
                {
                    y = dian1.y;
                    x = dian1.x;
                }
            }
        }
        else if (selectcol.Count == 2)                              // Two contours: pick the lower one as the hand
        {
            if (selectcol[0].toArray()[0].y < selectcol[1].toArray()[0].y)
            {
                s     = Mathf.Abs((float)Imgproc.contourArea(selectcol[0]));
                rect1 = Imgproc.boundingRect(selectcol[0]);
                Point[] dian = selectcol[0].toArray();
                foreach (Point dian1 in dian)
                {
                    if (dian1.y < y)                                // Track the contour point with the smallest y
                    {
                        y = dian1.y;
                        x = dian1.x;
                    }
                }
            }
            else
            {
                s     = Mathf.Abs((float)Imgproc.contourArea(selectcol[1]));
                rect1 = Imgproc.boundingRect(selectcol[1]);
                Point[] dian = selectcol[1].toArray();
                foreach (Point dian1 in dian)
                {
                    if (dian1.y < y)
                    {
                        y = dian1.y;
                        x = dian1.x;
                    }
                }
            }
        }
        else                                                        // Otherwise fix x and y to mark "hand not found"
        {
            x = 320; y = 200;
        }
        //    Debug.Log(selectcol.Count);
        Imgproc.rectangle(b1, rect1.tl(), rect1.br(), color);       // Draw the bounding rectangle on b1
        Mat jie = new Mat(b1, rect1);                               // Crop the drawn rectangle out of b1

        Imgproc.resize(jie, shishi, size);                          // Resize for display in the UI
    }
Example #17
    public void getAnswerNumber(Mat align)
    {
        Mat align_gray = new Mat(), align_edges = new Mat();

        Imgproc.cvtColor(align, align_gray, Imgproc.COLOR_RGB2GRAY);
        Imgproc.Canny(align_gray, align_edges, 50, 50);
        Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2 + 1, 2 + 1), new Point(1, 1));

        Imgproc.dilate(align_edges, align_edges, element);


        //Shape detection
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(align_edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        List <MatOfPoint> hulls = new List <MatOfPoint>();

        //Texture2D tex = new Texture2D(align_edges.width(), align_edges.height(), TextureFormat.RGB24, false);
        //Utils.matToTexture2D(align_edges, tex);
        //byte[] bytes1 = tex.EncodeToJPG();
        //File.WriteAllBytes("D:/2019/OMR/" + "test.png", bytes1);

        for (int i = 0; i < contours.Count; i++)
        {
            MatOfInt hull_temp = new MatOfInt();
            Imgproc.convexHull(contours[i], hull_temp);
            int[]   arrIndex   = hull_temp.toArray();
            Point[] arrContour = contours[i].toArray();
            Point[] arrPoints  = new Point[arrIndex.Length];

            for (int k = 0; k < arrIndex.Length; k++)
            {
                arrPoints[k] = arrContour[arrIndex[k]];
            }

            MatOfPoint temp = new MatOfPoint();
            temp.fromArray(arrPoints);

            //Filter outliers
            if (Imgproc.contourArea(temp) > 90000 && Imgproc.contourArea(temp) < 110000)
            {
                hulls.Add(temp);
            }
        }

        List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();

        for (int i = 0; i < hulls.Count; i++)
        {
            MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
            hull2f.Add(newPoint);
        }

        List <Rect> rects = new List <Rect>();

        for (int i = 0; i < hulls.Count; i++)
        {
            //Approximate polygon
            MatOfPoint2f approx = new MatOfPoint2f();
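            //Epsilon = 1% of the hull perimeter: the maximum allowed deviation
            //between the hull and its polygonal approximation.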
            Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
            List <Point> approx_polygon = approx.toList();
            approx_polygon = Scannerproc.filterPolygon(approx_polygon);
            double area = Imgproc.contourArea(approx);

            if (Scannerproc.isSquare(approx_polygon))
            {
                Rect r         = Imgproc.boundingRect(new MatOfPoint(approx_polygon.ToArray()));
                bool isContain = false;
                for (int k = 0; k < rects.Count; k++)
                {
                    if (Scannerproc.distanceTwoPoints(rects[k].tl(), r.tl()) < 100)
                    {
                        //if (rects[k].contains(r) || r.contains(rects[k]))
                        isContain = true;
                    }
                }

                if (!isContain)
                {
                    rects.Add(r);
                    // Imgproc.rectangle(align, r.tl(), r.br(), new Scalar(255, 0, 0, 255), 3);

                    for (int j = 1; j < 21; j++)
                    {
                        Rect roi = new Rect((int)r.tl().x + (int)((r.width * 1.3) / 6), (int)r.tl().y + (r.height / 21) * j, (int)((r.width * 4.7) / 6), r.height / 21);
                        int  num = getAnswerNumber(align, roi);
                        if (num != 0)
                        {
                            Imgproc.putText(align, " " + num, new Point(roi.x - 40, roi.y + 25), 1, 2, new Scalar(255, 0, 0, 255), 3, Core.LINE_AA, false);
                            Imgproc.rectangle(align, roi.tl(), roi.br(), new Scalar(0, 255, 0, 255), 2);
                        }
                    }
                }
            }

            //Center of mass
            int cx = 0,
                cy = 0;
            for (int k = 0; k < approx_polygon.Count; k++)
            {
                cx += (int)approx_polygon[k].x;
                cy += (int)approx_polygon[k].y;
            }
            cx /= approx_polygon.Count;
            cy /= approx_polygon.Count;

            // Imgproc.circle(roi, new Point(cx, cy), 5, new Scalar(255), -1);
        }

        if (rects.Count == 4)
        {
            nowDetected = false;
        }
    }
Example #18
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);


                Point[] points = roiPointList.ToArray();

                if (roiPointList.Count == 4)
                {
                    using (Mat backProj = new Mat()) {
                        Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }

                    #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        if (Input.GetTouch(0).phase == TouchPhase.Ended)
                        {
                            roiPointList.Clear();
                        }
                    }
                    #else
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Clear();
                    }
                    #endif
                }


                if (roiPointList.Count < 4)
                {
                    #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        Touch t = Input.GetTouch(0);
                        if (t.phase == TouchPhase.Ended)
                        {
                            roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
                            //                                  Debug.Log ("touch X " + t.position.x);
                            //                                  Debug.Log ("touch Y " + t.position.y);

                            if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                            {
                                roiPointList.RemoveAt(roiPointList.Count - 1);
                            }
                        }
                    }
                    #else
                    //Mouse
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                        //                                              Debug.Log ("mouse X " + Input.mousePosition.x);
                        //                                              Debug.Log ("mouse Y " + Input.mousePosition.y);

                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }
                    #endif


                    if (roiPointList.Count == 4)
                    {
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray())) {
                            roiRect = Imgproc.boundingRect(roiPointMat);
                        }


                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                            using (Mat maskMat = new Mat()) {
                                Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                                //                                                      Debug.Log ("roiHist " + roiHistMat.ToString ());
                            }
                    }
                }

                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Imgproc.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Imgproc.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }

                    Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Imgproc.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);


//              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
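The `termination` criteria handed to Video.CamShift is declared elsewhere in this example. A typical initialization, and the one used by the stock CamShift sample (an assumption here), stops after 10 iterations or once the search window shifts by less than one pixel:

                // Assumed initialization of the `termination` field used above:
                // stop CamShift after 10 iterations or a shift smaller than 1 px.
                TermCriteria termination = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);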
Example #19
0
    /// <summary>
    /// Hands the pose estimation process.
    /// </summary>
    public void handPoseEstimationProcess(Mat rgbaMat)
    {
        //Imgproc.blur(mRgba, mRgba, new Size(5,5));
        Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
        //Imgproc.medianBlur(mRgba, mRgba, 3);

        if (!isColorSelected)
        {
            return;
        }

        // Run the detector first, then read back the contours it found.
        detector.process(rgbaMat);

        List <MatOfPoint> contours = detector.getContours();
        //Debug.Log(contours + " | " + contours.Count);
        //string[] output = contours.ToArray();

        if (contours.Count <= 0)
        {
            return;
        }


        RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

        double boundWidth  = rect.size.width;
        double boundHeight = rect.size.height;
        int    boundPos    = 0;

        for (int i = 1; i < contours.Count; i++)
        {
            rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
            if (rect.size.width * rect.size.height > boundWidth * boundHeight)
            {
                boundWidth  = rect.size.width;
                boundHeight = rect.size.height;
                boundPos    = i;
            }
        }

        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
        Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
        //tochkaX = boundRect.tl ().x;
        //tochkaY = boundRect.tl ().y;
        Imgproc.circle(rgbaMat, boundRect.tl(), 6, new Scalar(0, 255, 0, 255), -1);
        Imgproc.circle(rgbaMat, boundRect.br(), 6, new Scalar(0, 255, 0, 255), -1);
        pointbX = boundRect.br().x;
        pointbY = boundRect.br().y;
        pointaX = boundRect.x;
        pointaY = boundRect.y;
        double a = boundRect.br().y - boundRect.tl().y;

        a = a * 0.7;
        a = boundRect.tl().y + a;
        Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);
        MatOfPoint2f pointMat = new MatOfPoint2f();

        Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
        contours[boundPos] = new MatOfPoint(pointMat.toArray());
        MatOfInt  hull         = new MatOfInt();
        MatOfInt4 convexDefect = new MatOfInt4();

        Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
        if (hull.toArray().Length < 3)
        {
            return;
        }
        Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);
        List <MatOfPoint> hullPoints = new List <MatOfPoint>();
        List <Point>      listPo     = new List <Point>();

        for (int j = 0; j < hull.toList().Count; j++)
        {
            listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
        }
        MatOfPoint e = new MatOfPoint();

        e.fromList(listPo);
        hullPoints.Add(e);
        List <MatOfPoint> defectPoints = new List <MatOfPoint>();
        List <Point>      listPoDefect = new List <Point>();

        for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
        {
            Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
            int   depth    = convexDefect.toList()[j + 3];
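            // depth is fixed-point (pixel distance * 256), so 8700 is ~34 px.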
            if (depth > 8700 && farPoint.y < a)
            {
                listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
            }
        }

        MatOfPoint e2 = new MatOfPoint();

        e2.fromList(listPoDefect);
        defectPoints.Add(e2);
        Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
        this.numberOfFingers = listPoDefect.Count;
        if (this.numberOfFingers > 5)
        {
            this.numberOfFingers = 5;
        }
        foreach (Point p in listPoDefect)
        {
            Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
        }
    }
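For reference, the defect loop above decodes Imgproc.convexityDefects output, which packs each defect as four ints [startIdx, endIdx, farIdx, depth], with depth in fixed point (pixel distance * 256). A self-contained sketch of the same decoding with the threshold expressed in pixels (the helper name is illustrative, not from the original):

    // Illustrative helper: collect the "far" points of all convexity
    // defects deeper than minDepthPx. Mirrors the loop above, with the
    // fixed-point depth (value = pixels * 256) converted explicitly.
    static List<Point> deepDefectPoints(MatOfPoint contour, MatOfInt4 defects, double minDepthPx)
    {
        List<Point> result = new List<Point>();
        List<Point> pts = contour.toList();
        List<int> d = defects.toList();
        for (int j = 0; j + 3 < d.Count; j += 4)
        {
            double depthPx = d[j + 3] / 256.0; // fixed-point -> pixels
            if (depthPx > minDepthPx)
            {
                result.Add(pts[d[j + 2]]);     // deepest point between two hull points
            }
        }
        return result;
    }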
Example #20
0
        /// <summary>
        /// Hands the pose estimation process.
        /// </summary>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            // Run the detector first, then read back the contours it found.
            detector.process(rgbaMat);

            List <MatOfPoint> contours = detector.getContours();

//						Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours [boundPos].toArray()));
            Core.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

//						Debug.Log (
//						" Row start [" +
//								(int)boundRect.tl ().y + "] row end [" +
//								(int)boundRect.br ().y + "] Col start [" +
//								(int)boundRect.tl ().x + "] Col end [" +
//								(int)boundRect.br ().x + "]");


            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

//						Debug.Log (
//						" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
            Core.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours [boundPos].toArray()), pointMat, 3, true);
            contours [boundPos] = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours [boundPos].toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contours [boundPos].toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();
            List <Point>      listPo     = new List <Point> ();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contours [boundPos].toList() [hull.toList() [j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <MatOfPoint> defectPoints = new List <MatOfPoint> ();
            List <Point>      listPoDefect = new List <Point> ();

            for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
            {
                Point farPoint = contours [boundPos].toList() [convexDefect.toList() [j + 2]];
                int   depth    = convexDefect.toList() [j + 3];
                if (depth > threasholdSlider.value && farPoint.y < a)
                {
                    listPoDefect.Add(contours [boundPos].toList() [convexDefect.toList() [j + 2]]);
                }
//								Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
            }

            MatOfPoint e2 = new MatOfPoint();

            e2.fromList(listPoDefect);
            defectPoints.Add(e2);

//						Debug.Log ("hull: " + hull.toList ());
//						Debug.Log ("defects: " + convexDefect.toList ());

            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

//                      int defectsTotal = (int)convexDefect.total();
//						Debug.Log ("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

//						Debug.Log ("numberOfFingers " + numberOfFingers);

//						Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Core.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
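Both hand-pose examples perform the same hull-index conversion: Imgproc.convexHull returns indices into the contour, which must be mapped back to points before Imgproc.drawContours can draw them. A compact sketch of that step (the helper name is illustrative):

        // Illustrative helper: map convexHull's index output back to
        // contour points so the hull can be drawn with drawContours.
        static MatOfPoint hullIndicesToPoints(MatOfPoint contour, MatOfInt hull)
        {
            List<Point> contourPts = contour.toList();
            List<Point> hullPts = new List<Point>();
            foreach (int idx in hull.toList())
            {
                hullPts.Add(contourPts[idx]);
            }
            MatOfPoint m = new MatOfPoint();
            m.fromList(hullPts);
            return m;
        }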
Example #21
0
    private OpenCVForUnity.Rect BgSub()
    {
        backgroundSubstractorMOG2.apply(rgbMat, fgmaskMat);

        roiRect      = null;
        fgmaskMatRoi = fgmaskMat;

        Mat kernelD = new Mat(40, 40, CvType.CV_8UC1, new Scalar(255, 255, 255));
        Mat kernelE = new Mat(20, 20, CvType.CV_8UC1, new Scalar(255, 255, 255));

        Mat kernelDRoi = new Mat(1, 1, CvType.CV_8UC1, new Scalar(255, 255, 255));
        Mat kernelERoi = new Mat(1, 1, CvType.CV_8UC1, new Scalar(255, 255, 255));

        Imgproc.dilate(fgmaskMat, fgmaskMatDilate, kernelD);
        Imgproc.erode(fgmaskMatDilate, fgmaskMatDilate, kernelE);

        Imgproc.dilate(fgmaskMatRoi, fgmaskMatDilateRoi, kernelDRoi);
        Imgproc.erode(fgmaskMatDilateRoi, fgmaskMatDilateRoi, kernelERoi);

        mask_binary    = new OpenCVForUnity.Mat();
        mask_binaryRoi = new OpenCVForUnity.Mat();

        Imgproc.threshold(fgmaskMatDilate, mask_binary, 123, 255, Imgproc.THRESH_BINARY);
        Imgproc.threshold(fgmaskMatDilateRoi, mask_binaryRoi, 123, 255, Imgproc.THRESH_BINARY);

        List <MatOfPoint> contours = new List <MatOfPoint>();

        OpenCVForUnity.Mat hierarchy = new OpenCVForUnity.Mat();

        Imgproc.findContours(mask_binary, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);

        if (contours.Count == 0)
        {
            return(null);
        }
        else
        {
            int i = 0;
            color1 = new Color(0.8f, 0.8f, 0.95f, 0.25f);
            color2 = new Color(0.8f, 0.8f, 0.95f);
            foreach (MatOfPoint contour in contours)
            {
                //Debug.Log("number of target: " + i);
                MatOfPoint new_mat1 = new MatOfPoint(contour.toArray());
                output = Imgproc.boundingRect(new_mat1);
                rgbMat.copyTo(dest, mask_binaryRoi);
                //SaveMatToFile("mask_binary" + ss, mask_binary);
                //SaveMatToFile("mask_binaryRoi" + ss, mask_binaryRoi);
                Imgproc.rectangle(rgbMat, output.tl(), output.br(), new Scalar(255, 0, 0), 2);
                output_ar.Add(output);
                Vector3          top_left_pos     = new Vector3(output.x, Screen.height - output.y);
                Vector3          bottom_right_pos = new Vector3(output.x + output.width, Screen.height - (output.y + output.height));
                UnityEngine.Rect check_pos        = GetScreenRect(top_left_pos, bottom_right_pos);
                i++;
                if (Input.GetMouseButton(0) && check_pos.Contains(new Vector2(Input.mousePosition.x, Screen.height - Input.mousePosition.y)))
                {
                    Debug.Log("take it!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
                    //skipFrame = 50;
                    //shouldStartCamShift = true;
                    Debug.Log(output);
                    return(output);
                }

                /*else
                 * {
             *  MatOfPoint new_mat2 = new MatOfPoint(contours[0].toArray()); // takes the largest blob; it is the first because findContours puts them in order from largest to smallest.
                 *  output = Imgproc.boundingRect(new_mat2);
                 * }*/
            }
            //OnGUI();
            return(null);
        }
    }
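Note that BgSub rebuilds its four kernel Mats on every call and never disposes them, which churns native memory each frame. A lighter sketch, assuming rectangular structuring elements are acceptable, creates them once (e.g. in Start) via Imgproc.getStructuringElement:

    // Assumed one-time setup (e.g. in Start): reusable structuring elements
    // instead of fresh 40x40 / 20x20 Mats allocated on every frame.
    Mat kernelD = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(40, 40));
    Mat kernelE = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(20, 20));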
Example #22
0
        // Use this for initialization
        void Start()
        {
            Texture2D imgTexture = Resources.Load("matchshapes") as Texture2D;

            Mat srcMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC1);

            Utils.texture2DToMat(imgTexture, srcMat);
            Debug.Log("srcMat.ToString() " + srcMat.ToString());

            Mat dstMat = new Mat(srcMat.size(), CvType.CV_8UC3);

            //labeling
            Mat labels    = new Mat();
            Mat stats     = new Mat();
            Mat centroids = new Mat();
            int total     = Imgproc.connectedComponentsWithStats(srcMat, labels, stats, centroids);

            Debug.Log("labels.ToString() " + labels.ToString());
            Debug.Log("stats.ToString() " + stats.ToString());
            Debug.Log("centroids.ToString() " + centroids.ToString());
            Debug.Log("total " + total);

            // determine drawing color
            List <Scalar> colors = new List <Scalar> (total);

            colors.Add(new Scalar(0, 0, 0));
            for (int i = 1; i < total; ++i)
            {
                colors.Add(new Scalar(Random.Range(0, 255), Random.Range(0, 255), Random.Range(0, 255)));
            }

            // draw labels
            for (int i = 0; i < dstMat.rows(); ++i)
            {
                for (int j = 0; j < dstMat.cols(); ++j)
                {
                    Scalar color = colors [(int)labels.get(i, j) [0]];
                    dstMat.put(i, j, color.val [0], color.val [1], color.val [2]);
                }
            }

            // draw rectangle
            for (int i = 1; i < total; ++i)
            {
                int x      = (int)stats.get(i, Imgproc.CC_STAT_LEFT) [0];
                int y      = (int)stats.get(i, Imgproc.CC_STAT_TOP) [0];
                int height = (int)stats.get(i, Imgproc.CC_STAT_HEIGHT) [0];
                int width  = (int)stats.get(i, Imgproc.CC_STAT_WIDTH) [0];

                OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(x, y, width, height);

                Imgproc.rectangle(dstMat, rect.tl(), rect.br(), new Scalar(0, 255, 0), 2);
            }

            // draw centroids
            for (int i = 1; i < total; ++i)
            {
                int x = (int)centroids.get(i, 0) [0];
                int y = (int)centroids.get(i, 1) [0];

                Imgproc.circle(dstMat, new Point(x, y), 3, new Scalar(255, 0, 0), -1);
            }

            // draw index of label
            for (int i = 1; i < total; ++i)
            {
                int x = (int)stats.get(i, Imgproc.CC_STAT_LEFT) [0];
                int y = (int)stats.get(i, Imgproc.CC_STAT_TOP) [0];

                Imgproc.putText(dstMat, "" + i, new Point(x + 5, y + 15), Core.FONT_HERSHEY_COMPLEX, 0.5, new Scalar(255, 255, 0), 2);
            }


            Texture2D texture = new Texture2D(dstMat.cols(), dstMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(dstMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
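The stats Mat returned by Imgproc.connectedComponentsWithStats has one row per label, with columns addressed by the Imgproc.CC_STAT_* constants, so the three drawing loops above could share a small accessor (illustrative, not part of the original):

        // Illustrative accessor: bounding rect of one connected component,
        // read from the per-label row of the stats Mat.
        static OpenCVForUnity.Rect componentRect(Mat stats, int label)
        {
            int x = (int)stats.get(label, Imgproc.CC_STAT_LEFT) [0];
            int y = (int)stats.get(label, Imgproc.CC_STAT_TOP) [0];
            int w = (int)stats.get(label, Imgproc.CC_STAT_WIDTH) [0];
            int h = (int)stats.get(label, Imgproc.CC_STAT_HEIGHT) [0];
            return new OpenCVForUnity.Rect(x, y, w, h);
        }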
Example #23
0
        // Main loop that runs forever, until the user hits Escape to quit.
        private void recognizeAndTrainUsingWebcam(Mat cameraFrame, CascadeClassifier faceCascade, CascadeClassifier eyeCascade1, CascadeClassifier eyeCascade2)
        {
            if (cameraFrame != null && cameraFrame.empty())
            {
                Debug.LogError("ERROR: Couldn't grab the next camera frame.");
                return;
            }

            // Get a copy of the camera frame that we can draw onto.
            Mat displayedFrame = cameraFrame;

            int   cx;
            float current_processingTime     = Time.realtimeSinceStartup;
            float processingTimeDiff_seconds = (current_processingTime - old_processingTime);

            if (processingTimeDiff_seconds > CHANGE_IN_SECONDS_FOR_PROCESSING)
            {
                // Run the face recognition system on the camera image. It will draw some things onto the given image, so make sure it is not read-only memory!
                int identity = -1;

                // Find a face and preprocess it to have a standard size and contrast & brightness.
                Rect  faceRect = new Rect();                                       // Position of detected face.
                Rect  searchedLeftEye = new Rect(), searchedRightEye = new Rect(); // top-left and top-right regions of the face, where eyes were searched.
                Point leftEye = new Point(), rightEye = new Point();               // Position of the detected eyes.

                Mat preprocessedFace = PreprocessFace.GetPreprocessedFace(displayedFrame, faceWidth, faceCascade, eyeCascade1, eyeCascade2, preprocessLeftAndRightSeparately, ref faceRect, ref leftEye, ref rightEye, ref searchedLeftEye, ref searchedRightEye);

                bool gotFaceAndEyes = false;

                if (preprocessedFace != null && !preprocessedFace.empty())
                {
                    gotFaceAndEyes = true;
                }

                // Draw an anti-aliased rectangle around the detected face.
                if (faceRect.width > 0)
                {
                    Imgproc.rectangle(displayedFrame, faceRect.tl(), faceRect.br(), YELLOW, 2, Imgproc.LINE_AA, 0);

                    // Draw light-blue anti-aliased circles for the 2 eyes.
                    Scalar eyeColor = LIGHT_BLUE;
                    if (leftEye.x >= 0)     // Check if the eye was detected
                    {
                        Imgproc.circle(displayedFrame, new Point(faceRect.x + leftEye.x, faceRect.y + leftEye.y), 6, eyeColor, 1, Imgproc.LINE_AA, 0);
                    }
                    if (rightEye.x >= 0)     // Check if the eye was detected
                    {
                        Imgproc.circle(displayedFrame, new Point(faceRect.x + rightEye.x, faceRect.y + rightEye.y), 6, eyeColor, 1, Imgproc.LINE_AA, 0);
                    }
                }

                prev_prepreprocessedFace = preprocessedFace;

                if (m_mode == MODES.MODE_DETECTION)
                {
                    // Don't do anything special.
                }
                else if (m_mode == MODES.MODE_COLLECT_FACES)
                {
                    // Check if we have detected a face.
                    if (gotFaceAndEyes)
                    {
                        // Check if this face looks somewhat different from the previously collected face.
                        double imageDiff = 10000000000.0d;
                        if (old_prepreprocessedFace != null && !old_prepreprocessedFace.empty())
                        {
                            imageDiff = Recognition.GetSimilarity(preprocessedFace, old_prepreprocessedFace);
                        }

                        // Also record when it happened.
                        double current_time     = Time.realtimeSinceStartup;
                        double timeDiff_seconds = (current_time - old_time);

                        // Only process the face if it is noticeably different from the previous frame and there has been noticeable time gap.
                        if ((imageDiff > CHANGE_IN_IMAGE_FOR_COLLECTION) && (timeDiff_seconds > CHANGE_IN_SECONDS_FOR_COLLECTION))
                        {
                            // Also add the mirror image to the training set, so we have more training data, as well as to deal with faces looking to the left or right.
                            Mat mirroredFace = new Mat();
                            Core.flip(preprocessedFace, mirroredFace, 1);

                            // Add the face images to the list of detected faces.
                            preprocessedFaces.Add(preprocessedFace);
                            preprocessedFaces.Add(mirroredFace);
                            faceLabels.Add(m_selectedPerson);
                            faceLabels.Add(m_selectedPerson);

                            // Keep a reference to the latest face of each person.
                            m_latestFaces [m_selectedPerson] = preprocessedFaces.Count - 2;  // Point to the non-mirrored face.
                            // Show the number of collected faces. But since we also store mirrored faces, just show how many the user thinks they stored.
                            Debug.Log("Saved face " + (preprocessedFaces.Count / 2) + " for person " + m_selectedPerson);

                            // Make a white flash on the face, so the user knows a photo has been taken.
                            using (Mat displayedFaceRegion = new Mat(displayedFrame, faceRect)) {
                                Core.add(displayedFaceRegion, DARK_GRAY, displayedFaceRegion);
                            }

                            // Keep a copy of the processed face, to compare on next iteration.
                            old_prepreprocessedFace = preprocessedFace;
                            old_time = current_time;
                        }
                    }
                }
                else if (m_mode == MODES.MODE_TRAINING)
                {
                    // Check if there is enough data to train from. For Eigenfaces, we can learn just one person if we want, but for Fisherfaces,
                    // we need atleast 2 people otherwise it will crash!
                    bool haveEnoughData = true;
                    if (facerecAlgorithm == "FaceRecognizer.Fisherfaces")
                    {
                        if ((m_numPersons < 2) || (m_numPersons == 2 && m_latestFaces [1] < 0))
                        {
                            Debug.Log("Warning: Fisherfaces needs atleast 2 people, otherwise there is nothing to differentiate! Collect more data ...");
                            haveEnoughData = false;
                        }
                    }
                    if (m_numPersons < 1 || preprocessedFaces.Count <= 0 || preprocessedFaces.Count != faceLabels.Count)
                    {
                        Debug.Log("Warning: Need some training data before it can be learnt! Collect more data ...");
                        haveEnoughData = false;
                    }

                    if (haveEnoughData)
                    {
                        // Start training from the collected faces using Eigenfaces or a similar algorithm.
                        model = Recognition.LearnCollectedFaces(preprocessedFaces, faceLabels, facerecAlgorithm);

                        // Show the internal face recognition data, to help debugging.
                        //if (m_debug)
                        //Recognition.ShowTrainingDebugData(model, faceWidth, faceHeight);

                        // Now that training is over, we can start recognizing!
                        m_mode = MODES.MODE_RECOGNITION;
                    }
                    else
                    {
                        // Since there isn't enough training data, go back to the face collection mode!
                        m_mode = MODES.MODE_COLLECT_FACES;
                    }
                }
                else if (m_mode == MODES.MODE_RECOGNITION)
                {
                    prev_identity   = -1;
                    prev_similarity = 100000000.0d;
                    if (reconstructedFace != null && !reconstructedFace.IsDisposed)
                    {
                        reconstructedFace.Dispose();
                    }
                    reconstructedFace = null;

                    if (gotFaceAndEyes && (preprocessedFaces.Count > 0) && (preprocessedFaces.Count == faceLabels.Count))
                    {
                        // Generate a face approximation by back-projecting the eigenvectors & eigenvalues.
                        reconstructedFace = Recognition.ReconstructFace(model, preprocessedFace);

                        // Verify whether the reconstructed face looks like the preprocessed face, otherwise it is probably an unknown person.
                        double similarity = Recognition.GetSimilarity(preprocessedFace, reconstructedFace);
                        double confidence = 0.0d;

                        string outputStr;
                        if (similarity < UNKNOWN_PERSON_THRESHOLD)
                        {
                            int[]    predictedLabel      = new int [1];
                            double[] predictedConfidence = new double [1];
                            // Identify who the person is in the preprocessed face image.
                            model.predict(preprocessedFace, predictedLabel, predictedConfidence);
                            identity   = predictedLabel [0];
                            confidence = predictedConfidence [0];

                            outputStr     = identity.ToString();
                            prev_identity = identity;
                        }
                        else
                        {
                            // Since the confidence is low, assume it is an unknown person.
                            outputStr = "Unknown";
                        }
                        prev_similarity = similarity;
                        Debug.Log("Identity: " + outputStr + ". Similarity: " + similarity + ". Confidence: " + confidence);
                    }
                }
                else if (m_mode == MODES.MODE_DELETE_ALL)
                {
                    // Restart everything!
                    dispose();

                    // Restart in Detection mode.
                    m_mode = MODES.MODE_DETECTION;
                }
                else
                {
                    Debug.LogError("ERROR: Invalid run mode " + m_mode);
                    //exit(1);
                }

                old_processingTime = current_processingTime;
            }

            // Show the help, while also showing the number of collected faces. Since we also collect mirrored faces, we should just
            // tell the user how many faces they think we saved (ignoring the mirrored faces), hence divide by 2.
            strBuilder.Length = 0;
            Rect rcHelp = new Rect();

            if (m_mode == MODES.MODE_DETECTION)
            {
                strBuilder.Append("Click [Add Person] when ready to collect faces.");
            }
            else if (m_mode == MODES.MODE_COLLECT_FACES)
            {
                strBuilder.Append("Click anywhere to train from your ");
                strBuilder.Append(preprocessedFaces.Count / 2);
                strBuilder.Append(" faces of ");
                strBuilder.Append(m_numPersons);
                strBuilder.Append(" people.");
            }
            else if (m_mode == MODES.MODE_TRAINING)
            {
                strBuilder.Append("Please wait while your ");
                strBuilder.Append(preprocessedFaces.Count / 2);
                strBuilder.Append(" faces of ");
                strBuilder.Append(m_numPersons);
                strBuilder.Append(" people builds.");
            }
            else if (m_mode == MODES.MODE_RECOGNITION)
            {
                strBuilder.Append("Click people on the right to add more faces to them, or [Add Person] for someone new.");
            }

            if (strBuilder.Length > 0)
            {
                // Draw it with a black background and then again with a white foreground.
                // Since BORDER may be 0 and we need a negative position, subtract 2 from the border so it is always negative.
                float txtSize = 0.4f;
                drawString(displayedFrame, strBuilder.ToString(), new Point(BORDER, -BORDER - 2), BLACK, txtSize);              // Black shadow.
                rcHelp = drawString(displayedFrame, strBuilder.ToString(), new Point(BORDER + 1, -BORDER - 1), WHITE, txtSize); // White text.
            }

            // Show the current mode.
            strBuilder.Length = 0;
            if (m_mode >= 0 && m_mode < MODES.MODE_END)
            {
                strBuilder.Append(" people builds.");
                strBuilder.Append(MODE_NAMES [(int)m_mode]);
                drawString(displayedFrame, strBuilder.ToString(), new Point(BORDER, -BORDER - 2 - rcHelp.height), BLACK);     // Black shadow
                drawString(displayedFrame, strBuilder.ToString(), new Point(BORDER + 1, -BORDER - 1 - rcHelp.height), GREEN); // Green text
            }

            // Show the current preprocessed face in the top-center of the display.
            cx = (displayedFrame.cols() - faceWidth) / 2;
            if (prev_prepreprocessedFace != null && !prev_prepreprocessedFace.empty())
            {
                // Get a RGBA version of the face, since the output is RGBA color.
                using (Mat srcRGBA = new Mat(prev_prepreprocessedFace.size(), CvType.CV_8UC4)) {
                    Imgproc.cvtColor(prev_prepreprocessedFace, srcRGBA, Imgproc.COLOR_GRAY2RGBA);
                    // Get the destination ROI (and make sure it is within the image!).
                    Rect dstRC = new Rect(cx, BORDER, faceWidth, faceHeight);
                    using (Mat dstROI = new Mat(displayedFrame, dstRC)) {
                        // Copy the pixels from src to dst.
                        srcRGBA.copyTo(dstROI);
                    }
                }
            }

            // Draw an anti-aliased border around the face, even if it is not shown.
            Imgproc.rectangle(displayedFrame, new Point(cx - 1, BORDER - 1), new Point(cx - 1 + faceWidth + 2, BORDER - 1 + faceHeight + 2), LIGHT_GRAY, 1, Imgproc.LINE_AA, 0);

            // Show the most recent face for each of the collected people, on the right side of the display.
            m_gui_faces_left = displayedFrame.cols() - BORDER - faceWidth;
            m_gui_faces_top  = BORDER;
            for (int i = 0; i < m_numPersons; i++)
            {
                int index = m_latestFaces [i];
                if (index >= 0 && index < preprocessedFaces.Count)
                {
                    Mat srcGray = preprocessedFaces [index];
                    if (srcGray != null && !srcGray.empty())
                    {
                        // Get a RGBA version of the face, since the output is RGBA color.
                        using (Mat srcRGBA = new Mat(srcGray.size(), CvType.CV_8UC4)) {
                            Imgproc.cvtColor(srcGray, srcRGBA, Imgproc.COLOR_GRAY2RGBA);
                            // Get the destination ROI (and make sure it is within the image!).
                            int  y     = Mathf.Min(m_gui_faces_top + i * faceHeight, displayedFrame.rows() - faceHeight);
                            Rect dstRC = new Rect(m_gui_faces_left, y, faceWidth, faceHeight);
                            using (Mat dstROI = new Mat(displayedFrame, dstRC)) {
                                // Copy the pixels from src to dst.
                                srcRGBA.copyTo(dstROI);
                            }
                        }
                    }
                }
            }

            // Highlight the person being collected, using a red rectangle around their face.
            if (m_mode == MODES.MODE_COLLECT_FACES)
            {
                if (m_selectedPerson >= 0 && m_selectedPerson < m_numPersons)
                {
                    int  y  = Mathf.Min(m_gui_faces_top + m_selectedPerson * faceHeight, displayedFrame.rows() - faceHeight);
                    Rect rc = new Rect(m_gui_faces_left, y, faceWidth, faceHeight);
                    Imgproc.rectangle(displayedFrame, rc.tl(), rc.br(), RED, 3, Imgproc.LINE_AA, 0);
                }
            }

            // Highlight the person that has been recognized, using a green rectangle around their face.
            if (m_mode == MODES.MODE_RECOGNITION && prev_identity >= 0 && prev_identity < 1000)
            {
                int  y  = Mathf.Min(m_gui_faces_top + prev_identity * faceHeight, displayedFrame.rows() - faceHeight);
                Rect rc = new Rect(m_gui_faces_left, y, faceWidth, faceHeight);
                Imgproc.rectangle(displayedFrame, rc.tl(), rc.br(), GREEN, 3, Imgproc.LINE_AA, 0);
            }

            if (m_mode == MODES.MODE_RECOGNITION)
            {
                if (m_debug)
                {
                    if (reconstructedFace != null && !reconstructedFace.empty())
                    {
                        cx = (displayedFrame.cols() - faceWidth) / 2;
                        Point rfDebugBottomRight = new Point(cx + faceWidth * 2 + 5, BORDER + faceHeight);
                        Point rfDebugTopLeft     = new Point(cx + faceWidth + 5, BORDER);
                        Rect  rfDebugRC          = new Rect(rfDebugTopLeft, rfDebugBottomRight);
                        using (Mat srcRGBA = new Mat(reconstructedFace.size(), CvType.CV_8UC4)) {
                            Imgproc.cvtColor(reconstructedFace, srcRGBA, Imgproc.COLOR_GRAY2RGBA);
                            using (Mat dstROI = new Mat(displayedFrame, rfDebugRC)) {
                                srcRGBA.copyTo(dstROI);
                            }
                        }
                        Imgproc.rectangle(displayedFrame, rfDebugTopLeft, rfDebugBottomRight, LIGHT_GRAY, 1, Imgproc.LINE_AA, 0);
                    }
                }

                // Show the confidence rating for the recognition in the mid-top of the display.
                cx = (displayedFrame.cols() - faceWidth) / 2;
                Point ptBottomRight = new Point(cx - 5, BORDER + faceHeight);
                Point ptTopLeft     = new Point(cx - 15, BORDER);
                // Draw a gray line showing the threshold for an "unknown" person.
                Point ptThreshold = new Point(ptTopLeft.x, ptBottomRight.y - (1.0 - UNKNOWN_PERSON_THRESHOLD) * faceHeight);
                Imgproc.rectangle(displayedFrame, ptThreshold, new Point(ptBottomRight.x, ptThreshold.y), LIGHT_GRAY, 1, Imgproc.LINE_AA, 0);
                // Crop the confidence rating between 0.0 to 1.0, to show in the bar.
                double confidenceRatio = 1.0d - Math.Min(Math.Max(prev_similarity, 0.0d), 1.0d);
                Point  ptConfidence    = new Point(ptTopLeft.x, ptBottomRight.y - confidenceRatio * faceHeight);
                // Show the light-blue confidence bar.
                Imgproc.rectangle(displayedFrame, ptConfidence, ptBottomRight, LIGHT_BLUE, Core.FILLED, Imgproc.LINE_AA, 0);
                // Show the gray border of the bar.
                Imgproc.rectangle(displayedFrame, ptTopLeft, ptBottomRight, LIGHT_GRAY, 1, Imgproc.LINE_AA, 0);
            }

            /*
             * // If the user wants all the debug data, show it to them!
             * if (m_debug)
             * {
             *  Mat face = new Mat();
             *  if (faceRect.width > 0)
             *  {
             *      face = new Mat(cameraFrame, faceRect);
             *      if (searchedLeftEye.width > 0 && searchedRightEye.width > 0)
             *      {
             *          Mat topLeftOfFace = new Mat(face, searchedLeftEye);
             *          Mat topRightOfFace = new Mat(face, searchedRightEye);
             *          //imshow("topLeftOfFace", topLeftOfFace);
             *          //imshow("topRightOfFace", topRightOfFace);
             *      }
             *  }
             *
             *  //if (model != null)
             *      //showTrainingDebugData(model, faceWidth, faceHeight);
             * }
             */
        }
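Recognition.GetSimilarity is not shown in this excerpt. In the "Mastering OpenCV" face-recognition sample this code follows, it is the L2 distance between the two images normalized by pixel count, which makes UNKNOWN_PERSON_THRESHOLD a per-pixel error bound; a sketch under that assumption:

        // Assumed shape of Recognition.GetSimilarity (per the Mastering OpenCV
        // sample): average per-pixel L2 error between two same-sized faces.
        static double getSimilarity(Mat a, Mat b)
        {
            double errorL2 = Core.norm(a, b, Core.NORM_L2); // Euclidean distance
            return errorL2 / (double)(a.rows() * a.cols()); // normalize by size
        }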
Example #24
0
        // Main loop that runs forever, until the user hits Escape to quit.
        private void recognizeAndTrainUsingWebcam(Mat cameraFrame, CascadeClassifier faceCascade, CascadeClassifier eyeCascade1, CascadeClassifier eyeCascade2)
        {
            if (cameraFrame != null && cameraFrame.total() == 0)
            {
                Debug.LogError("ERROR: Couldn't grab the next camera frame.");
                return;
            }

            // Get a copy of the camera frame that we can draw onto.
            Mat displayedFrame = cameraFrame;

            int   cx;
            float current_processingTime     = Time.realtimeSinceStartup;
            float processingTimeDiff_seconds = (current_processingTime - old_processingTime);

            if (processingTimeDiff_seconds > CHANGE_IN_SECONDS_FOR_PROCESSING)
            {
                // Run the face recognition system on the camera image. It will draw some things onto the given image, so make sure it is not read-only memory!
                int identity = -1;

                // Find a face and preprocess it to have a standard size and contrast & brightness.
                Rect  faceRect = new Rect();                                       // Position of detected face.
                Rect  searchedLeftEye = new Rect(), searchedRightEye = new Rect(); // top-left and top-right regions of the face, where eyes were searched.
                Point leftEye = new Point(), rightEye = new Point();               // Position of the detected eyes.

                Mat preprocessedFace = PreprocessFace.GetPreprocessedFace(displayedFrame, faceWidth, faceCascade, eyeCascade1, eyeCascade2, preprocessLeftAndRightSeparately, ref faceRect, ref leftEye, ref rightEye, ref searchedLeftEye, ref searchedRightEye);

                bool gotFaceAndEyes = false;

                if (preprocessedFace != null && preprocessedFace.total() > 0)
                {
                    gotFaceAndEyes = true;
                }

                // Draw an anti-aliased rectangle around the detected face.
                if (faceRect.width > 0)
                {
                    Imgproc.rectangle(displayedFrame, faceRect.tl(), faceRect.br(), YELLOW, 2, Imgproc.LINE_AA, 0);

                    // Draw light-blue anti-aliased circles for the 2 eyes.
                    Scalar eyeColor = LIGHT_BLUE;
                    if (leftEye.x >= 0)
                    {   // Check if the eye was detected
                        Imgproc.circle(displayedFrame, new Point(faceRect.x + leftEye.x, faceRect.y + leftEye.y), 6, eyeColor, 1, Imgproc.LINE_AA, 0);
                    }
                    if (rightEye.x >= 0)
                    {   // Check if the eye was detected
                        Imgproc.circle(displayedFrame, new Point(faceRect.x + rightEye.x, faceRect.y + rightEye.y), 6, eyeColor, 1, Imgproc.LINE_AA, 0);
                    }
                }

                prev_prepreprocessedFace = preprocessedFace;

                if (m_mode == R_MODES.MODE_DETECTION)
                {
                    // Don't do anything special.
                }
                else if (m_mode == R_MODES.MODE_RECOGNITION)
                {
                    prev_identity   = -1;
                    prev_similarity = 100000000.0d;
                    if (reconstructedFace != null && !reconstructedFace.IsDisposed)
                    {
                        reconstructedFace.Dispose();
                    }
                    reconstructedFace = null;

                    if (gotFaceAndEyes && (preprocessedFaces.Count > 0) && (preprocessedFaces.Count == faceLabels.Count))
                    {
                        // Generate a face approximation by back-projecting the eigenvectors & eigenvalues.
                        reconstructedFace = Recognition.ReconstructFace(model, preprocessedFace);

                        // Verify whether the reconstructed face looks like the preprocessed face, otherwise it is probably an unknown person.
                        double similarity = Recognition.GetSimilarity(preprocessedFace, reconstructedFace);
                        double confidence = 0.0d;

                        string outputStr;
                        if (similarity < UNKNOWN_PERSON_THRESHOLD)
                        {
                            int[]    predictedLabel      = new int[1];
                            double[] predictedConfidence = new double[1];
                            // Identify who the person is in the preprocessed face image.
                            model.predict(preprocessedFace, predictedLabel, predictedConfidence);
                            identity   = predictedLabel[0];
                            confidence = predictedConfidence[0];

                            outputStr     = identity.ToString();
                            prev_identity = identity;

                            //Display name
                            strBuilder.Length = 0;
                            Rect rcHelp = new Rect();
                            strBuilder.Append(GameManager.instance.personsNames[prev_identity]);

                            if (strBuilder.Length > 0)
                            {
                                // Draw the recognized person's name above the face rectangle.
                                float txtSize = 2.5f;
                                drawString(displayedFrame, strBuilder.ToString(), new Point(faceRect.tl().x, faceRect.tl().y), YELLOW, txtSize); // Yellow text.
                            }
                        }
                        else
                        {
                            // Since the confidence is low, assume it is an unknown person.
                            outputStr = "Unknown";
                        }
                        prev_similarity = similarity;
                        Debug.Log("Identity: " + outputStr + ". Similarity: " + similarity + ". Confidence: " + confidence);
                    }
                }
                else if (m_mode == R_MODES.MODE_DELETE_ALL)
                {
                    // Restart everything!
                    dispose();

                    // Restart in Detection mode.
                    m_mode = R_MODES.MODE_DETECTION;
                }
                else
                {
                    Debug.LogError("ERROR: Invalid run mode " + m_mode);
                    //exit(1);
                }

                old_processingTime = current_processingTime;
            }

            // Show the help, while also showing the number of collected faces. Since we also collect mirrored faces, we should just
            // tell the user how many faces they think we saved (ignoring the mirrored faces), hence divide by 2.

            /*strBuilder.Length = 0;
             * Rect rcHelp = new Rect();
             * if (m_mode == R_MODES.MODE_DETECTION)
             * {
             *  strBuilder.Append("Click [Add Person] when ready to collect faces.");
             * }
             * else if (m_mode == R_MODES.MODE_COLLECT_FACES)
             * {
             *  strBuilder.Append(preprocessedFaces.Count / 2);
             *  strBuilder.Append(" faces of ");
             *  strBuilder.Append(m_numPersons);
             *  strBuilder.Append(" people.");
             * }
             * else if (m_mode == R_MODES.MODE_RECOGNITION)
             *  strBuilder.Append("Click people on the right to add more faces to them, or [Add Person] for someone new.");
             *
             * if (strBuilder.Length > 0)
             * {
             *  // Draw it with a black background and then again with a white foreground.
             *  // Since BORDER may be 0 and we need a negative position, subtract 2 from the border so it is always negative.
             *  float txtSize = 0.7f;
             *  drawString(displayedFrame, strBuilder.ToString(), new Point(BORDER, -BORDER - 2), BLACK, txtSize); // Black shadow.
             *  rcHelp = drawString(displayedFrame, strBuilder.ToString(), new Point(BORDER + 1, -BORDER - 1), WHITE, txtSize); // White text.
             * }
             *
             * // Show the current mode.
             * /*strBuilder.Length = 0;
             * if (m_mode >= 0 && m_mode < R_MODES.MODE_END) {
             *  strBuilder.Append (" people builds.");
             *  strBuilder.Append (MODE_NAMES [(int)m_mode]);
             *  drawString (displayedFrame, strBuilder.ToString (), new Point (BORDER, -BORDER - 2 - rcHelp.height), BLACK); // Black shadow
             *  drawString (displayedFrame, strBuilder.ToString (), new Point (BORDER + 1, -BORDER - 1 - rcHelp.height), GREEN); // Green text
             * }*/

            // Show the current preprocessed face in the top-center of the display.

            /*cx = (displayedFrame.cols() - faceWidth) / 2;
             * if (prev_prepreprocessedFace != null && prev_prepreprocessedFace.total() > 0)
             * {
             *  // Get a RGBA version of the face, since the output is RGBA color.
             *  using (Mat srcRGBA = new Mat(prev_prepreprocessedFace.size(), CvType.CV_8UC4))
             *  {
             *      Imgproc.cvtColor(prev_prepreprocessedFace, srcRGBA, Imgproc.COLOR_GRAY2RGBA);
             *      // Get the destination ROI (and make sure it is within the image!).
             *      Rect dstRC = new Rect(cx, BORDER, faceWidth, faceHeight);
             *      using (Mat dstROI = new Mat(displayedFrame, dstRC))
             *      {
             *          // Copy the pixels from src to dst.
             *          srcRGBA.copyTo(dstROI);
             *      }
             *  }
             * }
             *
             * // Draw an anti-aliased border around the face, even if it is not shown.
             * Imgproc.rectangle(displayedFrame, new Point(cx - 1, BORDER - 1), new Point(cx - 1 + faceWidth + 2, BORDER - 1 + faceHeight + 2), LIGHT_GRAY, 1, Imgproc.LINE_AA, 0);
             */

            // Show the most recent face for each of the collected people, on the right side of the display.

            /*m_gui_faces_left = displayedFrame.cols() - BORDER - faceWidth;
             * m_gui_faces_top = BORDER;
             * for (int i = 0; i < m_numPersons; i++)
             * {
             *  int index = m_latestFaces[i];
             *  if (index >= 0 && index < preprocessedFaces.Count)
             *  {
             *      Mat srcGray = preprocessedFaces[index];
             *      if (srcGray != null && srcGray.total() > 0)
             *      {
             *          // Get a RGBA version of the face, since the output is RGBA color.
             *          using (Mat srcRGBA = new Mat(srcGray.size(), CvType.CV_8UC4))
             *          {
             *              Imgproc.cvtColor(srcGray, srcRGBA, Imgproc.COLOR_GRAY2RGBA);
             *              // Get the destination ROI (and make sure it is within the image!).
             *              int y = Mathf.Min(m_gui_faces_top + i * faceHeight, displayedFrame.rows() - faceHeight);
             *              Rect dstRC = new Rect(m_gui_faces_left, y, faceWidth, faceHeight);
             *              using (Mat dstROI = new Mat(displayedFrame, dstRC))
             *              {
             *                  // Copy the pixels from src to dst.
             *                  srcRGBA.copyTo(dstROI);
             *              }
             *          }
             *      }
             *  }
             * }*/

            // Highlight the person being collected, using a red rectangle around their face.

            /* if (m_mode == R_MODES.MODE_COLLECT_FACES)
             * {
             *   if (m_selectedPerson >= 0 && m_selectedPerson < m_numPersons)
             *   {
             *       int y = Mathf.Min(m_gui_faces_top + m_selectedPerson * faceHeight, displayedFrame.rows() - faceHeight);
             *       Rect rc = new Rect(m_gui_faces_left, y, faceWidth, faceHeight);
             *       Imgproc.rectangle(displayedFrame, rc.tl(), rc.br(), RED, 3, Imgproc.LINE_AA, 0);
             *   }
             * }*/

            // Highlight the person that has been recognized, using a green rectangle around their face.

            /*if (m_mode == R_MODES.MODE_RECOGNITION && prev_identity >= 0 && prev_identity < 1000)
             * {
             *   int y = Mathf.Min(m_gui_faces_top + prev_identity * faceHeight, displayedFrame.rows() - faceHeight);
             *   Rect rc = new Rect(m_gui_faces_left, y, faceWidth, faceHeight);
             *   Imgproc.rectangle(displayedFrame, rc.tl(), rc.br(), GREEN, 3, Imgproc.LINE_AA, 0);
             * }
             *
             * if (m_mode == R_MODES.MODE_RECOGNITION)
             * {*/
            /*if (m_debug)
             * {
             *   if (reconstructedFace != null && reconstructedFace.total() > 0)
             *   {
             *       cx = (displayedFrame.cols() - faceWidth) / 2;
             *       Point rfDebugBottomRight = new Point(cx + faceWidth * 2 + 5, BORDER + faceHeight);
             *       Point rfDebugTopLeft = new Point(cx + faceWidth + 5, BORDER);
             *       Rect rfDebugRC = new Rect(rfDebugTopLeft, rfDebugBottomRight);
             *       using (Mat srcRGBA = new Mat(reconstructedFace.size(), CvType.CV_8UC4))
             *       {
             *           Imgproc.cvtColor(reconstructedFace, srcRGBA, Imgproc.COLOR_GRAY2RGBA);
             *           using (Mat dstROI = new Mat(displayedFrame, rfDebugRC))
             *           {
             *               srcRGBA.copyTo(dstROI);
             *           }
             *       }
             *       Imgproc.rectangle(displayedFrame, rfDebugTopLeft, rfDebugBottomRight, LIGHT_GRAY, 1, Imgproc.LINE_AA, 0);
             *   }
             * }*/

            // Show the confidence rating for the recognition in the mid-top of the display.

            /*cx = (displayedFrame.cols() - faceWidth) / 2;
             * Point ptBottomRight = new Point(cx - 5, BORDER + faceHeight);
             * Point ptTopLeft = new Point(cx - 15, BORDER);
             * // Draw a gray line showing the threshold for an "unknown" person.
             * Point ptThreshold = new Point(ptTopLeft.x, ptBottomRight.y - (1.0 - UNKNOWN_PERSON_THRESHOLD) * faceHeight);
             * Imgproc.rectangle(displayedFrame, ptThreshold, new Point(ptBottomRight.x, ptThreshold.y), LIGHT_GRAY, 1, Imgproc.LINE_AA, 0);
             * // Crop the confidence rating between 0.0 to 1.0, to show in the bar.
             * double confidenceRatio = 1.0d - Math.Min(Math.Max(prev_similarity, 0.0d), 1.0d);
             * Point ptConfidence = new Point(ptTopLeft.x, ptBottomRight.y - confidenceRatio * faceHeight);
             * // Show the light-blue confidence bar.
             * Imgproc.rectangle(displayedFrame, ptConfidence, ptBottomRight, LIGHT_BLUE, Core.FILLED, Imgproc.LINE_AA, 0);
             * // Show the gray border of the bar.
             * Imgproc.rectangle(displayedFrame, ptTopLeft, ptBottomRight, LIGHT_GRAY, 1, Imgproc.LINE_AA, 0);*/

            //}

            /*
             * // If the user wants all the debug data, show it to them!
             * if (m_debug)
             * {
             *  Mat face = new Mat();
             *  if (faceRect.width > 0)
             *  {
             *      face = new Mat(cameraFrame, faceRect);
             *      if (searchedLeftEye.width > 0 && searchedRightEye.width > 0)
             *      {
             *          Mat topLeftOfFace = new Mat(face, searchedLeftEye);
             *          Mat topRightOfFace = new Mat(face, searchedRightEye);
             *          //imshow("topLeftOfFace", topLeftOfFace);
             *          //imshow("topRightOfFace", topRightOfFace);
             *      }
             *  }
             *
             *  //if (model != null)
             *      //showTrainingDebugData(model, faceWidth, faceHeight);
             * }
             */
        }
Example #25
0
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());


                Mat rgbaMatClipROI = new Mat(rgbaMat, processingAreaRect);

                rgbaMatClipROI.copyTo(processingAreaMat);


                // Fill the whole frame with black; rgbaMatClipROI is a view into rgbaMat,
                // so the landmarks drawn into it below remain visible.
                Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(0, 0, 0, 0), -1);


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, processingAreaMat);

                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (isUsingDlibFaceDetecter)
                {
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(processingAreaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                    Imgproc.equalizeHist(grayMat, grayMat);

                    cascade.detectMultiScale(grayMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(grayMat.cols() * 0.15, grayMat.cols() * 0.15), new Size());

                    detectResult = faces.toList();


                    // Adjust to dlib's result.
                    foreach (OpenCVForUnity.Rect r in detectResult)
                    {
                        r.y += (int)(r.height * 0.1f);
                    }
                }


                foreach (var rect in detectResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMatClipROI, points, new Scalar(0, 255, 0, 255), 2);

                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMatClipROI, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), new Scalar(255, 0, 0, 255), 2);
                }

                Imgproc.putText(rgbaMatClipROI, "W:" + rgbaMatClipROI.width() + " H:" + rgbaMatClipROI.height() + " SO:" + Screen.orientation, new Point(5, rgbaMatClipROI.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 0, 0, 255), 1, Imgproc.LINE_AA, false);


                Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(255, 0, 0, 255), 2);

                // Draw the processing area rectangle.
                Imgproc.rectangle(rgbaMat, processingAreaRect.tl(), processingAreaRect.br(), new Scalar(255, 255, 0, 255), 2);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbaMat, texture);

                rgbaMatClipROI.Dispose();
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;
                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                texture.wrapMode = TextureWrapMode.Clamp;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }
        }
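Note that rgbaMatClipROI is a submat view into rgbaMat, so drawing into it writes straight into the displayed frame. Detection results, however, are in ROI-local coordinates; a minimal sketch of mapping one back to full-frame coordinates (the helper name is hypothetical):

        // Sketch: offset an ROI-local rect by the processing area's top-left corner.
        static OpenCVForUnity.Rect ToFrameCoordinates(OpenCVForUnity.Rect local, OpenCVForUnity.Rect processingArea)
        {
            return new OpenCVForUnity.Rect(
                local.x + processingArea.x,
                local.y + processingArea.y,
                local.width,
                local.height);
        }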
Example #26
0
    void _Process()
    {
        binaryMatCreator = new BinaryMatCreator();
        binaryMatCreator.setCrUpper(cr_threshold_upper);
        binaryMatCreator.setCrLower(cr_threshold_lower);
        binaryMatCreator.setSUpper(s_threshold_upper);
        binaryMatCreator.setSLower(s_threshold_lower);
        binaryMatCreator.setVUpper(v_threshold_upper);
        binaryMatCreator.setVLower(v_threshold_lower);

        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);
        Mat cameraMat   = rgbMat;
        int imageWidth  = cameraMat.width();
        int imageHeight = cameraMat.height();

        var hSVChannels   = ARUtil.getHSVChannels(cameraMat);
        var yCrCbChannels = ARUtil.getYCrCbChannels(cameraMat);

        OpenCVForUnity.Rect searchRect = new OpenCVForUnity.Rect(0, 0, cameraMat.cols(), cameraMat.rows());


        // Scorer used to judge region candidates
        scorer = new MaguroScorer(searchRect);


        try
        {
            // Generate a binary image of searchRect size from the camera input image
            Mat binaryROIMat = binaryMatCreator.createBinaryMat(cameraMat, searchRect);

            // Find contours in the binary image within the search rectangle
            var contours = ARUtil.findContours(binaryROIMat, searchRect.tl());

            // Build the region candidate set -> score the candidates -> take the top-scoring ones
            var regionSet = new RegionCandidateSet(contours)
                            .elliminateByArea(searchRect, 0.01, 0.9)
                            .score(scorer)
                            .sort();

            if (regionSet.candidates.Count == 0)
            {
                print("first candidates is 0");
                goto show;
            }

            var count   = 0;
            var regions = new List <Region>();
            foreach (var candidate in regionSet.candidates)
            {
                if (count > 2)
                {
                    break;
                }
                if (candidate == null)
                {
                    print("candite is null");
                    break;
                }
                // Create the region
                foodRegion = new Region(candidate, cameraMat);
                regions.Add(foodRegion);
                count++;
            }


            //var regions = new List<Region>();
            //var cr_refined_threshold = 125;

            //foreach (var candidate in regionSet.candidates)
            //{

            //    // Create the region
            //    var region = new Region(candidate, cameraMat);
            //    var refinedRegion = region.createRefienedRegion(yCrCbChannels[1], cr_refined_threshold);
            //    if (refinedRegion != null)
            //    {
            //        regions.Add(refinedRegion);
            //    }
            //}

            //var filteredRegions = Region.elliminateByInclusionRect(regions);


            // Create the texture to paste onto the food region
            var texture = _textureCreator.create(cameraMat, regions);

            if (texture == null)
            {
                print("regions is empty");
                goto show;
            }

            Mat alphaMask = Mat.zeros(cameraMat.size(), CvType.CV_8U);
            foreach (var region in regions)
            {
                Core.add(region.mask, alphaMask, alphaMask);
            }

            if (willChange)
            {
                _textureCreator.alphaBlend(cameraMat, texture, alphaMask, alpha);
            }

            if (shouldDrawRect)
            {
                // foodRegion.drawRect(matForDeveloper);
                Imgproc.rectangle(cameraMat, searchRect.tl(), searchRect.br(), new Scalar(0, 0, 255), 3);
            }
        }
        catch (System.Exception e)
        {
            print(e);
            goto show;
        }

show:
        outputScreenQuad.setMat(cameraMat);
    }
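The mask-merging loop above accumulates binary region masks with Core.add, which saturates at 255 for CV_8U data. A sketch of the same union written with Core.bitwise_or, which states the set-union intent more directly (equivalent for 0/255 masks):

        // Sketch: union of 8-bit binary region masks.
        Mat alphaMask = Mat.zeros(cameraMat.size(), CvType.CV_8U);
        foreach (var region in regions)
        {
            Core.bitwise_or(region.mask, alphaMask, alphaMask);
        }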
Example #27
0
        // Update is called once per frame
        void Update()
        {
            // Debug.Log("Did update this frame: " + webCamTextureToMatHelper.didUpdateThisFrame());
            // Debug.Log("WebCam Texture is playing: " + webCamTextureToMatHelper.isPlaying());
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                ///////////////////////////////////////////////////////////////
                // Acquire the next frame from the camera and undistort if necessary
                ///////////////////////////////////////////////////////////////
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // Note: "&& false" short-circuits this condition, so the undistortion branch below never runs.
                if (AppControl.control.calibrationComplete && false)
                {
                    Mat rgbaMatUndistorted = rgbaMat.clone();
                    Imgproc.undistort(rgbaMat, rgbaMatUndistorted, AppControl.control.cameraMatrix, AppControl.control.distCoeffs);
                    rgbaMat = rgbaMatUndistorted;
                }

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                ///////////////////////////////////////////////////////////////
                // If the object color spectrum is initialized, find the object
                ///////////////////////////////////////////////////////////////
                // Debug.Log("Number of ROI points: " + roiPointList.Count);
                Point[] points = roiPointList.ToArray();
                if (roiPointList.Count == 4)
                {
                    using (Mat backProj = new Mat()) {
                        Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);
                        // Video.meanShift(backProj, roiRect, termination);
                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                        if (roiRect.height > 0 && roiRect.width > 0)
                        {
                            objectFound = true;
                            AppControl.control.TargetBox = roiRect;
                            // Debug.Log("Object found: " + objectFound);
                        }
                        else
                        {
                            objectFound = false;
                        }
                    }

                    // Estimate the 3D position of the object
                    if (objectFound && AppControl.control.calibrationComplete)
                    {
                        float fx = AppControl.control.fx;                       // focal length x-axis
                        float fy = AppControl.control.fy;                       // focal length y-axis
                        float cx = AppControl.control.cx;                       // principal point x-coordinate
                        float cy = AppControl.control.cy;                       // principal point y-coordinate
                        float xu = (roiRect.x) + 0.5f * roiRect.width - cx;     // undistorted object center along x-axis in pixels
                        float yu = -((roiRect.y) + 0.5f * roiRect.height - cy); // undistorted object center along y-axis in pixels
                        float du = (roiRect.height + roiRect.width) / 2f;       // apparent object size in pixels (mean of width and height)
                        float pz = (objectSize / du) * (1 / Mathf.Sqrt(1f / Mathf.Pow(fx, 2) + 1f / Mathf.Pow(fy, 2)));
                        float px = xu * pz / fx;
                        float py = yu * pz / fy;
                        objectPosition = new Vector3(px, py, pz); // in units of mm
                        target         = GameObject.FindWithTag("Target");
                        stick          = GameObject.FindWithTag("Breadboard");
                        if (target != null && stick != null)
                        {
                            target.transform.position          = mainCamera.transform.position + 0.001f * objectPosition; // in units of meters
                            AppControl.control.camObservations = new double[] { xu, yu, du };
                            AppControl.control.writeData();
                            // stick.transform.rotation = mainCamera.transform.rotation * stick.transform.rotation * Quaternion.Inverse(mainCamera.transform.rotation);
                            // stick.transform.position = stick.transform.position
                        }
                    }
                    else
                    {
                        objectPosition = new Vector3(0, 0, 0);
                    }


                    // not necessary to reset the object tracking by clicking

                    /*
                     #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                     *  //Touch
                     *  int touchCount = Input.touchCount;
                     *  if (touchCount == 1)
                     *  {
                     *      if(Input.GetTouch(0).phase == TouchPhase.Ended){
                     *          roiPointList.Clear ();
                     *      }
                     *  }
                     #else
                     * if (Input.GetMouseButtonUp (0)) {
                     *      roiPointList.Clear ();
                     * }
                     #endif
                     */
                }


                ///////////////////////////////////////////////////////////////
                // Capture the ROI from user input; compute the HSV histogram
                // of the ROI
                ///////////////////////////////////////////////////////////////
                if (roiPointList.Count < 4)
                {
                    #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        Touch t = Input.GetTouch(0);
                        if (t.phase == TouchPhase.Ended)
                        {
                            roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
//									Debug.Log ("touch X " + t.position.x);
//									Debug.Log ("touch Y " + t.position.y);

                            if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                            {
                                roiPointList.RemoveAt(roiPointList.Count - 1);
                            }
                        }
                    }
                    #else
                    // Capture mouse input events and add the points to the ROI List
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                        //												Debug.Log ("mouse X " + Input.mousePosition.x);
                        //												Debug.Log ("mouse Y " + Input.mousePosition.y);

                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }
                    #endif

                    // If the user has selected four points, lock in the ROI; compute
                    // the HSV histogram of the ROI
                    if (roiPointList.Count == 4)
                    {
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray())) {
                            roiRect = Imgproc.boundingRect(roiPointMat);
                        }

                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                            using (Mat maskMat = new Mat()) {
                                Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                                //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                            }
                    }
                }


                ///////////////////////////////////////////////////////////////
                // Draw an overlay that shows where the user has pressed
                ///////////////////////////////////////////////////////////////
                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Core.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Core.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }
                    Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
//				Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
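The position estimate above is a pinhole-camera calculation: an object of known physical size objectSize spanning du pixels under focal length f sits at depth roughly f * objectSize / du, and the pixel offsets are back-projected through fx and fy. A sketch of the same math factored out, assuming UnityEngine's Mathf and Vector3 (the helper name is hypothetical):

        // Sketch of the pinhole-model estimate used above; EstimateObjectPosition is hypothetical.
        static Vector3 EstimateObjectPosition(float xu, float yu, float du,
                                              float fx, float fy, float objectSize)
        {
            // Effective focal length: 1 / sqrt(1/fx^2 + 1/fy^2) reduces to f when fx == fy == f.
            float f  = 1f / Mathf.Sqrt(1f / (fx * fx) + 1f / (fy * fy));
            float pz = objectSize * f / du; // depth, in the same units as objectSize
            float px = xu * pz / fx;        // back-project the x pixel offset
            float py = yu * pz / fy;        // back-project the y pixel offset
            return new Vector3(px, py, pz);
        }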
Example #28
0
    void _Process()
    {
        binaryMatCreator = new BinaryMatCreator();
        binaryMatCreator.setCrUpper(cr_threshold_upper);
        binaryMatCreator.setCrLower(cr_threshold_lower);
        binaryMatCreator.setSUpper(s_threshold_upper);
        binaryMatCreator.setSLower(s_threshold_lower);
        binaryMatCreator.setVUpper(v_threshold_upper);
        binaryMatCreator.setVLower(v_threshold_lower);

        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

        int imageWidth  = Screen.width;
        int imageHeight = Screen.height;
        Mat cameraMat   = new Mat(new Size(imageWidth, imageHeight), CvType.CV_8UC3);

        Imgproc.resize(rgbMat, cameraMat, cameraMat.size());

        //Mat cameraMat = new Mat(rgbaMat.size(), CvType.CV_8UC3);
        //Imgproc.cvtColor(rgbaMat, cameraMat, Imgproc.COLOR_RGBA2RGB);


        //var hsvChs = ARUtil.getHSVChannels(cameraMat);
        //var yCrCbChs = ARUtil.getYCrCbChannels(cameraMat);
        //var sBy = new Mat(cameraMat.size(), CvType.CV_8UC1);
        //var crBy = new Mat(cameraMat.size(), CvType.CV_8UC1);
        //var vBy = new Mat(cameraMat.size(), CvType.CV_8UC1);
        //Core.inRange(hsvChs[1], new Scalar(s_threshold_lower), new Scalar(s_threshold_upper), sBy);
        //Core.inRange(yCrCbChs[1], new Scalar(cr_threshold_lower), new Scalar(cr_threshold_upper), crBy);
        //Core.inRange(hsvChs[2], new Scalar(v_threshold_lower), new Scalar(v_threshold_upper), vBy);
        //camQuad2.setMat(sBy);
        //camQuad3.setMat(crBy);
        //camQuad4.setMat(vBy);


        /* Initialization */
        // Search area rectangle
        OpenCVForUnity.Rect searchRect = new OpenCVForUnity.Rect(0, 0, cameraMat.cols(), cameraMat.rows());


        // Scorer used to judge region candidates
        IScorer scorer = null;

        if (loopCount == 0)
        {
            scorer = new OrangeScorer(searchRect, searchRect);
        }
        else
        {
            scorer = new OrangeScorer(searchRect, foodRegion.rect);
        }

        try
        {
            // Generate a binary image of searchRect size from the camera input image
            Mat binaryROIMat = binaryMatCreator.createBinaryMat(cameraMat, searchRect);

            // Find contours in the binary image within the search rectangle
            var contours = ARUtil.findContours(binaryROIMat, searchRect.tl());

            // Build the drink-region candidate set -> filter by circularity -> score the candidates -> take the top-scoring ones
            var regionSet = new RegionCandidateSet(contours)
                            .elliminateByArea(searchRect, 0.01, 0.9)
                            .elliminateByCircularity(0.2)
                            .score(scorer)
                            .sort();

            var count   = 0;
            var regions = new List <Region>();
            foreach (var candidate in regionSet.candidates)
            {
                if (count > 2)
                {
                    break;
                }
                if (candidate == null)
                {
                    print("candite is null");
                    break;
                }
                // Create the region
                foodRegion = new Region(candidate, cameraMat);
                regions.Add(foodRegion);
                count++;
            }

            // Merge the Region masks
            Mat alphaMask = Mat.zeros(cameraMat.size(), CvType.CV_8U);
            foreach (var region in regions)
            {
                Core.add(region.mask, alphaMask, alphaMask);
            }


            // Create the texture
            var decorateTextureMat = _textureCreator.create(cameraMat, alphaMask, H_sourceMean, 0, 0);

            if (willChange)
            {
                // Alpha blend
                ARUtil.alphaBlend(cameraMat, decorateTextureMat, alpha, alphaMask);
            }

            if (shouldDrawRect)
            {
                // Draw a rectangle around the detected region
                Imgproc.rectangle(cameraMat, foodRegion.rect.tl(), foodRegion.rect.br(), new Scalar(0, 255, 0), 3);
            }
        }
        catch (System.Exception e)
        {
            print(e);
            goto show;
        }

show:
        outputScreenQuad.setMat(cameraMat);
        loopCount++;
    }
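The only difference from the earlier _Process variant is the frame-to-frame prior: the first frame searches the whole image, while later frames bias the scorer toward the last detected region. A sketch of that pattern using the example's IScorer, OrangeScorer, and Region types (the helper name is hypothetical):

    // Sketch: choose a scorer with or without a temporal prior; MakeScorer is hypothetical.
    static IScorer MakeScorer(OpenCVForUnity.Rect searchRect, Region lastRegion, int loopCount)
    {
        return (loopCount == 0 || lastRegion == null)
            ? new OrangeScorer(searchRect, searchRect)       // no prior yet: search everywhere
            : new OrangeScorer(searchRect, lastRegion.rect); // prefer the last detected position
    }
Example #29
0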
        // Detect the hand and draw it on the image
        private static void _handPoseEstimationProcess(Mat rgbaMat, Color handColor)
        {
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

            // Set the target color on the detector
            detector.setHsvColor(HGColorSpuiter.ColorToScalar(handColor));

            detector.process(rgbaMat);

            List <MatOfPoint> contours = detector.getContours();
            if (contours.Count <= 0)
            {
                return;
            }

            // Create a rotated bounding rect tilted to the hand's angle
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
            // Draw the range down to the wrist
            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(WristRangeColor), 2, 8, 0);

            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            // Draw the palm range
            Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), HGColorSpuiter.ColorToScalar(PalmsRangeColor), 2, 8, 0);

            // Approximate the polyline or polygon with one that has fewer vertices, so that the distance between them stays within the specified precision
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
            contours[boundPos] = new MatOfPoint(pointMat.toArray());

            // Compute the convex hull and its convexity defects
            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
            if (hull.toArray().Length < 3)
            {
                return;
            }
            Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

            // Get the hand outline (hull points)
            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            // Draw the hand outline
            Imgproc.drawContours(rgbaMat, hullPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

            // Collect the points recognized as fingers
            List <MatOfPoint> defectPoints = new List <MatOfPoint>();
            List <Point>      listPoDefect = new List <Point>();

            for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
            {
                Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
                int   depth    = convexDefect.toList()[j + 3];
                if (depth > depthThreashold && farPoint.y < a)
                {
                    listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
                }
            }

            MatOfPoint e2 = new MatOfPoint();

            e2.fromList(listPoDefect); // the defect points, not the hull points
            defectPoints.Add(e2);

            // Update the number of detected fingers
            numberOfFingers = listPoDefect.Count;
            if (numberOfFingers > 5)
            {
                numberOfFingers = 5;
            }

            // Draw points between the fingers
            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, HGColorSpuiter.ColorToScalar(BetweenFingersColor), -1);
            }
        }
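Imgproc.convexityDefects packs each defect as four integers: start index, end index, farthest-point index, and a fixed-point depth (the true distance multiplied by 256, per OpenCV's documentation). A minimal sketch of reading that layout, assuming a contour and convexDefect shaped like the ones above:

            // Sketch: walk the defect list in groups of four ints.
            List<int> defects = convexDefect.toList();
            List<Point> contourPoints = contours[boundPos].toList();
            for (int j = 0; j + 3 < defects.Count; j += 4)
            {
                Point farPoint  = contourPoints[defects[j + 2]]; // deepest point of the defect
                double distance = defects[j + 3] / 256.0;        // fixed-point depth converted to pixels
                // Defects deeper than a threshold (and above the palm line) mark gaps between fingers.
            }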
Example #30
0
        void Update()
        {
            Texture2D imgTexture = Resources.Load("vcl_kd_WITH_LOADING_CONTROL") as Texture2D;
            Mat       imgMat     = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, imgMat);
            Mat grayMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGB2GRAY);              //grayscale
            Mat gray2Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Core.bitwise_not(grayMat, gray2Mat);
            Mat gray3Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Imgproc.threshold(gray2Mat, gray3Mat, thresh, maxval, Imgproc.THRESH_BINARY_INV | Imgproc.THRESH_OTSU);
            //Imgproc.threshold (gray2Mat, gray2Mat, thresh, maxval, Imgproc.THRESH_BINARY);

            Core.bitwise_not(gray3Mat, gray3Mat);
            Mat procMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Core.bitwise_and(gray2Mat, gray3Mat, procMat);             // AND the foreground mask with the inverted image: background becomes 0, foreground keeps its original values
            // The output Mats are (re)allocated by connectedComponentsWithStats, so no sizes or types need to be preset.
            Mat labels    = new Mat();
            Mat stats     = new Mat();
            Mat centroids = new Mat();
            int total     = Imgproc.connectedComponentsWithStats(gray3Mat, labels, stats, centroids);

            for (int i = 1; i < total; ++i)
            {
                int xx = (int)centroids.get(i, 0) [0];
                int yy = (int)centroids.get(i, 1) [0];

                Imgproc.circle(procMat, new Point(xx, yy), 3, new Scalar(255, 255, 0), -1);

                int x      = (int)stats.get(i, Imgproc.CC_STAT_LEFT) [0];
                int y      = (int)stats.get(i, Imgproc.CC_STAT_TOP) [0];
                int height = (int)stats.get(i, Imgproc.CC_STAT_HEIGHT) [0];
                int width  = (int)stats.get(i, Imgproc.CC_STAT_WIDTH) [0];
                int area   = (int)stats.get(i, Imgproc.CC_STAT_AREA) [0];

                if (area > 30)
                {
                    OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(x, y, width, height);
                    Imgproc.rectangle(procMat, rect.tl(), rect.br(), new Scalar(255, 255, 0), 2);

                    Mat    blotMat = imgMat.submat(rect);
                    Scalar meanVa  = Core.mean(blotMat);
                    int    meanV   = (int)meanVa.val [0];
                    double blotV   = 0;
                    Imgproc.putText(procMat, "I" + i + "/" + (area * meanV), new Point(x, y - 50), Core.FONT_HERSHEY_PLAIN, 1, new Scalar(255, 122, 0), 2);
                }
            }

            Mat hierarchy = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
            List <MatOfPoint> blotContours = new List <MatOfPoint>();

            Imgproc.findContours(gray3Mat, blotContours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(procMat, texture);

            GetComponent <RawImage> ().texture = (Texture2D)texture;
        }
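For reference, Imgproc.connectedComponentsWithStats fills labels with one CV_32S label id per pixel, stats with one CV_32S row per label (row 0 is the background) addressed by the CC_STAT_* columns, and centroids with CV_64F (x, y) pairs. A minimal sketch of walking the foreground components, mirroring the loop above:

            // Sketch: label 0 is the background, so foreground components start at 1.
            for (int i = 1; i < total; ++i)
            {
                int left = (int)stats.get(i, Imgproc.CC_STAT_LEFT)[0]; // bounding-box left edge
                int top  = (int)stats.get(i, Imgproc.CC_STAT_TOP)[0];  // bounding-box top edge
                int area = (int)stats.get(i, Imgproc.CC_STAT_AREA)[0]; // component pixel count
                double cx = centroids.get(i, 0)[0];                    // centroid x
                double cy = centroids.get(i, 1)[0];                    // centroid y
            }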