Code example #1
        public void ProcessFinger(Mat rgbaImage)
        {
            Imgproc.pyrDown(rgbaImage, mPyrDownMat);
            Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
            Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
            Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);

            Core.inRange(mHsvMat, fLowerBoundHSV, fUpperBoundHSV, fMaskHSV);

            fMask = fMaskHSV;

            Imgproc.dilate(fMask, fDilatedMask, new Mat());

            List <MatOfPoint> contoursFinger = new List <MatOfPoint>();

            Imgproc.findContours(fDilatedMask, contoursFinger, fHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            if (contoursFinger.Count == 0)
            {
                FingerContour = null;
                return;
            }

            // Find max contour area
            double     maxArea        = 0;
            MatOfPoint biggestContour = null;

            foreach (MatOfPoint each in contoursFinger)
            {
                MatOfPoint wrapper = each;
                double     area    = Imgproc.contourArea(wrapper);
                if (area > maxArea)
                {
                    maxArea        = area;
                    biggestContour = each;
                }
            }
            if (maxArea < 130)
            {
                FingerContour = null;
                return;
            }

            //Debug.Log("Finger contour area" + maxArea.ToString());

            MatOfPoint2f contours_res2f = new MatOfPoint2f();

            MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());

            Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
            FingerContour = new MatOfPoint(contours_res2f.toArray());
            contours_res2f.Dispose();
            biggestContour2f.Dispose();
            if (Imgproc.contourArea(FingerContour) > mMinContourArea * maxArea)
            {
                Core.multiply(FingerContour, new Scalar(4, 4), FingerContour);
            }
        }
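The mask in ProcessFinger is built from two Scalar fields, fLowerBoundHSV and fUpperBoundHSV, that are not shown above. A minimal sketch of how such bounds might be declared (the values are illustrative placeholders, not the project's actual thresholds; note that the code converts with COLOR_RGB2HSV_FULL, so hue spans 0-255):

        // Illustrative HSV bounds for Core.inRange (hypothetical values, not the project's own).
        private readonly Scalar fLowerBoundHSV = new Scalar(0, 50, 50);
        private readonly Scalar fUpperBoundHSV = new Scalar(40, 255, 255);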
Code example #2
        /// <summary>
        /// Approximates the vertex points of the contour.
        /// </summary>
        /// <param name="contour">Contour.</param>
        private static void _pointOfVertices(Mat rgbaMat, MatOfPoint contour)
        {
            // Use multiply to scale coordinates from the Gaussian-pyramid-reduced image back to the real image size
            Core.multiply(contour, new Scalar(4, 4), contour);

            // The contour vertices are unevenly scattered, so resample them with a polygon approximation to make them easier to work with.
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());
        }
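In C#, the final assignment to the contour parameter only rebinds the local reference, so the approximated contour never reaches the caller. A possible variant (a sketch, not the project's actual code) that returns the result instead:

        // Sketch: same processing as _pointOfVertices, but the approximated contour is
        // returned so the caller actually receives it.
        private static MatOfPoint _approxVertices(MatOfPoint contour)
        {
            // Scale the pyramid-level contour back to full-image coordinates.
            Core.multiply(contour, new Scalar(4, 4), contour);

            // Resample the contour with a polygon approximation (epsilon = 3 px).
            MatOfPoint2f pointMat = new MatOfPoint2f();
            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            return new MatOfPoint(pointMat.toArray());
        }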
Code example #3
        private void EstimateHand(Mat mat, List <MatOfPoint> contours, RecordHandDetectResult resultSetter)
        {
            // The image processing stage can produce contours, but there may be none for the side currently being examined
            if (contours.Count == 0)
            {
                resultSetter.HasValidHandArea = false;
                return;
            }

            var contour = SelectLargestContour(contours);

            var boundRect = Imgproc.boundingRect(contour);
            // Guard so that the wrist indentation, sometimes detected near the bottom of the image, is not mistaken for a gap between fingers.
            double defectMinY = boundRect.y + boundRect.height * 0.7;

            var pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            var handArea       = Imgproc.minAreaRect(pointMat);
            var handAreaCenter = handArea.center;
            var handAreaSize   = handArea.size;

            // Using the axis-aligned bounding rect looks like this.
            resultSetter.HandAreaCenter   = new Vector2(boundRect.x + boundRect.width / 2, boundRect.y + boundRect.height / 2);
            resultSetter.HandAreaSize     = new Vector2(boundRect.width, boundRect.height);
            resultSetter.HandAreaRotation = (float)handArea.angle;

            // Using the OBB (minAreaRect) would look like the following, but note that the angle behaves counterintuitively once it exceeds 45 degrees.
            // resultSetter.HandAreaCenter = new Vector2((float)handAreaCenter.x, (float)handAreaCenter.y);
            // resultSetter.HandAreaSize = new Vector2((float)handAreaSize.width, (float)handAreaSize.height);
            // resultSetter.HandAreaRotation = (float)handArea.angle;

            Imgproc.convexHull(contour, _hullIndices);
            var hullIndicesArray = _hullIndices.toArray();

            // Should not normally happen: the convex hull could not be built properly
            if (hullIndicesArray.Length < 3)
            {
                resultSetter.HasValidHandArea = false;
                return;
            }

            UpdateConvexityDefection(contour, _hullIndices, defectMinY, resultSetter);
        }
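SelectLargestContour is referenced above but not shown. A typical implementation, sketched here on the assumption that it simply returns the contour with the largest area, could look like this:

        // Sketch (assumed behavior): pick the contour with the largest area.
        // Assumes contours is non-empty, which the caller checks before calling.
        private static MatOfPoint SelectLargestContour(List<MatOfPoint> contours)
        {
            MatOfPoint largest = contours[0];
            double maxArea = Imgproc.contourArea(largest);
            foreach (MatOfPoint c in contours)
            {
                double area = Imgproc.contourArea(c);
                if (area > maxArea)
                {
                    maxArea = area;
                    largest = c;
                }
            }
            return largest;
        }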
Code example #4
File: CamShiftSample.cs  Project: 60m/OpenCVForUnity
        /// <summary>
        /// Converts the screen point.
        /// </summary>
        /// <returns>The screen point.</returns>
        /// <param name="screenPoint">Screen point.</param>
        /// <param name="quad">Quad.</param>
        /// <param name="cam">Cam.</param>
        static Point convertScreenPoint(Point screenPoint, GameObject quad, Camera cam)
        {
            Vector2 tl;
            Vector2 tr;
            Vector2 br;
            Vector2 bl;

            if (Input.deviceOrientation == DeviceOrientation.Portrait || Input.deviceOrientation == DeviceOrientation.PortraitUpsideDown)
            {
                tl = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                tr = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                br = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                bl = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
            }
            else
            {
                tl = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                tr = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                br = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                bl = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
            }

            Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2);
            Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2);


            srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
            dstRectMat.put(0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);


            Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat);

//						Debug.Log ("srcRectMat " + srcRectMat.dump ());
//						Debug.Log ("dstRectMat " + dstRectMat.dump ());
//						Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

            MatOfPoint2f srcPointMat = new MatOfPoint2f(screenPoint);
            MatOfPoint2f dstPointMat = new MatOfPoint2f();

            Core.perspectiveTransform(srcPointMat, dstPointMat, perspectiveTransform);

//						Debug.Log ("srcPointMat " + srcPointMat.dump ());
//						Debug.Log ("dstPointMat " + dstPointMat.dump ());

            return(dstPointMat.toArray() [0]);
        }
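A usage sketch for convertScreenPoint: it maps a screen tap or click into the quad's local (texture-scale) coordinate space, for example to collect ROI corners for CamShift. This mirrors how the CamShift sample calls it elsewhere; the Update wrapper below is illustrative and assumes the script sits on the quad that displays the camera texture.

            // Illustrative caller (sketch): convert a mouse click into quad-local coordinates.
            void Update()
            {
                if (Input.GetMouseButtonUp(0))
                {
                    Point texturePoint = convertScreenPoint(
                        new Point(Input.mousePosition.x, Input.mousePosition.y),
                        gameObject, Camera.main);
                    // texturePoint is now expressed in the quad's localScale units, matching
                    // the coordinate space used to build the ROI for CamShift.
                }
            }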
Code example #5
    Point convertScreenPoint(Point screenPoint, GameObject quad, Camera cam)
    {
        Vector2 tl;
        Vector2 tr;
        Vector2 br;
        Vector2 bl;

        tl = cam.WorldToScreenPoint(
            new Vector3(quad.transform.localPosition.x - quad.transform.localScale.x / 2,
                        quad.transform.localPosition.y + quad.transform.localScale.y / 2,
                        quad.transform.localPosition.z));
        tr = cam.WorldToScreenPoint(
            new Vector3(quad.transform.localPosition.x + quad.transform.localScale.x / 2,
                        quad.transform.localPosition.y + quad.transform.localScale.y / 2,
                        quad.transform.localPosition.z));
        br = cam.WorldToScreenPoint(
            new Vector3(quad.transform.localPosition.x + quad.transform.localScale.x / 2,
                        quad.transform.localPosition.y - quad.transform.localScale.y / 2,
                        quad.transform.localPosition.z));
        bl = cam.WorldToScreenPoint(
            new Vector3(quad.transform.localPosition.x - quad.transform.localScale.x / 2,
                        quad.transform.localPosition.y - quad.transform.localScale.y / 2,
                        quad.transform.localPosition.z));
        Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2);
        Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2);

        // Note: this variant fills the source rectangle from the cameraTexture
        // dimensions, so the projected quad corners computed above are not used here.
        srcRectMat.put(0, 0, 0, cameraTexture.height, cameraTexture.width,
                       cameraTexture.height, cameraTexture.width, 0, 0, 0);
        dstRectMat.put(0, 0, 0.0, 0.0, cameraTexture.width,
                       0.0, cameraTexture.width, cameraTexture.height,
                       0.0, cameraTexture.height);
        Mat          perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat);
        MatOfPoint2f srcPointMat          = new MatOfPoint2f(screenPoint);
        MatOfPoint2f dstPointMat          = new MatOfPoint2f();

        Core.perspectiveTransform(srcPointMat, dstPointMat, perspectiveTransform);
        return(dstPointMat.toArray()[0]);
    }
Code example #6
        // Get the tapped coordinates
        private static Point _convertScreenPoint(Mat rgbaMat, Point screenPoint)
        {
            // Apply a perspective (keystone) correction to get the exact coordinates of the tapped position
            Vector2 tl = Camera.main.WorldToScreenPoint(new Vector3(-rgbaMat.width() / 2, rgbaMat.height() / 2));
            Vector2 tr = Camera.main.WorldToScreenPoint(new Vector3(rgbaMat.width() / 2, rgbaMat.height() / 2));
            Vector2 br = Camera.main.WorldToScreenPoint(new Vector3(rgbaMat.width() / 2, -rgbaMat.height() / 2));
            Vector2 bl = Camera.main.WorldToScreenPoint(new Vector3(-rgbaMat.width() / 2, -rgbaMat.height() / 2));

            Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2);
            Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2);

            srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
            dstRectMat.put(0, 0, 0.0, 0.0, rgbaMat.width(), 0.0, rgbaMat.width(), rgbaMat.height(), 0.0, rgbaMat.height());

            Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat);

            MatOfPoint2f srcPointMat = new MatOfPoint2f(screenPoint);
            MatOfPoint2f dstPointMat = new MatOfPoint2f();

            Core.perspectiveTransform(srcPointMat, dstPointMat, perspectiveTransform);

            return(dstPointMat.toArray()[0]);
        }
Code example #7
    // Update is called once per frame
    void Update()
    {
        Resources.UnloadUnusedAssets(); //Fixes the memory leak

        //Get new picture from camera
        imgTexture = new Texture2D(webcamTexture.width, webcamTexture.height);
        imgTexture.SetPixels(webcamTexture.GetPixels());
        imgTexture.Apply();

        Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

        Utils.texture2DToMat(imgTexture, imgMat);

        Mat maskMat   = new Mat();
        Mat maskMatOP = new Mat();

        Mat grayMat = new Mat();

        Imgproc.dilate(imgMat, imgMat, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(1, 1)));

        //Grayscale the picture
        Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGB2GRAY);

        //Blur the picture
        Imgproc.GaussianBlur(grayMat, grayMat, new Size(3, 3), 1);

        Imgproc.equalizeHist(grayMat, grayMat);

        //Find Edges
        Mat edgesOfPicture = new Mat();

        Imgproc.Canny(grayMat, edgesOfPicture, 75, 225);

        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(edgesOfPicture, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);



        MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
        MatOfPoint2f approxCurve  = new MatOfPoint2f();
        List <Rect>  rectPre      = new List <Rect>();
        List <Rect>  rectAfter    = new List <Rect>();


        try
        {
            List <MatOfPoint2f> kvadrater = new List <MatOfPoint2f>();
            for (int idx = 0; idx >= 0; idx = (int)hierarchy.get(0, idx)[0])
            {
                MatOfPoint contour     = contours[idx];
                Rect       rect        = Imgproc.boundingRect(contour);
                double     contourArea = Imgproc.contourArea(contour);
                matOfPoint2f.fromList(contour.toList());

                Imgproc.approxPolyDP(matOfPoint2f, approxCurve, Imgproc.arcLength(matOfPoint2f, true) * 0.02, true);
                long total = approxCurve.total();


                if (total > 0)
                {
                    kvadrater.Add(approxCurve);
                    ArrayList cos    = new ArrayList();
                    Point[]   points = approxCurve.toArray();

                    for (int j = 2; j < total + 1; j++)
                    {
                        cos.Add(angle(points[(int)(j % total)], points[j - 2], points[j - 1]));
                    }

                    cos.Sort();
                    Double minCos = (Double)cos[0];
                    Double maxCos = (Double)cos[cos.Count - 1];
                    bool   isRect = total == 4 && minCos >= -0.1 && maxCos <= 0.3;
                    //List<double[]> Colors = new List<double[]>();



                    if (isRect)
                    {
                        if (rect.width > 20)
                        {
                            rectPre.Add(rect);
                        }
                        List <Color>    Colors       = new List <Color>();
                        List <double[]> colorDoubles = new List <double[]>();
                        for (int op = 0; op < 9; op++)
                        {
                            if (rectPre.Count == 9)
                            {
                                // print("Pre verify: " + rectPre.ToString());
                                //rectPre = CoordinateVerifier.Verify(rectPre); // Do not use LINQ!!
                                // print("After verify: " + rectPre.ToString());
                                var punkt = imgTexture.GetPixel(rect.x + (rect.width / 2), rect.y + (rect.height / 2));
                                Imgproc.putText(imgMat, op.ToString(), new Point(rectPre[op].x + 20, rectPre[op].y + 30), Core.FONT_HERSHEY_DUPLEX, 3, new Scalar(200));
                                Rgb rgb = new Rgb(punkt.r, punkt.g, punkt.b);
                                // print("rect[" + op + "] was found at" + rect.x.ToString() + "and y: " + rect.y.ToString());
                                var    hsv  = rgb.To <Hsv>();
                                String farg = "Ingen farg";

                                if (hsv.H >= 45 && hsv.H <= 70)
                                {
                                    farg = "Gul";
                                }
                                if (hsv.H >= 10 && hsv.H <= 45)
                                {
                                    farg = "Orange";
                                }

                                // print(farg);
                                Colors.Clear();
                                for (int q = 0; q < rectPre.Count; q++)
                                {
                                    Color[] blockOfColour = imgTexture.GetPixels(rectPre[q].x + (rectPre[q].width / 2), rectPre[q].y + (rectPre[q].height / 2), rectPre[q].width / 3, rectPre[q].height / 3, 0);

                                    float r = 0, g = 0, b = 0;
                                    foreach (Color pixelBlock in blockOfColour)
                                    {
                                        r += pixelBlock.r;
                                        g += pixelBlock.g;
                                        b += pixelBlock.b;
                                    }
                                    r = r / blockOfColour.Length;
                                    g = g / blockOfColour.Length;
                                    b = b / blockOfColour.Length;

                                    var eColor = _colorDetection.ColorEnumFromScalarColor(new double[] { r * 255, g * 255, b * 255 });
                                    var color  = ColorDetection.UnityColorFromEnum(eColor);
                                    Colors.Add(color);
                                }

                                if (Colors.Count == 9)
                                {
                                    ColorTracker.Instance.addToTemp(Colors);
                                    foreach (Color c in Colors)
                                    {
                                        // print(c.ToString());
                                    }
                                }
                            }
                        }
                        Imgproc.drawContours(imgMat, contours, idx, new Scalar(255, 100, 155), 4);
                    }
                }
            }
        }
        catch (ArgumentOutOfRangeException e)
        {
        }

        Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(imgMat, texture);
        gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    }
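This Update allocates a new Texture2D and several Mats every frame and leans on Resources.UnloadUnusedAssets to reclaim them. A possible cleanup sketch (an assumption about how one might tidy this up, not part of the project): OpenCV for Unity Mats wrap native buffers that can be released explicitly with Dispose.

    // Sketch of an explicit-release helper; the example call lists the locals created above.
    static void ReleaseFrameMats(params Mat[] mats)
    {
        foreach (Mat m in mats)
        {
            if (m != null)
            {
                m.Dispose(); // frees the underlying native matrix data
            }
        }
    }
    // Example call at the end of Update():
    // ReleaseFrameMats(imgMat, maskMat, maskMatOP, grayMat, edgesOfPicture, hierarchy);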
Code example #8
File: Main.cs  Project: eugenejahn/ARNoteTaking
        //public RawImage document;

        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat mainMat = webCamTextureToMatHelper.GetMat();


                if (!selectTarget) //find paper by contours
                {
                    grayMat = new Mat();

                    // convert texture to matrix
                    mainMat.copyTo(grayMat);

                    mainMat = findPaper(mainMat);

                    // display matrix on the screen
                    Utils.fastMatToTexture2D(mainMat, texture);
                }
                else
                { // using optical flow
                    // set the currentGrayMat mat
                    currentGrayMat = new Mat(mainMat.rows(), mainMat.cols(), CvType.CV_8UC1); // Mat type must be a CvType, not a color-conversion code
                    Imgproc.cvtColor(mainMat, currentGrayMat, Imgproc.COLOR_RGBA2GRAY);


                    if (initOpticalFlow == true) // doing the init setting for optical flow
                    {
                        // create 40 points
                        Point[] points = new Point[40];
                        // set those points near the corner
                        // paperCornerMatOfPoint  is the corner of the paper
                        for (int i = 0; i < 4; i++)
                        {
                            points[i * 10]     = new Point(paperCornerMatOfPoint.toList()[i].x, paperCornerMatOfPoint.toList()[i].y);
                            points[i * 10 + 1] = new Point(paperCornerMatOfPoint.toList()[i].x + 1, paperCornerMatOfPoint.toList()[i].y);
                            points[i * 10 + 2] = new Point(paperCornerMatOfPoint.toList()[i].x, paperCornerMatOfPoint.toList()[i].y + 1);
                            points[i * 10 + 3] = new Point(paperCornerMatOfPoint.toList()[i].x + 1, paperCornerMatOfPoint.toList()[i].y + 1);
                            points[i * 10 + 4] = new Point(paperCornerMatOfPoint.toList()[i].x, paperCornerMatOfPoint.toList()[i].y - 1);
                            points[i * 10 + 5] = new Point(paperCornerMatOfPoint.toList()[i].x - 1, paperCornerMatOfPoint.toList()[i].y);
                            points[i * 10 + 6] = new Point(paperCornerMatOfPoint.toList()[i].x - 2, paperCornerMatOfPoint.toList()[i].y - 1);
                            points[i * 10 + 7] = new Point(paperCornerMatOfPoint.toList()[i].x, paperCornerMatOfPoint.toList()[i].y - 2);
                            points[i * 10 + 8] = new Point(paperCornerMatOfPoint.toList()[i].x - 2, paperCornerMatOfPoint.toList()[i].y - 2);
                            points[i * 10 + 9] = new Point(paperCornerMatOfPoint.toList()[i].x + 2, paperCornerMatOfPoint.toList()[i].y + 2);
                        }

                        // make the points closer to the corners (Harris Corner Detection )
                        //Imgproc.goodFeaturesToTrack(currentGrayMat, corners, 40, qualityLevel, minDistance, none, blockSize, false, 0.04);
                        //Imgproc.goodFeaturesToTrack(currentGrayMat, corners, 40,0.05,20);

                        corners.fromArray(points);

                        prevFeatures.fromList(corners.toList());
                        currentFeatures.fromList(corners.toList());
                        prevGrayMat = currentGrayMat.clone();

                        // won't run this init branch again
                        initOpticalFlow = false;


                        // create random colors for drawing (only used by the commented-out drawing code below)
                        for (int i = 0; i < maxCorners; i++)
                        {
                            color.Add(new Scalar((int)(Random.value * 255), (int)(Random.value * 255),
                                                 (int)(Random.value * 255), 255));
                        }
                    }
                    else
                    {
                        // keep the tracked points fixed (do not advance them)
                        //currentFeatures.fromArray(prevFeatures.toArray());


                        // let the tracked points advance each frame
                        prevFeatures.fromArray(currentFeatures.toArray());

                        // optical flow: calcOpticalFlowPyrLK updates the values in currentFeatures
                        Video.calcOpticalFlowPyrLK(prevGrayMat, currentGrayMat, prevFeatures, currentFeatures, mMOBStatus, err);
                        //Debug.Log(st.rows());

                        // change to points list
                        List <Point> prevList  = prevFeatures.toList(),
                                     nextList  = currentFeatures.toList();
                        List <byte> byteStatus = mMOBStatus.toList();


                        int x = 0;
                        int y = byteStatus.Count - 1;

                        for (x = 0; x < y; x++)
                        {
                            if (byteStatus[x] == 1)
                            {
                                Point pt  = nextList[x];
                                Point pt2 = prevList[x];

                                Imgproc.circle(mainMat, pt, 10, new Scalar(0, 0, 255), -1);

                                Imgproc.line(mainMat, pt, pt2, new Scalar(0, 0, 255));
                            }
                        }

                        // draw the data
                        //for (int i = 0; i < prevList.Count; i++)
                        //{
                        //    //Imgproc.circle(frame, prevList[i], 5, color[10]);
                        //    Imgproc.circle(mainMat, nextList[i], 10, new Scalar(0, 0, 255), -1);

                        //    Imgproc.line(mainMat, prevList[i], nextList[i], color[20]);
                        //}


                        List <List <Point> > cornersFeatures = new List <List <Point> >(40);
                        cornersFeatures.Add(new List <Point>(10));

                        // put the corners features data into the list
                        int  tmp  = 0;
                        bool last = true;
                        for (int i = 0; i < nextList.Count - 1; i++)
                        {
                            if (Mathf.Abs((float)(nextList[i].x - nextList[i + 1].x)) < 10 && Mathf.Abs((float)(nextList[i].y - nextList[i + 1].y)) < 10)
                            {
                                if (last == true)
                                {
                                    cornersFeatures[tmp].Add(nextList[i]);
                                }
                                else
                                {
                                    cornersFeatures.Add(new List <Point>(10));
                                    tmp = tmp + 1;
                                    cornersFeatures[tmp].Add(nextList[i]);
                                }
                                last = true;
                            }
                            else
                            {
                                last = false;
                            }
                        }

                        // count corners
                        int manyCornersFeatures = 0;
                        for (int i = 0; i < cornersFeatures.Count; i++)
                        {
                            Debug.Log(cornersFeatures[i].Count);
                            if (cornersFeatures[i].Count < 5)
                            {
                                cornersFeatures.RemoveAt(i);
                                i--; // step back so the element shifted into this slot is not skipped
                            }
                            else
                            {
                                manyCornersFeatures++;
                            }
                        }

                        //Debug.Log("Length" + manyCornersFeatures);

                        // if exactly 4 corner clusters were found, display the virtual document in the frame
                        // using a perspective transform
                        if (manyCornersFeatures == 4)
                        {
                            Mat documentMat = new Mat(document.height, document.width, CvType.CV_8UC3);
                            Utils.texture2DToMat(document, documentMat);

                            List <Point> srcPoints = new List <Point>();
                            srcPoints.Add(new Point(0, 0));
                            srcPoints.Add(new Point(documentMat.cols(), 0));
                            srcPoints.Add(new Point(documentMat.cols(), documentMat.rows()));
                            srcPoints.Add(new Point(0, documentMat.rows()));


                            Mat srcPointsMat = Converters.vector_Point_to_Mat(srcPoints, CvType.CV_32F);


                            List <Point> dstPoints = new List <Point>()
                            {
                                cornersFeatures[0][0], cornersFeatures[1][0], cornersFeatures[2][0], cornersFeatures[3][0]
                            };
                            Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);


                            //Make perspective transform
                            Mat m         = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
                            Mat warpedMat = new Mat(new Size(), documentMat.type());
                            Debug.Log((cornersFeatures[1][0].x - cornersFeatures[0][0].x) + " " + (cornersFeatures[2][0].y - cornersFeatures[1][0].y));
                            Imgproc.warpPerspective(documentMat, warpedMat, m, mainMat.size(), Imgproc.INTER_LINEAR);
                            //warpedMat.convertTo(warpedMat, CvType.CV_32F);


                            //warpedMat.convertTo(warpedMat, CvType.CV_8UC3);
                            warpedMat.convertTo(warpedMat, CvType.CV_8UC3);
                            // same size as frame
                            Mat dst = new Mat(mainMat.size(), CvType.CV_8UC3);
                            //Mat dst = new Mat(frame.size(), CvType.CV_8UC3);
                            //Mat dst2 = new Mat();

                            Imgproc.cvtColor(mainMat, dst, Imgproc.COLOR_RGBA2RGB);

                            //dst.setTo(new Scalar(0, 255, 0));
                            //currentGrayMat.copyTo(dst);
                            //dst.convertTo(dst, CvType.CV_8UC3);


                            //Imgproc.cvtColor(currentGrayMat, frame, Imgproc.COLOR_GRAY2RGBA);

                            Mat img1 = new Mat();
                            Mat mask = new Mat(mainMat.size(), CvType.CV_8UC1, new Scalar(0));
                            Imgproc.cvtColor(warpedMat, img1, Imgproc.COLOR_RGB2GRAY);
                            Imgproc.Canny(img1, img1, 100, 200);
                            List <MatOfPoint> doc_contours = new List <MatOfPoint>();
                            Imgproc.findContours(img1, doc_contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
                            Imgproc.drawContours(mask, doc_contours, -1, new Scalar(255), Core.FILLED);

                            warpedMat.copyTo(dst, mask);

                            dst.convertTo(dst, CvType.CV_8UC3);

                            Debug.Log("dst" + dst.size());


                            Imgproc.cvtColor(dst, mainMat, Imgproc.COLOR_RGB2RGBA);


                            // display on the right
                            Texture2D finalTextue = new Texture2D(dst.width(), dst.height(), TextureFormat.RGB24, false);
                            Utils.matToTexture2D(dst, finalTextue);

                            targetRawImage.texture = finalTextue;
                        }


                        // current frame to old frame
                        prevGrayMat = currentGrayMat.clone();



                        //Imgproc.cvtColor(currentGrayMat, frame, Imgproc.COLOR_GRAY2RGBA);

                        // display matrix on the screen
                        Utils.fastMatToTexture2D(mainMat, texture);
                    }
                }
            }
        }
Code example #9
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                                hsvMat.Dispose ();
                                if (roiHistMat != null)
                                        roiHistMat.Dispose ();
                                roiPointList.Clear ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                                        hsvMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC3);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif

            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = (((float)Screen.height/(float)Screen.width) * (float)webCamTexture.height) / 2.0f;
            #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }

                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }

                                Imgproc.cvtColor (rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                                Imgproc.cvtColor (hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                                Point[] points = roiPointList.ToArray ();

                                if (roiPointList.Count == 4) {

                                        using (Mat backProj = new Mat ()) {
                                                Imgproc.calcBackProject (new List<Mat> (new Mat[]{hsvMat}), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);

                                                RotatedRect r = Video.CamShift (backProj, roiRect, termination);
                                                r.points (points);
                                        }

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {

                                if(Input.GetTouch(0).phase == TouchPhase.Ended){

                                    roiPointList.Clear ();
                                }

                            }
                                        #else
                                        if (Input.GetMouseButtonUp (0)) {
                                                roiPointList.Clear ();
                                        }
            #endif
                                }

                                if (roiPointList.Count < 4) {

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {
                                Touch t = Input.GetTouch(0);
                                if(t.phase == TouchPhase.Ended){
                                    roiPointList.Add (convertScreenPoint (new Point (t.position.x, t.position.y), gameObject, Camera.main));
            //									Debug.Log ("touch X " + t.position.x);
            //									Debug.Log ("touch Y " + t.position.y);

                                    if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                    }
                                }

                            }
            #else
                                        //Mouse
                                        if (Input.GetMouseButtonUp (0)) {

                                                roiPointList.Add (convertScreenPoint (new Point (Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
            //												Debug.Log ("mouse X " + Input.mousePosition.x);
            //												Debug.Log ("mouse Y " + Input.mousePosition.y);

                                                if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                                }
                                        }
            #endif

                                        if (roiPointList.Count == 4) {

                                                using (MatOfPoint roiPointMat = new MatOfPoint (roiPointList.ToArray ())) {
                                                        roiRect = Imgproc.boundingRect (roiPointMat);
                                                }

                                                if (roiHistMat != null) {
                                                        roiHistMat.Dispose ();
                                                        roiHistMat = null;
                                                }
                                                roiHistMat = new Mat ();

                                                using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                                                using (Mat maskMat = new Mat ()) {

                                                        Imgproc.calcHist (new List<Mat> (new Mat[]{roiHSVMat}), new MatOfInt (0), maskMat, roiHistMat, new MatOfInt (16), new MatOfFloat (0, 180));
                                                        Core.normalize (roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

            //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                                                }
                                        }
                                }

                                if (points.Length < 4) {

                                        for (int i = 0; i < points.Length; i++) {
                                                Core.circle (rgbaMat, points [i], 6, new Scalar (0, 0, 255, 255), 2);
                                        }

                                } else {

                                        for (int i = 0; i < 4; i++) {
                                                Core.line (rgbaMat, points [i], points [(i + 1) % 4], new Scalar (255, 0, 0, 255), 2);
                                        }

                                        Core.rectangle (rgbaMat, roiRect.tl (), roiRect.br (), new Scalar (0, 255, 0, 255), 2);
                                }

                                Core.putText (rgbaMat, "PLEASE TOUCH 4 POINTS", new Point (5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }

                /// <summary>
                /// Converts the screen point.
                /// </summary>
                /// <returns>The screen point.</returns>
                /// <param name="screenPoint">Screen point.</param>
                /// <param name="quad">Quad.</param>
                /// <param name="cam">Cam.</param>
                static Point convertScreenPoint (Point screenPoint, GameObject quad, Camera cam)
                {
                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
            #else
                        Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
            #endif

                        Mat srcRectMat = new Mat (4, 1, CvType.CV_32FC2);
                        Mat dstRectMat = new Mat (4, 1, CvType.CV_32FC2);

                        srcRectMat.put (0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
                        dstRectMat.put (0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);

                        Mat perspectiveTransform = Imgproc.getPerspectiveTransform (srcRectMat, dstRectMat);

            //						Debug.Log ("srcRectMat " + srcRectMat.dump ());
            //						Debug.Log ("dstRectMat " + dstRectMat.dump ());
            //						Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

                        MatOfPoint2f srcPointMat = new MatOfPoint2f (screenPoint);
                        MatOfPoint2f dstPointMat = new MatOfPoint2f ();

                        Core.perspectiveTransform (srcPointMat, dstPointMat, perspectiveTransform);

            //						Debug.Log ("srcPointMat " + srcPointMat.dump ());
            //						Debug.Log ("dstPointMat " + dstPointMat.dump ());

                        return dstPointMat.toArray () [0];
                }
            }
Code example #10
        /// <summary>
        /// Converts the screen point.
        /// </summary>
        /// <returns>The screen point.</returns>
        /// <param name="screenPoint">Screen point.</param>
        /// <param name="quad">Quad.</param>
        /// <param name="cam">Cam.</param>
        static Point convertScreenPoint(Point screenPoint, GameObject quad, Camera cam)
        {
            Vector2 tl;
            Vector2 tr;
            Vector2 br;
            Vector2 bl;

            tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
            tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
            br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
            bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));

            Mat srcRectMat = new Mat (4, 1, CvType.CV_32FC2);
            Mat dstRectMat = new Mat (4, 1, CvType.CV_32FC2);

            srcRectMat.put (0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
            dstRectMat.put (0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);

            Mat perspectiveTransform = Imgproc.getPerspectiveTransform (srcRectMat, dstRectMat);

            // Debug.Log ("srcRectMat " + srcRectMat.dump ());
            // Debug.Log ("dstRectMat " + dstRectMat.dump ());
            // Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

            MatOfPoint2f srcPointMat = new MatOfPoint2f (screenPoint);
            MatOfPoint2f dstPointMat = new MatOfPoint2f ();

            Core.perspectiveTransform (srcPointMat, dstPointMat, perspectiveTransform);

            // Debug.Log ("srcPointMat " + srcPointMat.dump ());
            // Debug.Log ("dstPointMat " + dstPointMat.dump ());

            return dstPointMat.toArray () [0];
        }
Code example #11
    /// <summary>
    /// Runs the hand pose estimation process.
    /// </summary>
    public void handPoseEstimationProcess(Mat rgbaMat)
    {
        //Imgproc.blur(mRgba, mRgba, new Size(5,5));
        Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
        //Imgproc.medianBlur(mRgba, mRgba, 3);

        if (!isColorSelected)
        {
            return;
        }

        List <MatOfPoint> contours = detector.getContours();

        detector.process(rgbaMat);
        //Debug.Log(contours + " | " + contours.Count);
        //string[] output = contours.ToArray();

        for (int i = 0; i < contours.Count; i++)
        {
            //Debug.Log("MatOfPoint2f " + new MatOfPoint2f(contours[i].toArray()) + " | " + i);
            //Debug.Log("MatOfPoint " + contours [i] + " | " + i);
            //Imgproc.circle(rgbaMat, contours[i], 6, new Scalar(0, 255, 0, 255), -1);


            //Debug.Log ("kotka" +  MatOfPoint.ReferenceEquals(x, y));
        }

        if (contours.Count <= 0)
        {
            return;
        }


        RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

        double boundWidth  = rect.size.width;
        double boundHeight = rect.size.height;
        int    boundPos    = 0;

        for (int i = 1; i < contours.Count; i++)
        {
            rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
            if (rect.size.width * rect.size.height > boundWidth * boundHeight)
            {
                boundWidth  = rect.size.width;
                boundHeight = rect.size.height;
                boundPos    = i;
            }
        }

        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
        Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
        //tochkaX = boundRect.tl ().x;
        //tochkaY = boundRect.tl ().y;
        Imgproc.circle(rgbaMat, boundRect.tl(), 6, new Scalar(0, 255, 0, 255), -1);
        Imgproc.circle(rgbaMat, boundRect.br(), 6, new Scalar(0, 255, 0, 255), -1);
        pointbX = boundRect.br().x;
        pointbY = boundRect.br().y;
        pointaX = boundRect.x;
        pointbY = boundRect.y; // NOTE: overwrites the value assigned two lines above; this was probably meant to be pointaY
        double a = boundRect.br().y - boundRect.tl().y;

        a = a * 0.7;
        a = boundRect.tl().y + a;
        Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);
        MatOfPoint2f pointMat = new MatOfPoint2f();

        Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
        contours[boundPos] = new MatOfPoint(pointMat.toArray());
        MatOfInt  hull         = new MatOfInt();
        MatOfInt4 convexDefect = new MatOfInt4();

        Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
        if (hull.toArray().Length < 3)
        {
            return;
        }
        Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);
        List <MatOfPoint> hullPoints = new List <MatOfPoint>();
        List <Point>      listPo     = new List <Point>();

        for (int j = 0; j < hull.toList().Count; j++)
        {
            listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
        }
        MatOfPoint e = new MatOfPoint();

        e.fromList(listPo);
        hullPoints.Add(e);
        List <MatOfPoint> defectPoints = new List <MatOfPoint>();
        List <Point>      listPoDefect = new List <Point>();

        for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
        {
            Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
            int   depth    = convexDefect.toList()[j + 3];
            if (depth > 8700 && farPoint.y < a)
            {
                listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
            }
        }

        MatOfPoint e2 = new MatOfPoint();

        e2.fromList(listPo);
        defectPoints.Add(e2);
        Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
        this.numberOfFingers = listPoDefect.Count;
        if (this.numberOfFingers > 5)
        {
            this.numberOfFingers = 5;
        }
        foreach (Point p in listPoDefect)
        {
            Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
        }
    }
Code Example #12
0
File: DisplayMat.cs Project: cybercomgroup/HoloCube
    // Update is called once per frame
    void Update()
    {
        Resources.UnloadUnusedAssets(); //Fixes the memory leak

        //Get new picture from camera
        imgTexture = new Texture2D(webcamTexture.width, webcamTexture.height);
        imgTexture.SetPixels(webcamTexture.GetPixels());
        imgTexture.Apply();


        Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

        Utils.texture2DToMat(imgTexture, imgMat);

        Mat processedMat = new Mat();

        Imgproc.dilate(imgMat, imgMat, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(1, 1)));

        //Grayscale the picture
        Imgproc.cvtColor(imgMat, processedMat, Imgproc.COLOR_RGB2GRAY);


        //Blur the picture
        Imgproc.GaussianBlur(processedMat, processedMat, new Size(3, 3), 1);

        Imgproc.equalizeHist(processedMat, processedMat);


        //Find Edges
        Mat edgesOfPicture = new Mat();

        Imgproc.Canny(processedMat, edgesOfPicture, 75, 225);

        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(edgesOfPicture, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
        MatOfPoint2f approxCurve  = new MatOfPoint2f();
        List <Rect>  rects        = new List <Rect>();


        try
        {
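            // Walk only the top-level contours by following each contour's next-sibling index stored in hierarchy.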
            for (int idx = 0; idx >= 0; idx = (int)hierarchy.get(0, idx)[0])
            {
                MatOfPoint contour     = contours[idx];
                Rect       rect        = Imgproc.boundingRect(contour);
                double     contourArea = Imgproc.contourArea(contour);
                matOfPoint2f.fromList(contour.toList());

                Imgproc.approxPolyDP(matOfPoint2f, approxCurve, Imgproc.arcLength(matOfPoint2f, true) * 0.02, true);
                long total = approxCurve.total();

                if (total == 4)
                {
                    ArrayList cos    = new ArrayList();
                    Point[]   points = approxCurve.toArray();

                    for (int j = 2; j < total + 1; j++)
                    {
                        cos.Add(angle(points[(int)(j % total)], points[j - 2], points[j - 1]));
                    }

                    cos.Sort();
                    Double minCos = (Double)cos[0];
                    Double maxCos = (Double)cos[cos.Count - 1];
                    bool   isRect = total == 4 && minCos >= -0.1 && maxCos <= 0.3;

                    if (isRect)
                    {
                        if (rect.width > 20)
                        {
                            rects.Add(rect);
                        }

                        List <double[]> Colors = new List <double[]>();
                        for (int op = 0; op < 10; op++)
                        {
                            if (rects.Count == 9)
                            {
                                allCubiesScaned = true;

                                Color[] blockOfColour = imgTexture.GetPixels(rect.x + rect.width / 2, rect.y + rect.height, rect.width / 3, rect.height / 3, 0);

                                float r = 0, g = 0, b = 0;
                                foreach (Color pixelBlock in blockOfColour)
                                {
                                    r += pixelBlock.r;
                                    g += pixelBlock.g;
                                    b += pixelBlock.b;
                                }
                                r = r / blockOfColour.Length;
                                g = g / blockOfColour.Length;
                                b = b / blockOfColour.Length;

                                Rgb rgb = new Rgb(r, g, b);

                                Colors.Add(new double[] { rgb.R * 255, rgb.G * 255, rgb.B * 255 });
                                print(Colors.Count);
                                if (Colors.Count == 9)
                                {
                                    ColorMap.Colors = Colors;
                                    ColorMap.Redraw();
                                }
                            }
                        }
                        Imgproc.drawContours(imgMat, contours, idx, new Scalar(255, 100, 155), 4);
                    }
                }
            }
        }
        catch (ArgumentOutOfRangeException)
        {
        }


        Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(imgMat, texture);
        gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    }
Code Example #13
0
File: Scanner.cs Project: dylandawk/doodle-AR-Unity
    void FormatImageSquare()
    {
        Mat mainMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
        Mat grayMat = new Mat();

        //Convert Texture2d to Matrix
        Utils.texture2DToMat(baseTexture, mainMat);
        //copy main matrix to grayMat
        mainMat.copyTo(grayMat);

        //Convert color to gray
        Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        //Blur
        Imgproc.GaussianBlur(grayMat, grayMat, new Size(25, 25), 0);

        //contrast
        Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);
        //extract edge
        Imgproc.Canny(grayMat, grayMat, 50, 50);

        //prepare for the finding contours
        List <MatOfPoint> contours = new List <MatOfPoint>();

        //find the contour from canny edge image
        Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        List <MatOfPoint> tempTargets = new List <MatOfPoint>();

        for (int i = 0; i < contours.Count; i++)
        {
            MatOfPoint   cp = contours[i];
            MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
            double       p  = Imgproc.arcLength(cn, true);

            MatOfPoint2f approx = new MatOfPoint2f();
            //convret to polygon
            Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);


            //find a contour with four points and large area
            int minContourArea = 10000;
            if (approx.toArray().Length == 4 && Imgproc.contourArea(approx) > minContourArea)
            {
                MatOfPoint approxPt = new MatOfPoint();
                approx.convertTo(approxPt, CvType.CV_32S);

                float maxCosine = 0;
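                    // Despite its name, maxCosine holds the largest corner angle in degrees (Vector2.Angle); quads whose corners all stay under 135 degrees are accepted.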
                for (int j = 2; j < 5; j++)
                {
                    Vector2 v1 = new Vector2((float)(approx.toArray()[j % 4].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j % 4].y - approx.toArray()[j - 1].y));
                    Vector2 v2 = new Vector2((float)(approx.toArray()[j - 2].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j - 2].y - approx.toArray()[j - 1].y));

                    float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                    maxCosine = Mathf.Max(maxCosine, angle);
                }

                if (maxCosine < 135f)
                {
                    tempTargets.Add(approxPt);
                }
            }
        }

        if (tempTargets.Count > 0)
        {
            //Get the first contour
            MatOfPoint approxPt = tempTargets[0];
            //Making Source Mat
            Mat srcPointMat = Converters.vector_Point_to_Mat(approxPt.toList(), CvType.CV_32F);

            //Making Destination Mat
            /*change these values*/
            List <Point> dstPoints = new List <Point>();
            dstPoints.Add(new Point(512, 0));
            dstPoints.Add(new Point(0, 0));
            dstPoints.Add(new Point(0, 512));
            dstPoints.Add(new Point(512, 512));

            Mat dstPointMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);

            //Make Perp transform
            Mat M         = Imgproc.getPerspectiveTransform(srcPointMat, dstPointMat);
            Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
            //Crop and warp the image
            Imgproc.warpPerspective(mainMat, warpedMat, M, new Size(512, 512), Imgproc.INTER_LINEAR);
            warpedMat.convertTo(warpedMat, CvType.CV_8UC3);



            //Convert color to gray
            Imgproc.cvtColor(warpedMat, warpedMat, Imgproc.COLOR_BGR2GRAY);
            ////Blur
            //Imgproc.GaussianBlur(warpedMat, warpedMat, new Size(25, 25), 0);

            //contrast
            Imgproc.threshold(warpedMat, warpedMat, 0, 255, Imgproc.THRESH_OTSU);
            //resize
            Imgproc.resize(warpedMat, warpedMat, new Size(28, 28));
            //Create an empty final texture
            finalTexture = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
            //Convert material to texture2d
            Utils.matToTexture2D(warpedMat, finalTexture);
            targetRawImage.texture = finalTexture;
        }
    }
Code Example #14
0
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            // rgbaMat.copyTo(mRgba);
            float DOWNSCALE_RATIO = 1.0f;

            if (enableDownScale)
            {
                mRgba           = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                // mRgba = rgbaMat;
                rgbaMat.copyTo(mRgba);
                DOWNSCALE_RATIO = 1.0f;
            }

            // Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(mRgba, mRgba, new Size(3, 3), 1, 1);
            // Imgproc.medianBlur(mRgba, mRgba, 3);


            if (!isColorSelected)
            {
                return;
            }

            List <MatOfPoint> contours = detector.GetContours();

            detector.Process(mRgba);

            //            Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;
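            // Pick the contour whose rotated bounding box has the largest area and treat it as the hand.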

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
            Imgproc.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //                    (int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");


            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            //            Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            // Imgproc.rectangle(mRgba, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contour.toList()[hull.toList()[j]] * DOWNSCALE_RATIO);
            }

            /*
             * MatOfPoint e = new MatOfPoint();
             * e.fromList(listPo);
             * hullPoints.Add(e);
             *
             * List<Point> listPoDefect = new List<Point>();
             *
             * if (convexDefect.rows() > 0)
             * {
             *  List<int> convexDefectList = convexDefect.toList();
             *  List<Point> contourList = contour.toList();
             *  for (int j = 0; j < convexDefectList.Count; j = j + 4)
             *  {
             *      Point farPoint = contourList[convexDefectList[j + 2]];
             *      int depth = convexDefectList[j + 3];
             *      if (depth > threshholdDetect && farPoint.y < a)
             *      {
             *          listPoDefect.Add(contourList[convexDefectList[j + 2]]);
             *          Imgproc.line(rgbaMat, farPoint, listPo[convexDefectList[j + 2]], new Scalar(255, 0, 0, 255),1,1);
             *      }
             *      //                    Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
             *  }
             * }*/


            //            Debug.Log ("hull: " + hull.toList ());
            //            if (convexDefect.rows () > 0) {
            //                Debug.Log ("defects: " + convexDefect.toList ());
            //            }

            //Imgproc.drawContours (rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
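            // Visit every other hull point, draw it, and remember the indices of any hull point and face landmark that come within a distance threshold of 8; a line is then drawn between the remembered pair.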

            for (int p = 0; p < listPo.Count; p++)
            {
                if (p % 2 == 0)
                {
                    Imgproc.circle(rgbaMat, listPo[p], 6, new Scalar(255, 0, 0, 255), -1);
                    // Imgproc.putText(rgbaMat,p.ToString(),listPo[p],1,1,new Scalar(255,0,0,255));

                    // check if close

                    List <Point> fLMscaled = OpenCVForUnityUtils.ConvertVector2ListToPointList(facePoints);

                    for (int q = 0; q < fLMscaled.Count; q++)
                    {
                        if (ifLessThanDPoint(listPo[p], fLMscaled[q], 8))
                        {
                            //Point1 = listPo[p];
                            //Point2 = fLMscaled[q];
                            handPoint = p;
                            facePoint = q;
                            print(Point1 + " " + Point2);
                        }
                    }

                    if (p == handPoint && facePoint != 0)
                    {
                        Point1 = listPo[p];
                        Point2 = fLMscaled[facePoint];
                        Imgproc.line(rgbaMat, Point1, Point2, new Scalar(255, 255, 255, 255));
                    }
                }
            }



            //            int defectsTotal = (int)convexDefect.total();
            //            Debug.Log ("Defect total " + defectsTotal);

            /*  numberOfFingers = listPoDefect.Count;
             * if (numberOfFingers > 5)
             *    numberOfFingers = 5;/
             *
             * //            Debug.Log ("numberOfFingers " + numberOfFingers);
             *
             * //            Imgproc.putText (rgbaMat, "" + numberOfFingers, new Point (rgbaMat.cols () / 2, rgbaMat.rows () / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
             *
             *
             * /*   foreach (Point p in listPoDefect) {
             *
             *    Point tempp = GetNearestL(p, listPo);
             *    tempp = ConvertDownscale(tempp, DOWNSCALE_RATIO);
             *    Point p2 = ConvertDownscale(p, DOWNSCALE_RATIO);
             *
             *    Imgproc.circle (rgbaMat, tempp, 6, new Scalar (0, 0, 255, 255), -1);
             *    Imgproc.circle(rgbaMat, p2, 6, new Scalar(255, 0, 255, 255), -1);
             * }*/
        }
Code Example #15
0
        public void ProcessSkin(Mat rgbaImage)
        {
            Imgproc.pyrDown(rgbaImage, mPyrDownMat);
            Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
            Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
            Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);

            Core.inRange(mHsvMat, mLowerBoundHSV, mUpperBoundHSV, mMaskHSV);
            Core.inRange(mPyrDownMat, mLowerBoundRGB, mUpperBoundRGB, mMaskRGB);
            Core.inRange(mYCrCbMat, mLowerBoundYCrCb, mUpperBoundYCrCb, mMaskYCrCb);
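            // A pixel counts as skin only if it passes all three colour-space tests (bitwise AND of the YCrCb, HSV and RGB masks).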

            mMask = mMaskYCrCb & mMaskHSV & mMaskRGB;

            Imgproc.dilate(mMask, mDilatedMask, new Mat());

            List <MatOfPoint> contours = new List <MatOfPoint>();

            Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            if (contours.Count == 0)
            {
                return;
            }

            // Find max contour area
            double     maxArea              = 0;
            double     secondMaxArea        = 0;
            MatOfPoint biggestContour       = null;
            MatOfPoint secondBiggestContour = null;

            foreach (MatOfPoint each in contours)
            {
                MatOfPoint wrapper = each;
                double     area    = Imgproc.contourArea(wrapper);
                if (area > maxArea)
                {
                    secondMaxArea        = maxArea;
                    secondBiggestContour = biggestContour;

                    maxArea        = area;
                    biggestContour = each;
                }
                else if (area > secondMaxArea)
                {
                    secondMaxArea        = area;
                    secondBiggestContour = each;
                }
            }

            handContourSize = maxArea;
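            // If the runner-up blob is at least 30% of the largest and lies further left (smaller average x), assume it is the hand instead.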

            if ((biggestContour != null) && (secondBiggestContour != null) && (ComputeAVGXForContour(biggestContour) >= ComputeAVGXForContour(secondBiggestContour)) && (secondMaxArea >= 0.3 * maxArea))
            {
                biggestContour  = secondBiggestContour;
                handContourSize = secondMaxArea;
            }


            MatOfPoint2f contours_res2f = new MatOfPoint2f();

            MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());

            Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
            handContour = new MatOfPoint(contours_res2f.toArray());
            contours_res2f.Dispose();
            biggestContour2f.Dispose();
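            // Multiply by 4 to map the contour from the twice pyrDown'ed image back to the original resolution.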

            if (Imgproc.contourArea(handContour) > mMinContourArea * maxArea)
            {
                Core.multiply(handContour, new Scalar(4, 4), handContour);
            }

            // Filter contours by area and resize to fit the original image size
            mContours.Clear();

            foreach (MatOfPoint each in contours)
            {
                MatOfPoint contour = each;
                if (Imgproc.contourArea(contour) > mMinContourArea * maxArea)
                {
                    Core.multiply(contour, new Scalar(4, 4), contour);
                    mContours.Add(contour);
                }
            }
        }
Code Example #16
0
		/// <summary>
		/// Finds the candidates.
		/// </summary>
		/// <param name="contours">Contours.</param>
		/// <param name="detectedMarkers">Detected markers.</param>
		void findCandidates (List<MatOfPoint> contours, List<Marker> detectedMarkers)
		{
				MatOfPoint2f approxCurve = new MatOfPoint2f ();
				
				List<Marker> possibleMarkers = new List<Marker> ();
		
				// For each contour, analyze if it is a parallelepiped likely to be the marker
				for (int i=0; i<contours.Count; i++) {
						// Approximate to a polygon
						double eps = contours [i].toArray ().Length * 0.05;
						Imgproc.approxPolyDP (new MatOfPoint2f (contours [i].toArray ()), approxCurve, eps, true);

						Point[] approxCurveArray = approxCurve.toArray ();
			
						// We interested only in polygons that contains only four points
						if (approxCurveArray.Length != 4)
								continue;
			
						// And they have to be convex
						if (!Imgproc.isContourConvex (new MatOfPoint (approxCurveArray)))
								continue;

			
						// Ensure that the distance between consecutive points is large enough
						float minDist = float.MaxValue;

						for (int p = 0; p < 4; p++) {
								Point side = new Point (approxCurveArray [p].x - approxCurveArray [(p + 1) % 4].x, approxCurveArray [p].y - approxCurveArray [(p + 1) % 4].y);
								float squaredSideLength = (float)side.dot (side);
								minDist = Mathf.Min (minDist, squaredSideLength);
						}
			
						// Check that distance is not very small
						if (minDist < m_minContourLengthAllowed)
								continue;
			
						// All tests are passed. Save marker candidate:
						Marker m = new Marker ();
						m.points = new MatOfPoint ();

						List<Point> markerPointsList = new List<Point> ();
			
						for (int p = 0; p<4; p++)
								markerPointsList.Add (new Point (approxCurveArray [p].x, approxCurveArray [p].y));


			
						// Sort the points in anti-clockwise order
						// Trace a line between the first and second point.
						// If the third point is at the right side, then the points are anti-clockwise
						Point v1 = new Point (markerPointsList [1].x - markerPointsList [0].x, markerPointsList [1].y - markerPointsList [0].y);
						Point v2 = new Point (markerPointsList [2].x - markerPointsList [0].x, markerPointsList [2].y - markerPointsList [0].y);
			
						double o = (v1.x * v2.y) - (v1.y * v2.x);
			
						if (o < 0.0) {		 //if the third point is in the left side, then sort in anti-clockwise order
								Point tmp = markerPointsList [1];
								markerPointsList [1] = markerPointsList [3];
								markerPointsList [3] = tmp;

						}

						m.points.fromList (markerPointsList);
			
						possibleMarkers.Add (m);
				}
				approxCurve.Dispose ();

		        
				//Debug.Log ("possibleMarkers " + possibleMarkers.Count);
		
		
				// Remove these elements which corners are too close to each other.
				// First detect candidates for removal:
				List< Point > tooNearCandidates = new List<Point> ();
				for (int i=0; i<possibleMarkers.Count; i++) {
						Marker m1 = possibleMarkers [i];

						Point[] m1PointsArray = m1.points.toArray ();
			
						//calculate the average distance of each corner to the nearest corner of the other marker candidate
						for (int j=i+1; j<possibleMarkers.Count; j++) {
								Marker m2 = possibleMarkers [j];

								Point[] m2PointsArray = m2.points.toArray ();
				
								float distSquared = 0;
				
								for (int c = 0; c < 4; c++) {
										Point v = new Point (m1PointsArray [c].x - m2PointsArray [c].x, m1PointsArray [c].y - m2PointsArray [c].y);
										distSquared += (float)v.dot (v);
								}
				
								distSquared /= 4;
				
								if (distSquared < 100) {
										tooNearCandidates.Add (new Point (i, j));
								}
						}
				}
		
				// Mark for removal the element of the pair with smaller perimeter
				List<bool> removalMask = new List<bool> (possibleMarkers.Count);
				for (int i = 0; i < possibleMarkers.Count; i++) {
						removalMask.Add (false);
				}
		
				for (int i=0; i<tooNearCandidates.Count; i++) {

						float p1 = perimeter (possibleMarkers [(int)tooNearCandidates [i].x].points);
						float p2 = perimeter (possibleMarkers [(int)tooNearCandidates [i].y].points);
			
						int removalIndex;
						if (p1 > p2)
								removalIndex = (int)tooNearCandidates [i].x;
						else
								removalIndex = (int)tooNearCandidates [i].y;
			
						removalMask [removalIndex] = true;
				}
		
				// Return candidates
				detectedMarkers.Clear ();
				for (int i=0; i<possibleMarkers.Count; i++) {
						if (!removalMask [i])
								detectedMarkers.Add (possibleMarkers [i]);
				}
		}
Code Example #17
0
        /// <summary>
        /// Hands the pose estimation process.
        /// </summary>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
                        Imgproc.GaussianBlur (rgbaMat, rgbaMat, new OpenCVForUnity.Size (3, 3), 1, 1);
                        //Imgproc.medianBlur(mRgba, mRgba, 3);

                        if (!isColorSelected)
                                return;

                        List<MatOfPoint> contours = detector.getContours ();
                        detector.process (rgbaMat);

            //						Debug.Log ("Contours count: " + contours.Count);

                        if (contours.Count <= 0) {
                                return;
                        }

                        RotatedRect rect = Imgproc.minAreaRect (new MatOfPoint2f (contours [0].toArray ()));

                        double boundWidth = rect.size.width;
                        double boundHeight = rect.size.height;
                        int boundPos = 0;

                        for (int i = 1; i < contours.Count; i++) {
                                rect = Imgproc.minAreaRect (new MatOfPoint2f (contours [i].toArray ()));
                                if (rect.size.width * rect.size.height > boundWidth * boundHeight) {
                                        boundWidth = rect.size.width;
                                        boundHeight = rect.size.height;
                                        boundPos = i;
                                }
                        }

                        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect (new MatOfPoint (contours [boundPos].toArray ()));
                        Imgproc.rectangle (rgbaMat, boundRect.tl (), boundRect.br (), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //						Debug.Log (
            //						" Row start [" +
            //								(int)boundRect.tl ().y + "] row end [" +
            //								(int)boundRect.br ().y + "] Col start [" +
            //								(int)boundRect.tl ().x + "] Col end [" +
            //								(int)boundRect.br ().x + "]");

                        double a = boundRect.br ().y - boundRect.tl ().y;
                        a = a * 0.7;
                        a = boundRect.tl ().y + a;

            //						Debug.Log (
            //						" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

                        //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
                        Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

                        MatOfPoint2f pointMat = new MatOfPoint2f ();
                        Imgproc.approxPolyDP (new MatOfPoint2f (contours [boundPos].toArray ()), pointMat, 3, true);
                        contours [boundPos] = new MatOfPoint (pointMat.toArray ());

                        MatOfInt hull = new MatOfInt ();
                        MatOfInt4 convexDefect = new MatOfInt4 ();
                        Imgproc.convexHull (new MatOfPoint (contours [boundPos].toArray ()), hull);

                        if (hull.toArray ().Length < 3)
                                return;

                        Imgproc.convexityDefects (new MatOfPoint (contours [boundPos].toArray ()), hull, convexDefect);

                        List<MatOfPoint> hullPoints = new List<MatOfPoint> ();
                        List<Point> listPo = new List<Point> ();
                        for (int j = 0; j < hull.toList().Count; j++) {
                                listPo.Add (contours [boundPos].toList () [hull.toList () [j]]);
                        }

                        MatOfPoint e = new MatOfPoint ();
                        e.fromList (listPo);
                        hullPoints.Add (e);

                        List<MatOfPoint> defectPoints = new List<MatOfPoint> ();
                        List<Point> listPoDefect = new List<Point> ();
                        for (int j = 0; j < convexDefect.toList().Count; j = j+4) {
                                Point farPoint = contours [boundPos].toList () [convexDefect.toList () [j + 2]];
                                int depth = convexDefect.toList () [j + 3];
                                if (depth > threasholdSlider.value && farPoint.y < a) {
                                        listPoDefect.Add (contours [boundPos].toList () [convexDefect.toList () [j + 2]]);
                                }
            //								Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
                        }

                        MatOfPoint e2 = new MatOfPoint ();
                        e2.fromList (listPo);
                        defectPoints.Add (e2);

            //						Debug.Log ("hull: " + hull.toList ());
            //						Debug.Log ("defects: " + convexDefect.toList ());

                        Imgproc.drawContours (rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            //                      int defectsTotal = (int)convexDefect.total();
            //						Debug.Log ("Defect total " + defectsTotal);

                        this.numberOfFingers = listPoDefect.Count;
                        if (this.numberOfFingers > 5)
                                this.numberOfFingers = 5;

            //						Debug.Log ("numberOfFingers " + numberOfFingers);

            //						Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
                        numberOfFingersText.text = numberOfFingers.ToString ();

                        foreach (Point p in listPoDefect) {
                                Imgproc.circle (rgbaMat, p, 6, new Scalar (255, 0, 255, 255), -1);
                        }
        }
Code Example #18
0
File: ImageChanges.cs Project: dr-iskandar/BioARsdk
    void Start()
    {
        Mat mainMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
        Mat grayMat = new Mat();

        sourceRawImage.texture = baseTexture;

        Utils.texture2DToMat(baseTexture, mainMat);
        mainMat.copyTo(grayMat);

        Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
        Imgproc.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);
        Imgproc.threshold(grayMat, grayMat, 110, 225, Imgproc.THRESH_BINARY);
        Imgproc.Canny(grayMat, grayMat, 20, 190);

        List <MatOfPoint> contours = new List <MatOfPoint>();

        Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        int num = 0;
        List <MatOfPoint> contours_list = new List <MatOfPoint>();

        //  new logic
        // List<MatOfPoint> contours_list = new List<MatOfPoint>();
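        // Keep only small, roughly circular blobs: area between 30 and 100 and a polygon approximation with more than 8 vertices.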
        for (int i = 0; i < contours.Count; i++)
        {
            MatOfPoint   cp     = contours[i];
            MatOfPoint2f cn     = new MatOfPoint2f(cp.toArray());
            double       p      = Imgproc.arcLength(cn, true);
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(cn, approx, 0.01 * p, true);
            double area = Imgproc.contourArea(contours[i]);
            if ((area > 30 && area < 100) && approx.toArray().Length > 8)
            {
                // Imgproc.drawContours(mainMat, contours, -1, new Scalar(0, 255, 0), 4);
                contours_list.Add(contours[i]);
                num = num + 1;
                Debug.Log(area);
            }
        }



        // previously working
        // for(int i =0; i< contours.Count ; i++){
        //     MatOfPoint cp = contours[i];
        //     MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
        //     double p = Imgproc.arcLength(cn,true);

        //     //  fron akshay file
        //     double area = Imgproc.contourArea(contours[i]);
        //     if(area > 50){
        //         Imgproc.drawContours(mainMat, contours, -1, new Scalar(0, 255, 0), 4);
        //         num = num + 1;
        //         Debug.Log(area);
        //     }

        // }



        // for(int i =0; i< contours.Count ; i++){
        //     double area = Imgproc.contourArea(contours[i]);
        //     if(area > 50){
        //         // Imgproc.drawContours(mainMat, contours, -1, new Scalar(0, 255, 0), 4);
        //         contours_list.Add(contours[i]);
        //         num = num + 1;
        //     }
        // }

        for (int i = 0; i < contours_list.Count; i++)
        {
            Imgproc.drawContours(mainMat, contours_list, -1, new Scalar(0, 255, 0), 4);
        }

        Debug.Log("Number : " + num);
        info.text += (num - 1).ToString();

        Texture2D finaltexture = new Texture2D(grayMat.cols(), grayMat.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(grayMat, finaltexture);
        sourceRawImage.texture = finaltexture;
    }
Code Example #19
0
        /// <summary>
        /// Finds the candidates.
        /// </summary>
        /// <param name="contours">Contours.</param>
        /// <param name="detectedMarkers">Detected markers.</param>
        void findCandidates(List <MatOfPoint> contours, List <Marker> detectedMarkers)
        {
            MatOfPoint2f approxCurve = new MatOfPoint2f();

            List <Marker> possibleMarkers = new List <Marker> ();

            // For each contour, analyze if it is a parallelepiped likely to be the marker
            for (int i = 0; i < contours.Count; i++)
            {
                // Approximate to a polygon
                double eps = contours [i].toArray().Length * 0.05;
                Imgproc.approxPolyDP(new MatOfPoint2f(contours [i].toArray()), approxCurve, eps, true);

                Point[] approxCurveArray = approxCurve.toArray();

                // We interested only in polygons that contains only four points
                if (approxCurveArray.Length != 4)
                {
                    continue;
                }

                // And they have to be convex
                if (!Imgproc.isContourConvex(new MatOfPoint(approxCurveArray)))
                {
                    continue;
                }


                // Ensure that the distance between consecutive points is large enough
                float minDist = float.MaxValue;

                for (int p = 0; p < 4; p++)
                {
                    Point side = new Point(approxCurveArray [p].x - approxCurveArray [(p + 1) % 4].x, approxCurveArray [p].y - approxCurveArray [(p + 1) % 4].y);
                    float squaredSideLength = (float)side.dot(side);
                    minDist = Mathf.Min(minDist, squaredSideLength);
                }

                // Check that distance is not very small
                if (minDist < m_minContourLengthAllowed)
                {
                    continue;
                }

                // All tests are passed. Save marker candidate:
                Marker m = new Marker();
                m.points = new MatOfPoint();

                List <Point> markerPointsList = new List <Point> ();

                for (int p = 0; p < 4; p++)
                {
                    markerPointsList.Add(new Point(approxCurveArray [p].x, approxCurveArray [p].y));
                }



                // Sort the points in anti-clockwise order
                // Trace a line between the first and second point.
                // If the third point is at the right side, then the points are anti-clockwise
                Point v1 = new Point(markerPointsList [1].x - markerPointsList [0].x, markerPointsList [1].y - markerPointsList [0].y);
                Point v2 = new Point(markerPointsList [2].x - markerPointsList [0].x, markerPointsList [2].y - markerPointsList [0].y);

                double o = (v1.x * v2.y) - (v1.y * v2.x);

                if (o < 0.0)         //if the third point is in the left side, then sort in anti-clockwise order
                {
                    Point tmp = markerPointsList [1];
                    markerPointsList [1] = markerPointsList [3];
                    markerPointsList [3] = tmp;
                }

                m.points.fromList(markerPointsList);

                possibleMarkers.Add(m);
            }
            approxCurve.Dispose();


            //Debug.Log ("possibleMarkers " + possibleMarkers.Count);


            // Remove these elements which corners are too close to each other.
            // First detect candidates for removal:
            List <Point> tooNearCandidates = new List <Point> ();

            for (int i = 0; i < possibleMarkers.Count; i++)
            {
                Marker m1 = possibleMarkers [i];

                Point[] m1PointsArray = m1.points.toArray();

                //calculate the average distance of each corner to the nearest corner of the other marker candidate
                for (int j = i + 1; j < possibleMarkers.Count; j++)
                {
                    Marker m2 = possibleMarkers [j];

                    Point[] m2PointsArray = m2.points.toArray();

                    float distSquared = 0;

                    for (int c = 0; c < 4; c++)
                    {
                        Point v = new Point(m1PointsArray [c].x - m2PointsArray [c].x, m1PointsArray [c].y - m2PointsArray [c].y);
                        distSquared += (float)v.dot(v);
                    }

                    distSquared /= 4;

                    if (distSquared < 100)
                    {
                        tooNearCandidates.Add(new Point(i, j));
                    }
                }
            }

            // Mark for removal the element of the pair with smaller perimeter
            List <bool> removalMask = new List <bool> (possibleMarkers.Count);

            for (int i = 0; i < possibleMarkers.Count; i++)
            {
                removalMask.Add(false);
            }

            for (int i = 0; i < tooNearCandidates.Count; i++)
            {
                float p1 = perimeter(possibleMarkers [(int)tooNearCandidates [i].x].points);
                float p2 = perimeter(possibleMarkers [(int)tooNearCandidates [i].y].points);

                int removalIndex;
                if (p1 > p2)
                {
                    removalIndex = (int)tooNearCandidates [i].x;
                }
                else
                {
                    removalIndex = (int)tooNearCandidates [i].y;
                }

                removalMask [removalIndex] = true;
            }

            // Return candidates
            detectedMarkers.Clear();
            for (int i = 0; i < possibleMarkers.Count; i++)
            {
                if (!removalMask [i])
                {
                    detectedMarkers.Add(possibleMarkers [i]);
                }
            }
        }
Code Example #20
0
        // Use this for initialization
        void Start()
        {
            //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);

            Texture2D imgTexture = Resources.Load("lena") as Texture2D;

            Mat img = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat(imgTexture, img);
            Debug.Log("imgMat.ToString() " + img.ToString());


            OpenCVForUnity.ShapeModule.ThinPlateSplineShapeTransformer tps = Shape.createThinPlateSplineShapeTransformer(0);
            MatOfPoint2f sourcePoints = new MatOfPoint2f(
                new Point(0, 0),
                new Point(512, 0),
                new Point(0, 512),

                new Point(250, 200),
                new Point(400, 400),
                new Point(200, 400),

                new Point(512, 512)
                );
            MatOfPoint2f targetPoints = new MatOfPoint2f(
                new Point(0, 0),
                new Point(512, 0),
                new Point(0, 599),

                new Point(250, 120),
                new Point(450, 450),
                new Point(100, 450),

                new Point(512, 512)
                );
            MatOfDMatch matches = new MatOfDMatch(
                new DMatch(0, 0, 0),
                new DMatch(1, 1, 0),
                new DMatch(2, 2, 0),
                new DMatch(3, 3, 0),
                new DMatch(4, 4, 0),
                new DMatch(5, 5, 0),
                new DMatch(6, 6, 0)
                );


            //http://stackoverflow.com/questions/32207085/shape-transformers-and-interfaces-opencv3-0
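            // Transpose so the point Mats have the 1xN layout that estimateTransformation expects (see the link above).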
            Core.transpose(sourcePoints, sourcePoints);
            Core.transpose(targetPoints, targetPoints);

            Debug.Log("sourcePoints " + sourcePoints.ToString());
            Debug.Log("targetPoints " + targetPoints.ToString());

            tps.estimateTransformation(targetPoints, sourcePoints, matches);

            MatOfPoint2f transPoints = new MatOfPoint2f();

            tps.applyTransformation(sourcePoints, transPoints);

            Debug.Log("sourcePoints " + sourcePoints.dump());
            Debug.Log("targetPoints " + targetPoints.dump());
            Debug.Log("transPoints " + transPoints.dump());


            Mat res = new Mat();

            tps.warpImage(img, res);

            //plot points
            Point[] sourcePointsArray = sourcePoints.toArray();
            Point[] targetPointsArray = targetPoints.toArray();
            for (int i = 0; i < sourcePointsArray.Length; i++)
            {
                Imgproc.arrowedLine(res, sourcePointsArray [i], targetPointsArray [i], new Scalar(255, 255, 0, 255), 3, Imgproc.LINE_AA, 0, 0.2);

                Imgproc.circle(res, sourcePointsArray [i], 10, new Scalar(255, 0, 0, 255), -1);
                Imgproc.circle(res, targetPointsArray [i], 10, new Scalar(0, 0, 255, 255), -1);
            }


            Texture2D texture = new Texture2D(res.cols(), res.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(res, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
Code Example #21
0
        /// <summary>
        /// Hands the pose estimation process.
        /// </summary>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            List <MatOfPoint> contours = detector.getContours();

            detector.process(rgbaMat);

//						Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours [boundPos].toArray()));
            Core.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

//						Debug.Log (
//						" Row start [" +
//								(int)boundRect.tl ().y + "] row end [" +
//								(int)boundRect.br ().y + "] Col start [" +
//								(int)boundRect.tl ().x + "] Col end [" +
//								(int)boundRect.br ().x + "]");


            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

//						Debug.Log (
//						" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
            Core.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours [boundPos].toArray()), pointMat, 3, true);
            contours [boundPos] = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours [boundPos].toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contours [boundPos].toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();
            List <Point>      listPo     = new List <Point> ();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contours [boundPos].toList() [hull.toList() [j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <MatOfPoint> defectPoints = new List <MatOfPoint> ();
            List <Point>      listPoDefect = new List <Point> ();

            for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
            {
                Point farPoint = contours [boundPos].toList() [convexDefect.toList() [j + 2]];
                int   depth    = convexDefect.toList() [j + 3];
                if (depth > threasholdSlider.value && farPoint.y < a)
                {
                    listPoDefect.Add(contours [boundPos].toList() [convexDefect.toList() [j + 2]]);
                }
//								Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
            }

            MatOfPoint e2 = new MatOfPoint();

            e2.fromList(listPo);
            defectPoints.Add(e2);

//						Debug.Log ("hull: " + hull.toList ());
//						Debug.Log ("defects: " + convexDefect.toList ());

            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

//                      int defectsTotal = (int)convexDefect.total();
//						Debug.Log ("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

//						Debug.Log ("numberOfFingers " + numberOfFingers);

//						Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Core.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
Code Example #22
0
    //public delegate void Process(int[] tgrdeteced);
    void tagramDetect(Mat t_rgbaMat, Action <TangramResultModel, List <MyShape> > prc)
    {
        List <MyShape> lms = new List <MyShape>();

        System.Diagnostics.Stopwatch watch = null;

        long elapsedMs;
        TangramResultModel trm = null;

        Observable.Start(() =>
        {
            mut.WaitOne();
            Imgproc.resize(t_rgbaMat, rgbaMat, new Size(nW_goc, nH_goc));
            watch = System.Diagnostics.Stopwatch.StartNew();

            if (warp != null)
            {
                warp.Init(rgbaMat);
                Mat wMat = warp.warpPerspective(rgbaMat);
                rgbaMat  = wMat.submat(0, nH, 0, nW);
            }
            else
            {
                rgbaMat = rgbaMat.submat(0, nH, 0, nW);
            }

            all_thresh      = Mat.zeros(nH, nW, CvType.CV_8UC3);
            all_thresh_afct = Mat.zeros(nH, nW, CvType.CV_8UC3);
            dbMat           = Mat.zeros(nH, nW, CvType.CV_8UC3);
            all_thresh_af   = Mat.zeros(nH, nW, CvType.CV_8UC3);

            rgbaMat.copyTo(rgbMat);
            rgbMat.convertTo(rgbMat2, CvType.CV_8UC3, 0.8, 60);
            rgbMat2.copyTo(rgbMat2copy);
            rgbMat.convertTo(rgbMat3, CvType.CV_8UC3, 1, 60);
            rgbMat.convertTo(rgbMat4, CvType.CV_8UC3, 1.25, 35);
            rgbMat.convertTo(rgbMat, CvType.CV_8UC3, 1.25, 35);


            Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat2, hsvMat2, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat3, hsvMat3, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat3, hsvMat4, Imgproc.COLOR_RGB2HSV);

            watch.Stop();
            elapsedMs = watch.ElapsedMilliseconds;

            Mat markers = Mat.zeros(rgbaMat.size(), CvType.CV_32SC1);

            watch = System.Diagnostics.Stopwatch.StartNew();
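            // Threshold each tangram colour in HSV; orange, yellow and green use the brightened copy (hsvMat2), and red additionally merges in a low-hue band.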

            for (int obj_i = 0; obj_i < ls_obj.Length; obj_i++)
            {
                var obj = ls_obj[obj_i];

                if (obj_i == (int)tgr.ORANGE | obj_i == (int)tgr.YELLOW | obj_i == (int)tgr.GREEN)
                {
                    Core.inRange(hsvMat2, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }
                else if (obj_i == (int)tgr.LIGHTBLUE)
                {
                    Core.inRange(hsvMat, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }
                else
                {
                    Core.inRange(hsvMat, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }


                if (obj_i == (int)tgr.RED)
                {
                    Core.inRange(hsvMat, new Scalar(0, 20, 45), new Scalar(5, 255, 255), thresholdMat2);
                    thresholdMat2.copyTo(thresholdMat, thresholdMat2);
                }


                thresholdMatArr[obj_i] = thresholdMat.clone();
            }

            //thresholdMatArr[(int)tgr.LIGHTBLUE].setTo(new Scalar(0), thresholdMatArr[(int)tgr.BLUE]);
            //thresholdMatArr[(int)tgr.LIGHTBLUE].setTo(new Scalar(0), thresholdMatArr[(int)tgr.GREEN]);


            for (int obj_i = 0; obj_i < ls_obj.Length; obj_i++)
            {
                var obj = ls_obj[obj_i];

                all_cts.Clear();
                thresholdMat = thresholdMatArr[obj_i];
                if (toggle_db[obj_i] == true)
                {
                    all_thresh.setTo(obj.ColorRGB, thresholdMat);
                }

                if (true | obj_i == (int)tgr.PURPLE | obj_i == (int)tgr.YELLOW | obj_i == (int)tgr.RED | obj_i == (int)tgr.GREEN | obj_i == (int)tgr.ORANGE)
                {
                    Imgproc.erode(thresholdMat, thresholdMat2, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)), new Point(-1, -1), 1);
                }
                if (obj_i == (int)tgr.LIGHTBLUE | obj_i == (int)tgr.PURPLE)
                {
                    Imgproc.erode(thresholdMat, thresholdMat2, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)), new Point(-1, -1), 1);
                }

                if (toggle_db[obj_i] == true)
                {
                    all_thresh_af.setTo(obj.ColorRGB, thresholdMat2);
                }
                all_thresh_afct.setTo(new Scalar(obj_i + 1), thresholdMat2);

                color_filter.Add(thresholdMat2.clone());

                Imgproc.findContours(thresholdMat2, all_cts, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
                Scalar c = obj.getColor();

                for (int ct_i = 0; ct_i < all_cts.Count; ct_i++)
                {
                    double area = Imgproc.contourArea(all_cts[ct_i]);
                    // if (area < MIN_OBJECT_AREA)
                    if (area < MIN_OBJECT_AREAS[obj_i] * 0.55)
                    {
                        all_cts.RemoveAt(ct_i);
                        ct_i--;
                    }
                    if (area > MAX_OBJECT_AREAS[obj_i] * 1.3)
                    {
                        all_cts.RemoveAt(ct_i);
                        ct_i--;
                    }
                }

                MyShape chon = null;
                MyShape ms   = new MyShape();
                float dt     = 1000000;

                for (int ct_i = 0; ct_i < all_cts.Count; ct_i++)
                {
                    var ct      = all_cts[ct_i];
                    var peri    = Imgproc.arcLength(new MatOfPoint2f(ct.toArray()), true);
                    var epsilon = 0.1 * peri;
                    if (obj_i == (int)tgr.ORANGE || obj_i == (int)tgr.YELLOW)
                    {
                        epsilon = 0.065 * peri;
                    }
                    Imgproc.approxPolyDP(new MatOfPoint2f(ct.toArray()), approx_ct, epsilon, true);

                    MatOfInt pts_cvh = new MatOfInt();
                    Imgproc.convexHull(ct, pts_cvh, true);
                    var cvh_numPts  = pts_cvh.toArray().Length;
                    Point[] cvh_pts = new Point[cvh_numPts];
                    var ct_pts      = ct.toArray();



                    for (int i = 0; i < cvh_numPts; i++)
                    {
                        var i1     = pts_cvh.toArray()[i];
                        var p1     = ct_pts[i1];
                        cvh_pts[i] = p1;

                        try
                        {
                            if (debug == true)
                            {
                                var i2 = pts_cvh.toArray()[(i + 1) % cvh_numPts];
                                var p2 = ct_pts[i2];
                                Imgproc.circle(rgbMat2, p1, 1, c, 2);
                            }
                        }
                        catch (Exception e)
                        {
                            Utilities.LogFormat("Here3:{0},{1},{2}", rgbMat2 == null, p1 == null, c == null);
                            Utilities.Log("Exception is {0}", e.ToString());
                            Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                        }
                    }


                    MatOfPoint2f approx_cvh = new MatOfPoint2f();

                    var epsilon2 = peri * 0.1;
                    if (obj_i == (int)tgr.ORANGE)
                    {
                        epsilon2 = peri * 0.065;
                    }
                    Imgproc.approxPolyDP(new MatOfPoint2f(cvh_pts), approx_cvh, epsilon2, true);

                    var ct_ori            = new MatOfPoint(ct.toArray());
                    MatOfPoint approx_ct2 = new MatOfPoint(approx_ct.toArray());

                    List <MatOfPoint> approx_cvh2 = new List <MatOfPoint>();
                    approx_cvh2.Add(new MatOfPoint(approx_cvh.toArray()));

                    var mu    = Imgproc.moments(approx_cvh2[0], true);
                    cterTgr.x = mu.m10 / mu.m00;
                    cterTgr.y = mu.m01 / mu.m00;

                    if (approx_ct2.size().height == 3 | approx_ct2.size().height == 4)
                    {
                        var points    = approx_cvh2[0].toArray();
                        var numpoints = points.Length;

                        ms._id = obj_i;
                        ms.ps  = new Point[numpoints];
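                        // The ratio below pushes each vertex outward from the centroid computed above; it is tuned per colour.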


                        double rat = 1.16;
                        if (obj_i == (int)tgr.PURPLE)
                        {
                            rat = 1.20;
                        }
                        else if (obj_i == (int)tgr.LIGHTBLUE)
                        {
                            rat = 1.20;
                        }
                        else if (obj_i == (int)tgr.RED | obj_i == (int)tgr.BLUE)
                        {
                            rat = 1.09;
                        }
                        else if (obj_i == (int)tgr.YELLOW)
                        {
                            rat = 1.10;
                        }
                        else if (obj_i == (int)tgr.ORANGE)
                        {
                            rat = 1.10;
                        }
                        else if (obj_i == (int)tgr.GREEN)
                        {
                            rat = 1.10;
                        }
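                        // rat scales each vertex outward from the hull centroid; the per-color factors above presumably compensate for the shrinkage introduced by HSV thresholding so the vertices land on the true corners of the piece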

                        var ind_huyen = 0;
                        var max       = -1d;
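                        // ind_huyen will hold the index where the triangle's longest edge (the hypotenuse) starts; its two endpoints are the acute vertices and the remaining vertex is the right angle (see the reordering below)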

                        if (numpoints == 3 || numpoints == 4)
                        {
                            for (int p_i = 0; p_i < numpoints; p_i++)
                            {
                                var p  = points[p_i];
                                var p2 = points[(p_i + 1) % numpoints];

                                var vect = p - cterTgr;

                                vect = vect * rat;

                                var p_new     = cterTgr + vect;
                                points[p_i].x = (int)(p_new.x * 100) / 100f;
                                points[p_i].y = (int)(p_new.y * 100) / 100f;


                                if (numpoints == 4)
                                {
                                    ms.ps[p_i] = p_new;
                                }

                                if (numpoints == 3)
                                {
                                    var vt     = p2 - p;
                                    var length = vt.x * vt.x + vt.y * vt.y;
                                    if (length > max)
                                    {
                                        ind_huyen = p_i;
                                        max       = length;
                                    }
                                }
                            }
                        }

                        if (numpoints == 3)
                        {
                            var i_nhon1 = ind_huyen;
                            var i_nhon2 = (ind_huyen + 1) % numpoints;
                            var i_vuong = (ind_huyen + 2) % numpoints;

                            ms.ps[0] = points[i_vuong];
                            ms.ps[1] = points[i_nhon1];
                            ms.ps[2] = points[i_nhon2];
                        }
                        else if (numpoints == 4)
                        {
                            if (obj_i == (int)tgr.ORANGE)
                            {
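                                // the orange piece is the parallelogram: the shorter diagonal identifies an acute vertex, and comparing the two edges incident to it decides whether the piece is mirrored (isFlip)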
                                var vt_cheo1   = ms.ps[0] - ms.ps[2];
                                var vt_cheo2   = ms.ps[1] - ms.ps[3];
                                var leng_cheo1 = vt_cheo1.x * vt_cheo1.x + vt_cheo1.y * vt_cheo1.y;
                                var leng_cheo2 = vt_cheo2.x * vt_cheo2.x + vt_cheo2.y * vt_cheo2.y;
                                var i_nhon     = 0;
                                if (leng_cheo2 > leng_cheo1)
                                {
                                    i_nhon = 1;
                                }

                                ms.ps[0] = points[i_nhon];
                                ms.ps[1] = points[(i_nhon + 1)];
                                ms.ps[2] = points[(i_nhon + 2)];
                                ms.ps[3] = points[(i_nhon + 3) % numpoints];

                                var i_prvNhon   = (i_nhon + 4 - 1) % numpoints;
                                var i_aftNhon   = i_nhon + 1;
                                var vt_prvNhon  = points[i_prvNhon] - points[i_nhon];
                                var vt_aftNhon  = points[i_aftNhon] - points[i_nhon];
                                var len_prvNhon = vt_prvNhon.x * vt_prvNhon.x + vt_prvNhon.y * vt_prvNhon.y;
                                var len_aftNhon = vt_aftNhon.x * vt_aftNhon.x + vt_aftNhon.y * vt_aftNhon.y;

                                Imgproc.line(dbMat, points[i_prvNhon], points[i_nhon], c, 1);

                                if (len_prvNhon > len_aftNhon)
                                {
                                    ms.isFlip = true;
                                    Imgproc.putText(dbMat, " IsFLIP", ms.ps[3], 1, 1, c, 1);
                                }
                                else
                                {
                                    ms.isFlip = false;
                                    Imgproc.putText(dbMat, " IsNOTFLIP", ms.ps[3], 1, 1, c, 1);
                                }
                            }
                        }

                        var centerMat = new Point(rgbMat.width() / 2f, rgbMat.height() / 2f);
                        var vtLech    = centerMat - cterTgr;
                        var dt2       = vtLech.x * vtLech.x + vtLech.y * vtLech.y;
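                        // keep the candidate whose hull centroid is closest to the image centre; dt presumably holds the best (or the maximum allowed) squared distance and is defined earlier in this method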
                        if (dt2 < dt)
                        {
                            chon = ms;
                        }
                    }
                    try
                    {
                        Imgproc.circle(rgbMat, cterTgr, 1, c, 1);
                        Imgproc.putText(rgbMat, mu.m00.ToString(), cterTgr, 1, 1, c, 1);
                    }
                    catch (Exception e)
                    {
                        Utilities.LogFormat("Here2:{0},{1},{2}", rgbMat == null, cterTgr == null, c == null);
                        Utilities.Log("Exception is {0}", e.ToString());
                        Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                    }

                    //if (approx_ct2.size().height == 3 | approx_ct2.size().height == 4) break;
                }

                if (chon != null)
                {
                    lms.Add(chon);

                    var ps = chon.ps;
                    for (int i = 0; i < ps.Length; i++)
                    {
                        var p1 = ps[i];
                        var p2 = ps[(i + 1) % ps.Length];

                        try
                        {
                            Imgproc.line(rgbMat2, p1, p2, c, 1);
                            Imgproc.line(all_thresh_afct, p1, p2, new Scalar(255, 255, 255), 1);
                            Imgproc.line(dbMat, p1, p2, c, 1);
                            Imgproc.circle(dbMat, p1, 1, c);
                        }
                        catch (Exception e)
                        {
                            Utilities.LogFormat("Here1:{0},{1},{2}", rgbMat2 == null, p1 == null, p2 == null);
                            Utilities.Log("Exception is {0}", e.ToString());
                            Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                        }
                    }
                }

                watch.Stop();
                elapsedMs = watch.ElapsedMilliseconds;
            }

            TangramShape msl = new TangramShape();
            msl.datas        = lms;
            var json         = JsonUtility.ToJson(msl);

            watch = System.Diagnostics.Stopwatch.StartNew();
            trm   = tangramFeatureModelList.Detect(msl.datas.ToArray());
            watch.Stop();
            elapsedMs = watch.ElapsedMilliseconds;

            mut.ReleaseMutex();
        }).ObserveOnMainThread().Subscribe((rx) =>
        {
            prc(trm, lms);
            if (debug == true)
            {
                mut.WaitOne();

                if (texture != null && debug == true)
                {
                    Utils.matToTexture2D(dbMat, texture);
                }
                if (dbText1 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat2copy, dbText1);
                }
                if (dbText2 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat3, dbText2);
                }
                if (dbText3 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat4, dbText3);
                }
                if (dbText4 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat, dbText4);
                }

                all_thresh_afct = all_thresh_afct * 25;
                Imgproc.cvtColor(rgbMat2, rgbMat2, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(all_thresh, all_thresh, Imgproc.COLOR_RGBA2RGB);
                Mat a = new Mat(all_thresh.size(), CvType.CV_8UC3);
                Core.addWeighted(all_thresh, 0.2, rgbMat2, 0.8, 0, a);
                if (dbText5 != null && debug == true)
                {
                    Utils.matToTexture2D(a, dbText5);
                }
                if (dbText6 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh, dbText6);
                }
                if (dbText7 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh_afct, dbText7);
                }
                if (dbText8 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh_af, dbText8);
                }
                mut.ReleaseMutex();
            }
        });
    }
コード例 #23
0
        // Detect the hand and draw it onto the image
        private static void _handPoseEstimationProcess(Mat rgbaMat, Color handColor)
        {
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

            // Set the target color on the blob detector
            detector.setHsvColor(HGColorSpuiter.ColorToScalar(handColor));

            detector.process(rgbaMat);

            List <MatOfPoint> contours = detector.getContours();
            if (contours.Count <= 0)
            {
                return;
            }

            // Build a rotated bounding rectangle aligned with the hand
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
            // Draw the region down to the wrist
            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(WristRangeColor), 2, 8, 0);

            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            // Draw the palm region
            Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), HGColorSpuiter.ColorToScalar(PalmsRangeColor), 2, 8, 0);

            // Approximate the polyline/polygon with one that has fewer vertices, so that the distance between them stays within the given precision
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
            contours[boundPos] = new MatOfPoint(pointMat.toArray());

            // Compute the convex hull and its convexity defects (the points farthest from the hull)
            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
            if (hull.toArray().Length < 3)
            {
                return;
            }
            Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

            // Collect the hull points outlining the hand
            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            // Draw the hand outline
            Imgproc.drawContours(rgbaMat, hullPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

            // Collect the points recognized as gaps between fingers
            List <MatOfPoint> defectPoints = new List <MatOfPoint>();
            List <Point>      listPoDefect = new List <Point>();
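            // convexityDefects packs each defect as 4 ints: start index, end index, farthest-point index, and a fixed-point depth (real distance * 256)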

            for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
            {
                Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
                int   depth    = convexDefect.toList()[j + 3];
                if (depth > depthThreashold && farPoint.y < a)
                {
                    listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
                }
            }

            MatOfPoint e2 = new MatOfPoint();

            e2.fromList(listPoDefect);
            defectPoints.Add(e2);

            // Update the number of detected fingers
            numberOfFingers = listPoDefect.Count;
            if (numberOfFingers > 5)
            {
                numberOfFingers = 5;
            }

            // Draw a dot in each gap between fingers
            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, HGColorSpuiter.ColorToScalar(BetweenFingersColor), -1);
            }
        }
コード例 #24
0
    string getFeatureTangram(string path)
    {
        Mat rgbMat = Imgcodecs.imread(path);

        var width  = rgbMat.width();
        var height = rgbMat.height();
        var ofsetx = 0;
        var ofsety = 0;
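        // when the source image exceeds 4096 px in either dimension, ofsetx/ofsety recentre the coordinates (they are subtracted from every vertex at the end of this method); 4096 is presumably a texture-size limit used elsewhere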

        if (width > 4096)
        {
            ofsetx = (width - 4096) / 2;
        }
        if (height > 4096)
        {
            ofsety = (height - 4096) / 2;
        }

        var rat = (float)rgbMat.width() / (float)rgbMat.height();

        Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_RGBA2BGR);
        Mat rgbMat2 = new Mat(rgbMat.size(), rgbMat.type());

        if (debug == true)
        {
            mainDebug.GetComponent <AspectRatioFitter>().aspectRatio = rat;
            debug1.GetComponent <AspectRatioFitter>().aspectRatio    = rat;
            debug2.GetComponent <AspectRatioFitter>().aspectRatio    = rat;
            debug3.GetComponent <AspectRatioFitter>().aspectRatio    = rat;
            debug4.GetComponent <AspectRatioFitter>().aspectRatio    = rat;
            debug5.GetComponent <AspectRatioFitter>().aspectRatio    = rat;
            debug6.GetComponent <AspectRatioFitter>().aspectRatio    = rat;
            debug7.GetComponent <AspectRatioFitter>().aspectRatio    = rat;
            debug8.GetComponent <AspectRatioFitter>().aspectRatio    = rat;
        }
        Mat hsvMat = new Mat();

        Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
        Debug.Log(rgbMat.width());

        if (debug == true)
        {
            maxHeight = (int)(maxWidth / rat);

            texture = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
            dbTxt1  = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
            dbTxt2  = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
            dbTxt3  = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
            dbTxt4  = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
            dbTxt5  = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
            dbTxt6  = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
            dbTxt7  = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
            dbTxt8  = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);

            mainDebug.texture = texture;
            debug1.texture    = dbTxt1;
            debug2.texture    = dbTxt2;
            debug3.texture    = dbTxt3;
            debug4.texture    = dbTxt4;
            debug5.texture    = dbTxt5;
            debug6.texture    = dbTxt6;
            debug7.texture    = dbTxt7;
            debug8.texture    = dbTxt8;
        }

        if (debug)
        {
            Mat a = new Mat();
            Imgproc.resize(rgbMat, a, new Size(maxWidth, maxHeight));
            Utils.matToTexture2D(a, dbTxt4);
        }


        Mat threshold  = new Mat();
        Mat threshold2 = new Mat();

        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat          hierarchy     = new Mat();
        MatOfPoint2f mop2f         = new MatOfPoint2f();


        TangramShape   blackShape = new TangramShape();
        List <MyShape> ls_shapes  = new List <MyShape>();

        blackShape.datas = ls_shapes;


        bool[] OK = new bool[7];

        for (var obj_i = 0; obj_i < 7; obj_i++)
        {
            var obj = ls_obj[obj_i];

            Core.inRange(hsvMat, obj.HSVmin, obj.HSVmax, threshold);
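            // red hue wraps around 0 in HSV, so the red piece needs a second range merged into the same mask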
            if (obj_i == (int)tgr.RED)
            {
                Core.inRange(hsvMat, obj.lower_HSVMin, obj.lower_HSVMax, threshold2);
                threshold2.copyTo(threshold, threshold2);
            }

            if (obj_i == (int)tgr.YELLOW)
            {
                if (debug)
                {
                    matToTexture(threshold, dbTxt3);
                }
            }

            contours.Clear();

            Imgproc.findContours(threshold, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            for (int ct_i = 0; ct_i < contours.Count; ct_i++)
            {
                if (Imgproc.contourArea(contours[ct_i]) < MIN_OBJECT_AREA)
                {
                    contours.RemoveAt(ct_i);
                    ct_i--;
                }
            }


            Scalar c = hsv2rgb(obj.getColor());
            for (int ct_i = 0; ct_i < contours.Count; ct_i++)
            {
                var ct   = contours[ct_i];
                var peri = Imgproc.arcLength(new MatOfPoint2f(ct.toArray()), true);

                Imgproc.approxPolyDP(new MatOfPoint2f(ct.toArray()), mop2f, 0.05 * peri, true);
                {
                    MyShape ms = new MyShape();

                    var points = mop2f.toArray();

                    var index     = -1;
                    var max       = -1d;
                    var numPoints = points.Length;
                    ms._id = obj_i;
                    ms.ps  = new Point[numPoints];

                    if (numPoints == 3)
                    {
                        OK[obj_i] = true;

                        for (var p_i = 0; p_i < numPoints; p_i++)
                        {
                            //Debug.LogFormat("p1 = {0}, p2 = {1}", p_i % numPoints, (p_i + 1) % numPoints);
                            var   p1  = points[p_i % numPoints];
                            var   p2  = points[(p_i + 1) % numPoints];
                            var   vt  = p2 - p1;
                            float len = (float)(vt.x * vt.x + vt.y * vt.y);
                            if (len > max)
                            {
                                index = p_i;
                                max   = len;
                            }
                        }
                        var i_nhon1 = index;
                        var i_nhon2 = (index + 1) % numPoints;
                        var i_vuong = (index + 2) % numPoints;

                        ms.ps[0] = points[i_vuong];
                        ms.ps[1] = points[i_nhon1];
                        ms.ps[2] = points[i_nhon2];

                        Imgproc.putText(rgbMat2, "1", points[i_nhon1], 1, 20, c, 10);
                        Imgproc.putText(rgbMat2, "2", points[i_nhon2], 1, 20, c, 10);
                        Imgproc.putText(rgbMat2, "0", points[i_vuong], 1, 20, c, 10);
                    }
                    else if (numPoints == 4)
                    {
                        if (obj_i == (int)tgr.YELLOW)
                        {
                            OK[obj_i] = true;
                            Debug.Log("Hello, yellow piece");
                            ms.ps[0] = points[0];
                            ms.ps[1] = points[1];
                            ms.ps[2] = points[2];
                            ms.ps[3] = points[3];
                        }
                        else if (obj_i == (int)tgr.ORANGE)
                        {
                            OK[obj_i] = true;
                            Debug.Log("Hello world");

                            var vt_cheo1 = points[0] - points[2];
                            var vt_cheo2 = points[1] - points[3];

                            var len_cheo1 = vt_cheo1.x * vt_cheo1.x + vt_cheo1.y * vt_cheo1.y;
                            var len_cheo2 = vt_cheo2.x * vt_cheo2.x + vt_cheo2.y * vt_cheo2.y;
                            var i_nhon    = 0;
                            if (len_cheo2 > len_cheo1)
                            {
                                i_nhon = 1;
                            }
                            ms.ps[0] = points[i_nhon];
                            ms.ps[1] = points[(i_nhon + 1)];
                            ms.ps[2] = points[(i_nhon + 2)];
                            ms.ps[3] = points[(i_nhon + 3) % numPoints];

                            var i_prvNhon  = (i_nhon + 4 - 1) % numPoints;
                            var i_aftNhon  = i_nhon + 1;
                            var vt_prvNhon = points[i_prvNhon] - points[i_nhon];
                            var vt_aftNhon = points[i_aftNhon] - points[i_nhon];

                            //Imgproc.line(rgbMat2, points[i_prvNhon], points[i_nhon], c, 10);

                            var len_prvNhon = vt_prvNhon.x * vt_prvNhon.x + vt_prvNhon.y * vt_prvNhon.y;
                            var len_aftNhon = vt_aftNhon.x * vt_aftNhon.x + vt_aftNhon.y * vt_aftNhon.y;
                            if (len_prvNhon > len_aftNhon)
                            {
                                ms.isFlip = true;
                                Imgproc.putText(rgbMat2, " IsFLIP", ms.ps[3], 1, 20, c, 10);
                            }
                            else
                            {
                                ms.isFlip = false;
                                Imgproc.putText(rgbMat2, " IsNOTFLIP", ms.ps[3], 1, 20, c, 10);
                            }



                            Debug.Log(ms.ps.Length);
                            Debug.Log((i_nhon + 3) % numPoints);

                            if (debug == true)
                            {
                                Imgproc.putText(rgbMat2, "0", ms.ps[0], 1, 20, c, 10);
                                Imgproc.putText(rgbMat2, "1", ms.ps[1], 1, 20, c, 10);
                                Imgproc.putText(rgbMat2, "2", ms.ps[2], 1, 20, c, 10);
                                Imgproc.putText(rgbMat2, "3", ms.ps[3], 1, 20, c, 10);
                            }
                        }
                    }

                    ls_shapes.Add(ms);
                }
            }
        }

        for (var ok_i = 0; ok_i < 7; ok_i++)
        {
            if (OK[ok_i] == false)
            {
                Debug.LogError("Wrong color: " + ok_i);
            }
        }


        if (debug)
        {
            Imgproc.circle(rgbMat2, new Point(1851, 3172), 20, yellow.getColor(), 10);
            Imgproc.circle(rgbMat2, new Point(1245, 2565), 20, yellow.getColor(), 10);
            Imgproc.circle(rgbMat2, new Point(883, 2925), 20, red.getColor(), 10);
            Imgproc.circle(rgbMat2, new Point(2100, 1709), 20, red.getColor(), 10);

            Mat a = new Mat();
            Imgproc.resize(rgbMat, a, new Size(maxWidth, maxHeight));
            Utils.matToTexture2D(a, texture);
            Imgproc.resize(hsvMat, a, new Size(maxWidth, maxHeight));
            Utils.matToTexture2D(a, dbTxt1);
            Imgproc.resize(rgbMat2, a, new Size(maxWidth, maxHeight));
            Utils.matToTexture2D(a, dbTxt2);
        }

        for (int i = 0; i < blackShape.datas.Count; i++)
        {
            for (int j = 0; j < blackShape.datas[i].ps.Length; j++)
            {
                blackShape.datas[i].ps[j].x -= ofsetx;
                blackShape.datas[i].ps[j].y -= ofsety;
            }
        }

        var json = JsonUtility.ToJson(blackShape);

        return(json);
    }
コード例 #25
0
ファイル: Main.cs プロジェクト: eugenejahn/ARNoteTaking
        private Mat findPaper(Mat mainMat)
        {
            // convert the incoming frame to grayscale (mainMat is presumably the intended source here)
            Imgproc.cvtColor(mainMat, grayMat, Imgproc.COLOR_BGR2GRAY);
            // blur the grayscale image
            Imgproc.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);


            grayMat.get(0, 0, grayPixels);
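            // coarse 3-level quantization of the gray frame (dark / mid / bright) before Otsu thresholding and Canny edge extraction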

            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels[i] = 0;

                if (grayPixels[i] < 70)
                {
                    grayPixels[i] = 0;

                    //maskPixels [i] = 1;
                }
                else if (70 <= grayPixels[i] && grayPixels[i] < 120)
                {
                    grayPixels[i] = 100;
                }
                else
                {
                    grayPixels[i] = 255;
                    //maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);

            // thresholding makes the image black and white (Otsu)
            Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);

            //extract the edge image
            Imgproc.Canny(grayMat, grayMat, 50, 50);


            //prepare for finding contours
            List <MatOfPoint> contours = new List <MatOfPoint>();

            Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            List <MatOfPoint> tmpTargets = new List <MatOfPoint>();


            for (int i = 0; i < contours.Count; i++)
            {
                MatOfPoint   cp = contours[i];
                MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
                double       p  = Imgproc.arcLength(cn, true);

                MatOfPoint2f approx = new MatOfPoint2f();

                // larger epsilon (0.03 of the perimeter) tolerates more skew
                // approximate the contour with a simpler, more readable polygon
                Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);

                //find contours with 4 points
                if (approx.toArray().Length == 4)
                {
                    MatOfPoint approxPt = new MatOfPoint();
                    approx.convertTo(approxPt, CvType.CV_32S);
                    float maxCosine  = 0;
                    float rate       = 0;
                    float min_length = 100000000000000;
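                    // despite its name, maxCosine accumulates the largest corner angle in degrees (via Vector2.Angle); min_length tracks the shortest side of the candidate quad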


                    for (int j = 2; j < 5; j++)
                    {
                        Vector2 v1 = new Vector2((float)(approx.toArray()[j % 4].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j % 4].y - approx.toArray()[j - 1].y));
                        Vector2 v2 = new Vector2((float)(approx.toArray()[j - 2].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j - 2].y - approx.toArray()[j - 1].y));

                        float v1_length = Mathf.Sqrt(v1.x * v1.x + v1.y * v1.y);
                        float v2_length = Mathf.Sqrt(v2.x * v2.x + v2.y * v2.y);

                        min_length = Mathf.Min(Mathf.Min((float)(v1_length), (float)v2_length), min_length);


                        if (v1_length > v2_length)
                        {
                            rate = v2_length / v1_length;
                        }
                        else
                        {
                            rate = v1_length / v2_length;
                        }



                        float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                        maxCosine = Mathf.Max(maxCosine, angle);
                    }


                    if (min_length > 100 && maxCosine < 135f)//  && rate >= 0.6  maxCosine < 135f &&
                    {
                        tmpTargets.Add(approxPt);
                        //Debug.Log("Length -----------" + min_length);

                        //Debug.Log("------------rate" + rate + "---------------");
                    }
                }
            }
            if (tmpTargets.Count > 0)
            {
                // -----------------------DRAW RECTANGLE---------------------------
                //MatOfPoint2f approxCurve = new MatOfPoint2f();

                //for (int i = 0; i < tmpTargets.Count; i++)
                //{
                //    //Convert contours(i) from MatOfPoint to MatOfPoint2f
                //    MatOfPoint2f contour2f = new MatOfPoint2f(tmpTargets[i].toArray());
                //    //Processing on mMOP2f1 which is in type MatOfPoint2f
                //    double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
                //    Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);

                //    //Convert back to MatOfPoint
                //    MatOfPoint points = new MatOfPoint(approxCurve.toArray());

                //    // Get bounding rect of contour
                //    OpenCVForUnity.Rect rect = Imgproc.boundingRect(points);

                //    // draw enclosing rectangle (all same color, but you could use variable i to make them unique)
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 130, 255), 3);
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y), new Point(rect.x + 5, rect.y + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x + rect.width, rect.y), new Point(rect.x + +rect.width + 5, rect.y + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x + rect.width, rect.y + rect.height), new Point(rect.x + +rect.width + 5, rect.y + rect.height + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y + rect.height), new Point(rect.x + 5, rect.y + rect.height + 5), new Scalar(0, 0, 255), 5);

                //}
                // -----------------------DRAW RECTANGLE---------------------------



                // get the first contours

                int largestPaper = findLargestContour(tmpTargets);
                //Debug.Log(largestPaper);
                // using the largest one
                paperCornerMatOfPoint = tmpTargets[largestPaper];


                // draw boundary
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[1], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[2], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[1], paperCornerMatOfPoint.toList()[2], new Scalar(0, 255, 0), 3);

                // extract target from the frame and adjust some angle....
                Mat srcPointsMat = Converters.vector_Point_to_Mat(paperCornerMatOfPoint.toList(), CvType.CV_32F);

                List <Point> dstPoints = new List <Point>();
                dstPoints.Add(new Point(0, 0));
                dstPoints.Add(new Point(0, 300));
                dstPoints.Add(new Point(200, 300));
                dstPoints.Add(new Point(200, 0));
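                // the four destination corners map the detected sheet onto an upright 200x300 image for the perspective warp below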

                Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);
                //Make perspective transform
                Mat m         = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
                Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
                Imgproc.warpPerspective(mainMat, warpedMat, m, new Size(200, 300), Imgproc.INTER_LINEAR);
                warpedMat.convertTo(warpedMat, CvType.CV_8UC3);


                Texture2D finalTargetTexture = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
                Utils.matToTexture2D(warpedMat, finalTargetTexture);

                targetRawImage.texture = finalTargetTexture;
                //Debug.Log(paperCornerMatOfPoint.toList()[0].ToString() + " " + paperCornerMatOfPoint.toList()[1].ToString()+ " " + paperCornerMatOfPoint.toList()[2].ToString()+ " " + paperCornerMatOfPoint.toList()[3].ToString());
            }
            //--------------------------------------------------------


            return(mainMat);
        }
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            detector.Process(rgbaMat);

            List <MatOfPoint> contours = detector.GetContours();

            //Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));

            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //(int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");

            Point bottomLeft  = new Point(boundRect.x, boundRect.y + boundRect.height);
            Point topLeft     = new Point(boundRect.x, boundRect.y);
            Point bottomRight = new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height);
            Point topRight    = new Point(boundRect.x + boundRect.width, boundRect.y);

            rectPoints = new MatOfPoint2f(new Point(boundRect.x, boundRect.y),                                      //topleft
                                          new Point(boundRect.x + boundRect.width, boundRect.y),                    //Top Right
                                          new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height), //Bottom Right
                                          new Point(boundRect.x, boundRect.y + boundRect.height)                    //Bottom Left
                                          );

            //double a = boundRect.br ().y - boundRect.tl ().y;
            //a = a * 0.7;
            //a = boundRect.tl ().y + a;

            //Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

            List <Point3> m_markerCorners3dList = new List <Point3>();

            m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
            m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
            m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
            m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
            m_markerCorners3d.fromList(m_markerCorners3dList);
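            // the bounding rect is treated as a 1x1 planar marker: solvePnP with these unit-square corners and the rect's 2D corners yields a rough camera pose for the hand region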

            //estimate pose
            Mat Rvec = new Mat();
            Mat Tvec = new Mat();
            Mat raux = new Mat();
            Mat taux = new Mat();

            Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);

            raux.convertTo(Rvec, CvType.CV_32F);
            taux.convertTo(Tvec, CvType.CV_32F);

            rotMat = new Mat(3, 3, CvType.CV_64FC1);
            Calib3d.Rodrigues(Rvec, rotMat);

            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
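            // transformationM is the 4x4 [R|t] pose matrix expressed in OpenCV's camera coordinates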

            //Debug.Log ("transformationM " + transformationM.ToString ());

            Rvec.Dispose();
            Tvec.Dispose();
            raux.Dispose();
            taux.Dispose();
            rotMat.Dispose();

            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
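            // invertYM / invertZM (defined elsewhere in this class) convert between OpenCV's right-handed, y-down camera frame and Unity's left-handed, y-up coordinates before applying the AR camera's local-to-world transform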
            //Debug.Log("arM " + ARM.ToString());

            if (ARGameObject != null)
            {
                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                if (deactivateCoroutine == null)
                {
                    deactivateCoroutine = StartCoroutine(Wait(10.0f));
                }
                ARGameObject.SetActive(true);
            }

            //end pose estimation

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contour.toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <Point> listPoDefect = new List <Point>();

            if (convexDefect.rows() > 0)
            {
                List <int>   convexDefectList = convexDefect.toList();
                List <Point> contourList      = contour.toList();
                for (int j = 0; j < convexDefectList.Count; j = j + 4)
                {
                    Point farPoint = contourList[convexDefectList[j + 2]];
                    int   depth    = convexDefectList[j + 3];
                    //if (depth > threasholdSlider.value && farPoint.y < a)
                    //{
                    //    listPoDefect.Add(contourList[convexDefectList[j + 2]]);
                    //}
                    //Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
                }
            }


            Debug.Log("hull: " + hull.toList());
            if (convexDefect.rows() > 0)
            {
                Debug.Log("defects: " + convexDefect.toList());
            }

            //use these contours to do heart detection
            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            int defectsTotal = (int)convexDefect.total();

            Debug.Log("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            Debug.Log("numberOfFingers " + numberOfFingers);

            Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
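
        // -----------------------------------------------------------------
        // Minimal sketch (not part of the samples above): counting the gaps
        // between raised fingers from convexity defects, using the same
        // OpenCVForUnity calls as コード例 #23 and #25. The parameter names
        // depthThresholdPx (minimum defect depth in pixels) and palmBottomY
        // (lower bound of the palm area) are hypothetical.
        // -----------------------------------------------------------------
        private static int CountFingerGaps(MatOfPoint contour, int depthThresholdPx, double palmBottomY)
        {
            MatOfInt hull = new MatOfInt();
            Imgproc.convexHull(contour, hull);
            if (hull.toArray().Length < 3)
            {
                return 0;
            }

            MatOfInt4 defects = new MatOfInt4();
            Imgproc.convexityDefects(contour, hull, defects);

            List<Point> contourPoints = contour.toList();
            List<int> defectList      = defects.toList();

            int gaps = 0;
            for (int j = 0; j + 3 < defectList.Count; j += 4)
            {
                // each defect: [start index, end index, farthest-point index, fixed-point depth]
                Point farPoint = contourPoints[defectList[j + 2]];
                int fixptDepth = defectList[j + 3]; // real depth * 256
                if (fixptDepth > depthThresholdPx * 256 && farPoint.y < palmBottomY)
                {
                    gaps++;
                }
            }

            // the samples above simply cap this count at 5 and use it as the finger count
            return Mathf.Min(gaps, 5);
        }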