private void GetCubies(List <MatOfPoint> contours, Mat imgMat, int index, List <Cubies> cubies)
    {
        MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
        MatOfPoint2f approxCurve  = new MatOfPoint2f();
        MatOfPoint   approx       = new MatOfPoint();

        foreach (var contour in contours)
        {
            matOfPoint2f.fromList(contour.toList());
            Imgproc.approxPolyDP(matOfPoint2f, approxCurve, 0.1 * Imgproc.arcLength(matOfPoint2f, true), true);

            try
            {
                approxCurve.convertTo(approx, CvType.CV_32S);
                OpenCVForUnity.Rect rect = Imgproc.boundingRect(approx);

                if (approx.total() == 4)
                {
                    cubies.Add(new Cubies(rect.x, rect.y, colorsList[index]));
                    Imgproc.rectangle(imgMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 40, 150), 2);
                }
            }
            catch (ArgumentOutOfRangeException) { /* index has no matching entry in colorsList; skip this contour */ }
        }

        print("Number of cubies: " + cubies.Count);
    }
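
The contours passed to GetCubies are typically produced per color channel before the call. A minimal sketch of how that input might be built is shown below; the HSV ranges, the DetectCubies name, and the assumption that the frame is RGB are illustrative, not part of the original example.

using System.Collections.Generic;
using OpenCVForUnity;

    // Hypothetical caller: threshold the frame once per color range, then hand the
    // resulting contours to GetCubies together with the index into colorsList.
    private void DetectCubies(Mat rgbMat, List<Cubies> cubies)
    {
        Mat hsvMat = new Mat();
        Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);

        // Example HSV ranges; real values depend on lighting and must be tuned,
        // and their order must line up with colorsList.
        List<Scalar[]> colorRanges = new List<Scalar[]>
        {
            new Scalar[] { new Scalar(0, 100, 100),   new Scalar(10, 255, 255) },  // red-ish
            new Scalar[] { new Scalar(100, 100, 100), new Scalar(130, 255, 255) }, // blue-ish
        };

        for (int index = 0; index < colorRanges.Count; index++)
        {
            Mat mask = new Mat();
            Core.inRange(hsvMat, colorRanges[index][0], colorRanges[index][1], mask);

            List<MatOfPoint> contours = new List<MatOfPoint>();
            Imgproc.findContours(mask, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            GetCubies(contours, rgbMat, index, cubies);
            mask.Dispose();
        }
        hsvMat.Dispose();
    }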
Example #2
    //Identify contours
    private bool analysisContoursRect(int index, List <MatOfPoint> contours, Mat result, List <MatchObject> matchObject)
    {
        OpenCVForUnity.Rect _testDepthRect = Imgproc.boundingRect(contours[index]);
        float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (_testDepthRect.area() > minAreaSize)
        {
            //Containers for the convex-hull point data
            MatOfInt          hullInt       = new MatOfInt();
            List <Point>      hullPointList = new List <Point>();
            MatOfPoint        hullPointMat  = new MatOfPoint();
            List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
            MatOfInt4         defects       = new MatOfInt4();
            //Simplify the contour points
            MatOfPoint2f Temp2f = new MatOfPoint2f();
            //Convert contours[index] from MatOfPoint to MatOfPoint2f
            contours[index].convertTo(Temp2f, CvType.CV_32FC2);
            //approxPolyDP works on MatOfPoint2f
            Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
            //Convert back to MatOfPoint and put the new values back into the contours list
            Temp2f.convertTo(contours[index], CvType.CV_32S);

            //Compute the convex hull around the contour
            Imgproc.convexHull(contours[index], hullInt);
            List <Point> pointMatList = contours[index].toList();
            List <int>   hullIntList  = hullInt.toList();
            for (int j = 0; j < hullIntList.Count; j++)
            {
                hullPointList.Add(pointMatList[hullIntList[j]]);
                hullPointMat.fromList(hullPointList);
                hullPoints.Add(hullPointMat);
            }
            if (hullIntList.Count == 4)
            {
                if (!setMatchObject(index, pointMatList, contours, hullPoints, result, matchObject))
                {
                    //Debug.Log("setMatchObject fail");
                }
            }
            //Release temporary buffers
            defects.Dispose();
            hullPointList.Clear();
            hullPointMat.Dispose();
            hullInt.Dispose();
            hullPoints.Clear();
            return(true);
        }
        return(false);
    }
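
Imgproc.convexHull fills hullInt with indices into the contour, which is why the loop above maps hullIntList[j] through pointMatList. A standalone sketch of that mapping follows; the helper name GetHullPoints is hypothetical and not part of the original listing.

using System.Collections.Generic;
using OpenCVForUnity;

    // Hypothetical helper: return the convex-hull vertices of a contour as points.
    // convexHull yields indices into the input contour, not coordinates.
    private static List<Point> GetHullPoints(MatOfPoint contour)
    {
        MatOfInt hullIndices = new MatOfInt();
        Imgproc.convexHull(contour, hullIndices);

        List<Point> contourPoints = contour.toList();
        List<Point> hullPoints    = new List<Point>();
        foreach (int idx in hullIndices.toList())
        {
            hullPoints.Add(contourPoints[idx]);
        }
        hullIndices.Dispose();
        return hullPoints;
    }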
Example #3
        private void Find4PointContours(Mat image, List <MatOfPoint> contours)
        {
            contours.Clear();
            List <MatOfPoint> tmp_contours = new List <MatOfPoint>();
            Mat hierarchy = new Mat();

            Imgproc.findContours(image, tmp_contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            foreach (var cnt in tmp_contours)
            {
                MatOfInt hull = new MatOfInt();
                Imgproc.convexHull(cnt, hull, false);

                Point[] cnt_arr  = cnt.toArray();
                int[]   hull_arr = hull.toArray();
                Point[] pts      = new Point[hull_arr.Length];
                for (int i = 0; i < hull_arr.Length; i++)
                {
                    pts[i] = cnt_arr[hull_arr[i]];
                }

                MatOfPoint2f ptsFC2    = new MatOfPoint2f(pts);
                MatOfPoint2f approxFC2 = new MatOfPoint2f();
                MatOfPoint   approxSC2 = new MatOfPoint();

                double arclen = Imgproc.arcLength(ptsFC2, true);
                Imgproc.approxPolyDP(ptsFC2, approxFC2, 0.01 * arclen, true);
                approxFC2.convertTo(approxSC2, CvType.CV_32S);

                if (approxSC2.size().area() != 4)
                {
                    continue;
                }

                contours.Add(approxSC2);
            }
        }
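
A possible call site for Find4PointContours, assuming a binary edge or threshold image has already been prepared; the parameter names binaryMat and rgbaMat and the DrawQuads wrapper are placeholders, not part of the original code.

using System.Collections.Generic;
using UnityEngine;
using OpenCVForUnity;

        // Hypothetical usage: binaryMat is a thresholded/edge image, rgbaMat is the frame to draw on.
        private void DrawQuads(Mat binaryMat, Mat rgbaMat)
        {
            List<MatOfPoint> quads = new List<MatOfPoint>();
            Find4PointContours(binaryMat, quads);

            // contourIdx = -1 draws every contour in the list.
            Imgproc.drawContours(rgbaMat, quads, -1, new Scalar(0, 255, 0, 255), 2);
            Debug.Log("4-point contours found: " + quads.Count);
        }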
Example #4
        private Mat findPaper(Mat mainMat)
        {
            //grayMat is expected to already hold the current color frame (set elsewhere); convert it to grayscale in place
            Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
            // blur image
            Imgproc.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);


            grayMat.get(0, 0, grayPixels);

            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels[i] = 0;

                if (grayPixels[i] < 70)
                {
                    grayPixels[i] = 0;

                    //maskPixels [i] = 1;
                }
                else if (70 <= grayPixels[i] && grayPixels[i] < 120)
                {
                    grayPixels[i] = 100;
                }
                else
                {
                    grayPixels[i] = 255;
                    //maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);

            //thresholding makes the image black and white
            Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);

            //extract the edge image
            Imgproc.Canny(grayMat, grayMat, 50, 50);


            //prepare for finding contours
            List <MatOfPoint> contours = new List <MatOfPoint>();

            Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            List <MatOfPoint> tmpTargets = new List <MatOfPoint>();


            for (int i = 0; i < contours.Count; i++)
            {
                MatOfPoint   cp = contours[i];
                MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
                double       p  = Imgproc.arcLength(cn, true);

                MatOfPoint2f approx = new MatOfPoint2f();

                // larger skew may need an epsilon greater than 0.03
                //approximate the contour with a simpler polygon
                Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);

                //find contours with 4 points
                if (approx.toArray().Length == 4)
                {
                    MatOfPoint approxPt = new MatOfPoint();
                    approx.convertTo(approxPt, CvType.CV_32S);
                    float maxCosine  = 0;
                    float rate       = 0;
                    float min_length = float.MaxValue;


                    for (int j = 2; j < 5; j++)
                    {
                        Vector2 v1 = new Vector2((float)(approx.toArray()[j % 4].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j % 4].y - approx.toArray()[j - 1].y));
                        Vector2 v2 = new Vector2((float)(approx.toArray()[j - 2].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j - 2].y - approx.toArray()[j - 1].y));

                        float v1_length = Mathf.Sqrt(v1.x * v1.x + v1.y * v1.y);
                        float v2_length = Mathf.Sqrt(v2.x * v2.x + v2.y * v2.y);

                        min_length = Mathf.Min(Mathf.Min((float)(v1_length), (float)v2_length), min_length);


                        if (v1_length > v2_length)
                        {
                            rate = v2_length / v1_length;
                        }
                        else
                        {
                            rate = v1_length / v2_length;
                        }



                        //Vector2.Angle returns degrees, so maxCosine actually tracks the largest corner angle
                        float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                        maxCosine = Mathf.Max(maxCosine, angle);
                    }


                    if (min_length > 100 && maxCosine < 135f)//  && rate >= 0.6  maxCosine < 135f &&
                    {
                        tmpTargets.Add(approxPt);
                        //Debug.Log("Length -----------" + min_length);

                        //Debug.Log("------------rate" + rate + "---------------");
                    }
                }
            }
            if (tmpTargets.Count > 0)
            {
                // -----------------------DRAW RECTANGLE---------------------------
                //MatOfPoint2f approxCurve = new MatOfPoint2f();

                //for (int i = 0; i < tmpTargets.Count; i++)
                //{
                //    //Convert contours(i) from MatOfPoint to MatOfPoint2f
                //    MatOfPoint2f contour2f = new MatOfPoint2f(tmpTargets[i].toArray());
                //    //Processing on mMOP2f1 which is in type MatOfPoint2f
                //    double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
                //    Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);

                //    //Convert back to MatOfPoint
                //    MatOfPoint points = new MatOfPoint(approxCurve.toArray());

                //    // Get bounding rect of contour
                //    OpenCVForUnity.Rect rect = Imgproc.boundingRect(points);

                //    // draw enclosing rectangle (all same color, but you could use variable i to make them unique)
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 130, 255), 3);
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y), new Point(rect.x + 5, rect.y + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x + rect.width, rect.y), new Point(rect.x + +rect.width + 5, rect.y + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x + rect.width, rect.y + rect.height), new Point(rect.x + +rect.width + 5, rect.y + rect.height + 5), new Scalar(0, 0, 255), 5);
                //    Imgproc.rectangle(mainMat, new Point(rect.x, rect.y + rect.height), new Point(rect.x + 5, rect.y + rect.height + 5), new Scalar(0, 0, 255), 5);

                //}
                // -----------------------DRAW RECTANGLE---------------------------



                // pick the largest 4-point contour as the paper

                int largestPaper = findLargestContour(tmpTargets);
                //Debug.Log(largestPaper);
                paperCornerMatOfPoint = tmpTargets[largestPaper];


                // draw boundary
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[1], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[2], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
                Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[1], paperCornerMatOfPoint.toList()[2], new Scalar(0, 255, 0), 3);

                // extract target from the frame and adjust some angle....
                Mat srcPointsMat = Converters.vector_Point_to_Mat(paperCornerMatOfPoint.toList(), CvType.CV_32F);

                List <Point> dstPoints = new List <Point>();
                dstPoints.Add(new Point(0, 0));
                dstPoints.Add(new Point(0, 300));
                dstPoints.Add(new Point(200, 300));
                dstPoints.Add(new Point(200, 0));

                Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);
                //Make perspective transform
                Mat m         = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
                Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
                Imgproc.warpPerspective(mainMat, warpedMat, m, new Size(200, 300), Imgproc.INTER_LINEAR);
                warpedMat.convertTo(warpedMat, CvType.CV_8UC3);


                Texture2D finalTargetTexture = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
                Utils.matToTexture2D(warpedMat, finalTargetTexture);

                targetRawImage.texture = finalTargetTexture;
                //Debug.Log(paperCornerMatOfPoint.toList()[0].ToString() + " " + paperCornerMatOfPoint.toList()[1].ToString()+ " " + paperCornerMatOfPoint.toList()[2].ToString()+ " " + paperCornerMatOfPoint.toList()[3].ToString());
            }
            //--------------------------------------------------------


            return(mainMat);
        }
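
getPerspectiveTransform pairs the i-th source point with the i-th destination point, so the warp above only comes out upright if paperCornerMatOfPoint lists its corners in the same order as dstPoints. One common way to impose that order before building srcPointsMat is sketched below; the OrderCorners helper is an assumption, not part of the original code.

using System.Collections.Generic;
using System.Linq;
using OpenCVForUnity;

        // Hypothetical helper: order four corners as top-left, bottom-left, bottom-right, top-right,
        // matching the (0,0), (0,300), (200,300), (200,0) destination order used above.
        private static List<Point> OrderCorners(List<Point> corners)
        {
            // Top-left has the smallest x+y, bottom-right the largest;
            // bottom-left has the largest y-x, top-right the smallest.
            Point topLeft     = corners.OrderBy(p => p.x + p.y).First();
            Point bottomRight = corners.OrderBy(p => p.x + p.y).Last();
            Point bottomLeft  = corners.OrderBy(p => p.y - p.x).Last();
            Point topRight    = corners.OrderBy(p => p.y - p.x).First();
            return new List<Point> { topLeft, bottomLeft, bottomRight, topRight };
        }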
Example #5
    //Use the depth contours to classify colors in the RGB frame
    public Mat getContours(Mat srcColorMat, Mat srcDepthMat)
    {
        Mat ColorMat = new Mat();
        Mat DepthMat = new Mat();
        Mat HsvMat   = new Mat();

        srcColorMat.copyTo(ColorMat);
        srcDepthMat.copyTo(DepthMat);
        Imgproc.cvtColor(ColorMat, HsvMat, Imgproc.COLOR_BGR2HSV);

        List <ColorObject> colorObjects        = new List <ColorObject>();
        Mat                  resultMat         = Mat.zeros(DepthMat.height(), DepthMat.width(), CvType.CV_8UC1);
        Mat                  hierarchy         = new Mat();
        List <Point>         ConsistP          = new List <Point>();
        List <MatOfPoint>    contours          = new List <MatOfPoint>();
        List <List <Point> > trianglePointList = new List <List <Point> >();

        Imgproc.findContours(DepthMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        int           numObjects    = contours.Count;
        List <Scalar> clickRGB      = new List <Scalar>();
        List <Scalar> clickHSV      = new List <Scalar>();
        List <int>    HullCountList = new List <int>();

        for (int i = 0; i < numObjects; i++)
        {
            Imgproc.drawContours(resultMat, contours, i, new Scalar(255), 1);
        }
        double[] GetRGB      = new double[10];
        float    minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (numObjects > 0)
        {
            for (int index = 0; index < numObjects; index++)
            {
                OpenCVForUnity.Rect R0 = Imgproc.boundingRect(contours[index]);

                if (R0.area() > minAreaSize)
                {
                    //Containers for the convex-hull point data
                    MatOfInt          hullInt       = new MatOfInt();
                    List <Point>      hullPointList = new List <Point>();
                    MatOfPoint        hullPointMat  = new MatOfPoint();
                    List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
                    MatOfInt4         defects       = new MatOfInt4();
                    //Simplify the contour points
                    MatOfPoint2f Temp2f = new MatOfPoint2f();
                    //Convert contours[index] from MatOfPoint to MatOfPoint2f
                    contours[index].convertTo(Temp2f, CvType.CV_32FC2);
                    //approxPolyDP works on MatOfPoint2f
                    Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
                    //Convert back to MatOfPoint and put the new values back into the contours list
                    Temp2f.convertTo(contours[index], CvType.CV_32S);

                    //Compute the convex hull around the contour
                    Imgproc.convexHull(contours[index], hullInt);
                    List <Point> pointMatList = contours[index].toList();
                    List <int>   hullIntList  = hullInt.toList();
                    for (int j = 0; j < hullIntList.Count; j++)
                    {
                        hullPointList.Add(pointMatList[hullIntList[j]]);
                        hullPointMat.fromList(hullPointList);
                        hullPoints.Add(hullPointMat);
                    }
                    ConsistP.Add(new Point(R0.x, R0.y));
                    ConsistP.Add(new Point(R0.x + R0.width, R0.y + R0.height));
                    ConsistP.Add(new Point(R0.x + R0.width, R0.y));
                    ConsistP.Add(new Point(R0.x, R0.y + R0.height));
                    clickRGB.Add(clickcolor(ColorMat, R0));
                    clickHSV.Add(clickcolor(HsvMat, R0));
                    HullCountList.Add(hullIntList.Count);
                    trianglePointList.Add(pointMatList);
                    //Release temporary buffers
                    defects.Dispose();
                    hullPointList.Clear();
                    hullPointMat.Dispose();
                    hullInt.Dispose();
                    hullPoints.Clear();


                    //Debug.Log("ID = " +  index + " Color = " + clickcolor(ColorMat, R0));
                }
            }
            //Match the detected regions to objects by color
            _matchColorObjectList = setColorMatchObject(ConsistP, trianglePointList, clickRGB, clickHSV, resultMat, HullCountList);
        }
        return(resultMat);
    }
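
clickcolor is not shown in this listing. A common way to get a representative color for a bounding rect is to average the pixels inside it; the sketch below (the name SampleMeanColor is hypothetical) is an assumption about that behavior, not the original helper.

using OpenCVForUnity;

    // Hypothetical sampler: mean color of the region covered by a bounding rect.
    // Works on either the color frame or the HSV frame, mirroring clickRGB/clickHSV above.
    private static Scalar SampleMeanColor(Mat image, OpenCVForUnity.Rect roi)
    {
        using (Mat region = image.submat(roi))   // view into the image, no copy
        {
            return Core.mean(region);            // per-channel average as a Scalar
        }
    }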
Example #6
    void FormatImageSquare()
    {
        Mat mainMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
        Mat grayMat = new Mat();

        //Convert Texture2d to Matrix
        Utils.texture2DToMat(baseTexture, mainMat);
        //copy main matrix to grayMat
        mainMat.copyTo(grayMat);

        //Convert color to gray
        Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        //Blur
        Imgproc.GaussianBlur(grayMat, grayMat, new Size(25, 25), 0);

        //contrast: binarize with Otsu's threshold
        Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);
        //extract edge
        Imgproc.Canny(grayMat, grayMat, 50, 50);

        //prepare for finding contours
        List <MatOfPoint> contours = new List <MatOfPoint>();

        //find the contour from canny edge image
        Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        List <MatOfPoint> tempTargets = new List <MatOfPoint>();

        for (int i = 0; i < contours.Count; i++)
        {
            MatOfPoint   cp = contours[i];
            MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
            double       p  = Imgproc.arcLength(cn, true);

            MatOfPoint2f approx = new MatOfPoint2f();
            //convert the contour to a simpler polygon
            Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);


            //find a contour with four points and large area
            int minContourArea = 10000;
            if (approx.toArray().Length == 4 && Imgproc.contourArea(approx) > minContourArea)
            {
                MatOfPoint approxPt = new MatOfPoint();
                approx.convertTo(approxPt, CvType.CV_32S);

                float maxCosine = 0;
                for (int j = 2; j < 5; j++)
                {
                    Vector2 v1 = new Vector2((float)(approx.toArray()[j % 4].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j % 4].y - approx.toArray()[j - 1].y));
                    Vector2 v2 = new Vector2((float)(approx.toArray()[j - 2].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j - 2].y - approx.toArray()[j - 1].y));

                    float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                    maxCosine = Mathf.Max(maxCosine, angle);
                }

                if (maxCosine < 135f)
                {
                    tempTargets.Add(approxPt);
                }
            }
        }

        if (tempTargets.Count > 0)
        {
            //Get the first contour
            MatOfPoint approxPt = tempTargets[0];
            //Making Source Mat
            Mat srcPointMat = Converters.vector_Point_to_Mat(approxPt.toList(), CvType.CV_32F);

            //Making Destination Mat
            /*change these values*/
            List <Point> dstPoints = new List <Point>();
            dstPoints.Add(new Point(512, 0));
            dstPoints.Add(new Point(0, 0));
            dstPoints.Add(new Point(0, 512));
            dstPoints.Add(new Point(512, 512));

            Mat dstPointMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);

            //Make perspective transform
            Mat M         = Imgproc.getPerspectiveTransform(srcPointMat, dstPointMat);
            Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
            //Crop and warp the image
            Imgproc.warpPerspective(mainMat, warpedMat, M, new Size(512, 512), Imgproc.INTER_LINEAR);
            warpedMat.convertTo(warpedMat, CvType.CV_8UC3);



            //Convert color to gray
            Imgproc.cvtColor(warpedMat, warpedMat, Imgproc.COLOR_BGR2GRAY);
            ////Blur
            //Imgproc.GaussianBlur(warpedMat, warpedMat, new Size(25, 25), 0);

            //contrast: binarize with Otsu's threshold
            Imgproc.threshold(warpedMat, warpedMat, 0, 255, Imgproc.THRESH_OTSU);
            //resize
            Imgproc.resize(warpedMat, warpedMat, new Size(28, 28));
            //Create an empty final texture
            finalTexture = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
            //Convert the Mat to a Texture2D
            Utils.matToTexture2D(warpedMat, finalTexture);
            targetRawImage.texture = finalTexture;
        }
    }
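
FormatImageSquare ends with a 28x28 single-channel Mat copied into finalTexture. If the pixel values are also needed as a numeric array for downstream processing, they can be read back as sketched below; the MatToFloatArray name and the normalization to the 0..1 range are assumptions, not part of the original example.

using OpenCVForUnity;

    // Hypothetical post-processing: flatten a 28x28 CV_8UC1 Mat into normalized floats.
    private static float[] MatToFloatArray(Mat mat28)
    {
        byte[] bytes = new byte[(int)mat28.total()];
        mat28.get(0, 0, bytes);                 // copy pixel values row-major into the array

        float[] values = new float[bytes.Length];
        for (int i = 0; i < bytes.Length; i++)
        {
            values[i] = bytes[i] / 255f;        // scale 0..255 down to 0..1
        }
        return values;
    }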