Example #1
0
    /// <summary>
    /// Converts hull indices into the corresponding contour points.
    /// </summary>
    /// <param name="index">Indices into <paramref name="contour"/> (e.g. from convexHull).</param>
    /// <param name="contour">Contour the indices refer to.</param>
    /// <returns>A new MatOfPoint holding the indexed points, in index order.</returns>
    protected static MatOfPoint convertIndexToPoint(MatOfInt index, MatOfPoint contour)
    {
        Point[] contourPoints = contour.toArray();
        int[]   hullIndices   = index.toArray();

        // Look up each referenced point on the contour.
        Point[] hullPoints = new Point[hullIndices.Length];
        for (int i = 0; i < hullIndices.Length; i++)
        {
            hullPoints[i] = contourPoints[hullIndices[i]];
        }

        MatOfPoint result = new MatOfPoint();
        result.fromArray(hullPoints);
        return result;
    }
Example #2
0
        /// <summary>
        /// Estimates the hand area from the detected contours and writes the
        /// result (center, size, rotation, validity) into <paramref name="resultSetter"/>.
        /// </summary>
        private void EstimateHand(Mat mat, List <MatOfPoint> contours, RecordHandDetectResult resultSetter)
        {
            // No contours in the processed image: nothing to report for this side.
            if (contours.Count == 0)
            {
                resultSetter.HasValidHandArea = false;
                return;
            }

            var largest = SelectLargestContour(contours);

            var bounds = Imgproc.boundingRect(largest);
            // Guard against mistaking the wrist notch (detected near the bottom of
            // the area) for a gap between fingers: defects below this Y are ignored.
            double defectMinY = bounds.y + bounds.height * 0.7;

            // Simplify the contour before hull / defect analysis.
            var approx2f = new MatOfPoint2f();
            Imgproc.approxPolyDP(new MatOfPoint2f(largest.toArray()), approx2f, 3, true);
            largest = new MatOfPoint(approx2f.toArray());

            var obb = Imgproc.minAreaRect(approx2f);

            // Center/size come from the axis-aligned bound; only the angle uses the OBB.
            resultSetter.HandAreaCenter   = new Vector2(bounds.x + bounds.width / 2, bounds.y + bounds.height / 2);
            resultSetter.HandAreaSize     = new Vector2(bounds.width, bounds.height);
            resultSetter.HandAreaRotation = (float)obb.angle;

            // Using the OBB for everything would look like this, but note the angle
            // behaves counter-intuitively once it exceeds 45 degrees:
            // resultSetter.HandAreaCenter = new Vector2((float)obb.center.x, (float)obb.center.y);
            // resultSetter.HandAreaSize = new Vector2((float)obb.size.width, (float)obb.size.height);
            // resultSetter.HandAreaRotation = (float)obb.angle;

            Imgproc.convexHull(largest, _hullIndices);

            // Normally impossible: the convex hull could not be built properly.
            if (_hullIndices.toArray().Length < 3)
            {
                resultSetter.HasValidHandArea = false;
                return;
            }

            UpdateConvexityDefection(largest, _hullIndices, defectMinY, resultSetter);
        }
Example #3
0
        /// <summary>
        /// Finds external contours in <paramref name="image"/> whose convex hull
        /// approximates to a quadrilateral, and fills <paramref name="contours"/> with them.
        /// </summary>
        private void Find4PointContours(Mat image, List <MatOfPoint> contours)
        {
            contours.Clear();
            var candidates = new List <MatOfPoint>();
            var hierarchy  = new Mat();

            Imgproc.findContours(image, candidates, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            foreach (var candidate in candidates)
            {
                // Build the convex hull of the contour as an array of points.
                var hullIndices = new MatOfInt();
                Imgproc.convexHull(candidate, hullIndices, false);

                Point[] contourPoints = candidate.toArray();
                int[]   indices       = hullIndices.toArray();
                var     hullPoints    = new Point[indices.Length];
                for (int i = 0; i < indices.Length; i++)
                {
                    hullPoints[i] = contourPoints[indices[i]];
                }

                // Approximate the hull with a polygon (tolerance: 1% of the perimeter).
                var    hull2f    = new MatOfPoint2f(hullPoints);
                var    approx2f  = new MatOfPoint2f();
                double perimeter = Imgproc.arcLength(hull2f, true);
                Imgproc.approxPolyDP(hull2f, approx2f, 0.01 * perimeter, true);

                var approx = new MatOfPoint();
                approx2f.convertTo(approx, CvType.CV_32S);

                // Keep quadrilaterals only.
                if (approx.size().area() != 4)
                {
                    continue;
                }

                contours.Add(approx);
            }
        }
Example #4
0
        /// <summary>
        /// Hand pose estimation: finds the largest detected contour, draws its
        /// bounding box and convex hull onto the frame, and counts fingers from
        /// the convexity defects between them.
        /// </summary>
        /// <param name="rgbaMat">RGBA camera frame; blurred and drawn onto in place.</param>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            List <MatOfPoint> contours = detector.getContours();

            detector.process(rgbaMat);

            if (contours.Count <= 0)
            {
                return;
            }

            // Find the contour with the largest rotated bounding area.
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours [boundPos].toArray()));
            Core.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            // Defects below 70% of the bounding box height are ignored (wrist area,
            // not a gap between fingers).
            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
            Core.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            // Simplify the contour before hull/defect analysis.
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours [boundPos].toArray()), pointMat, 3, true);
            contours [boundPos] = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours [boundPos].toArray()), hull);

            // Normally impossible: the convex hull could not be built properly.
            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contours [boundPos].toArray()), hull, convexDefect);

            // PERF: convert each native Mat to a managed list ONCE. The original
            // called toList() inside every loop iteration, re-marshaling the whole
            // Mat each time.
            List <Point> contourPoints = contours [boundPos].toList();
            List <int>   hullIndices   = hull.toList();
            List <int>   defects       = convexDefect.toList();

            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();
            List <Point>      listPo     = new List <Point> ();

            for (int j = 0; j < hullIndices.Count; j++)
            {
                listPo.Add(contourPoints [hullIndices [j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <MatOfPoint> defectPoints = new List <MatOfPoint> ();
            List <Point>      listPoDefect = new List <Point> ();

            // Each defect is 4 ints: start index, end index, farthest-point index, depth.
            for (int j = 0; j < defects.Count; j = j + 4)
            {
                Point farPoint = contourPoints [defects [j + 2]];
                int   depth    = defects [j + 3];
                if (depth > threasholdSlider.value && farPoint.y < a)
                {
                    listPoDefect.Add(farPoint);
                }
            }

            MatOfPoint e2 = new MatOfPoint();

            // BUGFIX: was e2.fromList(listPo) — defectPoints should hold the defect
            // points, not a second copy of the hull points.
            e2.fromList(listPoDefect);
            defectPoints.Add(e2);

            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            // Each deep defect between two fingers approximates one finger gap;
            // clamp to a plausible hand.
            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            numberOfFingersText.text = numberOfFingers.ToString();

            foreach (Point p in listPoDefect)
            {
                Core.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
Example #5
0
        /// <summary>
        /// Recognizes the markers: identifies each candidate against the known
        /// marker designs, normalizes corner order, and refines corners to
        /// sub-pixel accuracy.
        /// </summary>
        /// <param name="grayscale">Grayscale input image.</param>
        /// <param name="detectedMarkers">Candidate markers; replaced in place with the recognized ones.</param>
        void recognizeMarkers(Mat grayscale, List <Marker> detectedMarkers)
        {
            List <Marker> goodMarkers = new List <Marker> ();

            // Identify the markers
            for (int i = 0; i < detectedMarkers.Count; i++)
            {
                Marker marker = detectedMarkers [i];

                // Find the perspective transformation that brings current marker to rectangular form
                Mat markerTransform = Imgproc.getPerspectiveTransform(new MatOfPoint2f(marker.points.toArray()), m_markerCorners2d);

                // Transform image to get a canonical marker image
                Imgproc.warpPerspective(grayscale, canonicalMarkerImage, markerTransform, markerSize);

                for (int p = 0; p < m_markerDesigns.Count; p++)
                {
                    MatOfInt nRotations = new MatOfInt(0);
                    int      id         = Marker.getMarkerId(canonicalMarkerImage, nRotations, m_markerDesigns [p]);
                    if (id != -1)
                    {
                        marker.id = id;

                        // Sort the points so that they are always in the same order
                        // no matter the camera orientation — equivalent of
                        // std::rotate(points.begin(), points.begin() + 4 - nRotations, points.end()).
                        int          rotation         = nRotations.toArray() [0];
                        List <Point> MarkerPointsList = marker.points.toList();

                        MarkerPointsList = MarkerPointsList.Skip(4 - rotation).Concat(MarkerPointsList.Take(4 - rotation)).ToList();

                        marker.points.fromList(MarkerPointsList);

                        goodMarkers.Add(marker);
                    }
                    nRotations.Dispose();
                }
            }

            // Refine marker corners using sub pixel accuracy
            if (goodMarkers.Count > 0)
            {
                // Gather all marker corners into one flat list (4 per marker).
                List <Point> preciseCornersPoint = new List <Point> (4 * goodMarkers.Count);
                for (int i = 0; i < preciseCornersPoint.Capacity; i++)
                {
                    preciseCornersPoint.Add(new Point(0, 0));
                }

                for (int i = 0; i < goodMarkers.Count; i++)
                {
                    List <Point> markerPointsList = goodMarkers [i].points.toList();

                    for (int c = 0; c < 4; c++)
                    {
                        preciseCornersPoint [i * 4 + c] = markerPointsList [c];
                    }
                }

                MatOfPoint2f preciseCorners = new MatOfPoint2f(preciseCornersPoint.ToArray());

                TermCriteria termCriteria = new TermCriteria(TermCriteria.MAX_ITER | TermCriteria.EPS, 30, 0.01);
                Imgproc.cornerSubPix(grayscale, preciseCorners, new Size(5, 5), new Size(-1, -1), termCriteria);

                preciseCornersPoint = preciseCorners.toList();

                // Copy refined corners position back to markers
                for (int i = 0; i < goodMarkers.Count; i++)
                {
                    Marker marker = goodMarkers [i];

                    List <Point> markerPointsList = marker.points.toList();

                    for (int c = 0; c < 4; c++)
                    {
                        markerPointsList [c] = preciseCornersPoint [i * 4 + c];
                    }
                    // BUGFIX: toList() materializes a copy of the Mat data, so the
                    // refined corners must be written back explicitly — the original
                    // updated only the temporary list and discarded the refinement.
                    marker.points.fromList(markerPointsList);
                }
                preciseCorners.Dispose();
            }

            detectedMarkers.Clear();
            detectedMarkers.AddRange(goodMarkers);
        }
        /// <summary>
        /// Hand pose estimation: finds the largest detected contour, draws its
        /// bounding box and convex hull onto the frame, and counts fingers from
        /// the convexity defects between them.
        /// </summary>
        /// <param name="rgbaMat">RGBA camera frame; blurred and drawn onto in place.</param>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
                return;

            List<MatOfPoint> contours = detector.getContours();
            detector.process(rgbaMat);

            if (contours.Count <= 0)
            {
                return;
            }

            // Find the contour with the largest rotated bounding area.
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));

            double boundWidth = rect.size.width;
            double boundHeight = rect.size.height;
            int boundPos = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours [boundPos].toArray()));
            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            // Defects below 70% of the bounding box height are ignored (wrist area,
            // not a gap between fingers).
            double a = boundRect.br().y - boundRect.tl().y;
            a = a * 0.7;
            a = boundRect.tl().y + a;

            //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
            Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

            // Simplify the contour before hull/defect analysis.
            MatOfPoint2f pointMat = new MatOfPoint2f();
            Imgproc.approxPolyDP(new MatOfPoint2f(contours [boundPos].toArray()), pointMat, 3, true);
            contours [boundPos] = new MatOfPoint(pointMat.toArray());

            MatOfInt hull = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();
            Imgproc.convexHull(new MatOfPoint(contours [boundPos].toArray()), hull);

            // Normally impossible: the convex hull could not be built properly.
            if (hull.toArray().Length < 3)
                return;

            Imgproc.convexityDefects(new MatOfPoint(contours [boundPos].toArray()), hull, convexDefect);

            // PERF: convert each native Mat to a managed list ONCE. The original
            // called toList() inside every loop iteration, re-marshaling the whole
            // Mat each time.
            List<Point> contourPoints = contours [boundPos].toList();
            List<int> hullIndices = hull.toList();
            List<int> defects = convexDefect.toList();

            List<MatOfPoint> hullPoints = new List<MatOfPoint>();
            List<Point> listPo = new List<Point>();
            for (int j = 0; j < hullIndices.Count; j++)
            {
                listPo.Add(contourPoints [hullIndices [j]]);
            }

            MatOfPoint e = new MatOfPoint();
            e.fromList(listPo);
            hullPoints.Add(e);

            List<MatOfPoint> defectPoints = new List<MatOfPoint>();
            List<Point> listPoDefect = new List<Point>();
            // Each defect is 4 ints: start index, end index, farthest-point index, depth.
            for (int j = 0; j < defects.Count; j = j + 4)
            {
                Point farPoint = contourPoints [defects [j + 2]];
                int depth = defects [j + 3];
                if (depth > threasholdSlider.value && farPoint.y < a)
                {
                    listPoDefect.Add(farPoint);
                }
            }

            MatOfPoint e2 = new MatOfPoint();
            // BUGFIX: was e2.fromList(listPo) — defectPoints should hold the defect
            // points, not a second copy of the hull points.
            e2.fromList(listPoDefect);
            defectPoints.Add(e2);

            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            // Each deep defect between two fingers approximates one finger gap;
            // clamp to a plausible hand.
            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
                this.numberOfFingers = 5;

            numberOfFingersText.text = numberOfFingers.ToString();

            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
		/// <summary>
		/// Recognizes the markers: identifies each candidate against the marker
		/// design, normalizes corner order, and refines corners to sub-pixel accuracy.
		/// </summary>
		/// <param name="grayscale">Grayscale input image.</param>
		/// <param name="detectedMarkers">Candidate markers; replaced in place with the recognized ones.</param>
		void recognizeMarkers (Mat grayscale, List<Marker> detectedMarkers)
		{
			List<Marker> goodMarkers = new List<Marker> ();

			// Identify the markers
			for (int i = 0; i < detectedMarkers.Count; i++) {
				Marker marker = detectedMarkers [i];

				// Find the perspective transformation that brings current marker to rectangular form
				Mat markerTransform = Imgproc.getPerspectiveTransform (new MatOfPoint2f (marker.points.toArray ()), m_markerCorners2d);

				// Transform image to get a canonical marker image
				Imgproc.warpPerspective (grayscale, canonicalMarkerImage, markerTransform, markerSize);

				MatOfInt nRotations = new MatOfInt (0);
				int id = Marker.getMarkerId (canonicalMarkerImage, nRotations, m_markerDesign);
				if (id != -1) {
					marker.id = id;

					// Sort the points so that they are always in the same order no
					// matter the camera orientation — equivalent of
					// std::rotate(points.begin(), points.begin() + 4 - nRotations, points.end()).
					int rotation = nRotations.toArray () [0];
					List<Point> MarkerPointsList = marker.points.toList ();

					MarkerPointsList = MarkerPointsList.Skip (4 - rotation).Concat (MarkerPointsList.Take (4 - rotation)).ToList ();

					marker.points.fromList (MarkerPointsList);

					goodMarkers.Add (marker);
				}
				nRotations.Dispose ();
			}

			// Refine marker corners using sub pixel accuracy
			if (goodMarkers.Count > 0) {
				// Gather all marker corners into one flat list (4 per marker).
				List<Point> preciseCornersPoint = new List<Point> (4 * goodMarkers.Count);
				for (int i = 0; i < preciseCornersPoint.Capacity; i++) {
					preciseCornersPoint.Add (new Point (0, 0));
				}

				for (int i = 0; i < goodMarkers.Count; i++) {
					List<Point> markerPointsList = goodMarkers [i].points.toList ();

					for (int c = 0; c < 4; c++) {
						preciseCornersPoint [i * 4 + c] = markerPointsList [c];
					}
				}

				MatOfPoint2f preciseCorners = new MatOfPoint2f (preciseCornersPoint.ToArray ());

				TermCriteria termCriteria = new TermCriteria (TermCriteria.MAX_ITER | TermCriteria.EPS, 30, 0.01);
				Imgproc.cornerSubPix (grayscale, preciseCorners, new Size (5, 5), new Size (-1, -1), termCriteria);

				preciseCornersPoint = preciseCorners.toList ();

				// Copy refined corners position back to markers
				for (int i = 0; i < goodMarkers.Count; i++) {
					Marker marker = goodMarkers [i];

					List<Point> markerPointsList = marker.points.toList ();

					for (int c = 0; c < 4; c++) {
						markerPointsList [c] = preciseCornersPoint [i * 4 + c];
					}
					// BUGFIX: toList() materializes a copy of the Mat data, so the
					// refined corners must be written back explicitly — the original
					// updated only the temporary list and discarded the refinement.
					marker.points.fromList (markerPointsList);
				}
				preciseCorners.Dispose ();
			}

			detectedMarkers.Clear ();
			detectedMarkers.AddRange (goodMarkers);
		}
Example #8
0
    /// <summary>
    /// Determines which of the five answer columns is marked inside the given cell.
    /// </summary>
    /// <param name="align">Aligned sheet image (RGB).</param>
    /// <param name="r">Cell region of interest to inspect.</param>
    /// <returns>1..5 for the nearest column of the first usable mark, 0 when none is found.</returns>
    public int getAnswerNumber(Mat align, Rect r)
    {
        Mat roi = new Mat(align, r);
        Mat roi_gray = new Mat(), roi_edges = new Mat();

        Imgproc.cvtColor(roi, roi_gray, Imgproc.COLOR_RGB2GRAY);
        Imgproc.Canny(roi_gray, roi_edges, 200, 200);
        // Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2 + 1, 2 + 1), new Point(1, 1));
        // Imgproc.dilate(roi_edges, roi_edges, element);

        //Shape detection
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(roi_edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        List <MatOfPoint> hulls = new List <MatOfPoint>();

        for (int i = 0; i < contours.Count; i++)
        {
            // Convex hull of the contour, as points.
            MatOfInt hull_temp = new MatOfInt();
            Imgproc.convexHull(contours[i], hull_temp);
            int[]   arrIndex   = hull_temp.toArray();
            Point[] arrContour = contours[i].toArray();
            Point[] arrPoints  = new Point[arrIndex.Length];

            for (int k = 0; k < arrIndex.Length; k++)
            {
                arrPoints[k] = arrContour[arrIndex[k]];
            }

            MatOfPoint temp = new MatOfPoint();
            temp.fromArray(arrPoints);

            //Filter outliers: keep hulls about the size of a pencil mark.
            if (Imgproc.contourArea(temp) > 40 && Imgproc.contourArea(temp) < 200)
            {
                hulls.Add(temp);
            }
        }

        List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();

        for (int i = 0; i < hulls.Count; i++)
        {
            MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
            hull2f.Add(newPoint);
        }

        for (int i = 0; i < hulls.Count; i++)
        {
            //Approximate polygon
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
            List <Point> approx_polygon = approx.toList();
            approx_polygon = Scannerproc.filterPolygon(approx_polygon);

            // BUGFIX guard: filterPolygon may leave no points, which would make the
            // center-of-mass division below throw DivideByZeroException.
            if (approx_polygon.Count == 0)
            {
                continue;
            }

            //Center of mass
            int cx = 0,
                cy = 0;
            for (int k = 0; k < approx_polygon.Count; k++)
            {
                cx += (int)approx_polygon[k].x;
                cy += (int)approx_polygon[k].y;
            }
            cx /= approx_polygon.Count;
            cy /= approx_polygon.Count;

            // Anchor positions of the five answer columns inside the cell.
            Point pos1   = new Point((roi.width() * 1) / 10, cy);
            Point pos2   = new Point((roi.width() * 3) / 10, cy);
            Point pos3   = new Point((roi.width() * 5) / 10, cy);
            Point pos4   = new Point((roi.width() * 7) / 10, cy);
            Point pos5   = new Point((roi.width() * 9) / 10, cy);
            Point nowPos = new Point(cx, cy);

            double[] dist = new double[5];
            dist[0] = Scannerproc.distanceTwoPoints(pos1, nowPos);
            dist[1] = Scannerproc.distanceTwoPoints(pos2, nowPos);
            dist[2] = Scannerproc.distanceTwoPoints(pos3, nowPos);
            dist[3] = Scannerproc.distanceTwoPoints(pos4, nowPos);
            dist[4] = Scannerproc.distanceTwoPoints(pos5, nowPos);

            // Pick the nearest column. NOTE: only the first usable hull is
            // considered — the method deliberately returns inside the loop.
            int    id       = -1;
            double min_dist = 999999;
            for (int t = 0; t < 5; t++)
            {
                if (dist[t] < min_dist)
                {
                    min_dist = dist[t];
                    id       = t;
                }
            }

            return(id + 1);
        }

        return(0);
    }
Example #9
0
    /// <summary>
    /// Finds the answer blocks (large squares) on the aligned sheet and annotates
    /// each of their 20 answer rows with the detected answer number.
    /// </summary>
    /// <param name="align">Aligned sheet image (RGB); annotated in place.</param>
    public void getAnswerNumber(Mat align)
    {
        Mat align_gray = new Mat(), align_edges = new Mat();

        Imgproc.cvtColor(align, align_gray, Imgproc.COLOR_RGB2GRAY);
        Imgproc.Canny(align_gray, align_edges, 50, 50);
        Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2 + 1, 2 + 1), new Point(1, 1));

        Imgproc.dilate(align_edges, align_edges, element);

        //Shape detection
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(align_edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        List <MatOfPoint> hulls = new List <MatOfPoint>();

        for (int i = 0; i < contours.Count; i++)
        {
            // Convex hull of the contour, as points.
            MatOfInt hull_temp = new MatOfInt();
            Imgproc.convexHull(contours[i], hull_temp);
            int[]   arrIndex   = hull_temp.toArray();
            Point[] arrContour = contours[i].toArray();
            Point[] arrPoints  = new Point[arrIndex.Length];

            for (int k = 0; k < arrIndex.Length; k++)
            {
                arrPoints[k] = arrContour[arrIndex[k]];
            }

            MatOfPoint temp = new MatOfPoint();
            temp.fromArray(arrPoints);

            //Filter outliers: keep only hulls about the size of an answer block.
            if (Imgproc.contourArea(temp) > 90000 && Imgproc.contourArea(temp) < 110000)
            {
                hulls.Add(temp);
            }
        }

        List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();

        for (int i = 0; i < hulls.Count; i++)
        {
            MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
            hull2f.Add(newPoint);
        }

        List <Rect> rects = new List <Rect>();

        for (int i = 0; i < hulls.Count; i++)
        {
            //Approximate polygon
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
            List <Point> approx_polygon = approx.toList();
            approx_polygon = Scannerproc.filterPolygon(approx_polygon);

            // Guard: filterPolygon may leave no points; skip to avoid degenerate
            // polygons downstream. (Also removed a dead center-of-mass computation
            // that divided by this count and could throw DivideByZeroException.)
            if (approx_polygon.Count == 0)
            {
                continue;
            }

            if (Scannerproc.isSquare(approx_polygon))
            {
                Rect r = Imgproc.boundingRect(new MatOfPoint(approx_polygon.ToArray()));

                // Skip squares that duplicate an already-found block
                // (top-left corners closer than 100 px).
                bool isContain = false;
                for (int k = 0; k < rects.Count; k++)
                {
                    if (Scannerproc.distanceTwoPoints(rects[k].tl(), r.tl()) < 100)
                    {
                        isContain = true;
                    }
                }

                if (!isContain)
                {
                    rects.Add(r);
                    // Imgproc.rectangle(align, r.tl(), r.br(), new Scalar(255, 0, 0, 255), 3);

                    // Scan the 20 answer rows of this block (row 0 is the header).
                    for (int j = 1; j < 21; j++)
                    {
                        Rect roi = new Rect((int)r.tl().x + (int)((r.width * 1.3) / 6), (int)r.tl().y + (r.height / 21) * j, (int)((r.width * 4.7) / 6), r.height / 21);
                        int  num = getAnswerNumber(align, roi);
                        if (num != 0)
                        {
                            Imgproc.putText(align, " " + num, new Point(roi.x - 40, roi.y + 25), 1, 2, new Scalar(255, 0, 0, 255), 3, Core.LINE_AA, false);
                            Imgproc.rectangle(align, roi.tl(), roi.br(), new Scalar(0, 255, 0, 255), 2);
                        }
                    }
                }
            }
        }

        // All four blocks processed: stop detection.
        if (rects.Count == 4)
        {
            nowDetected = false;
        }
    }
Example #10
0
        /// <summary>
        /// Get result from all output layers.
        /// </summary>
        /// <param name="output">YOLO output blobs; one row per candidate detection.</param>
        /// <param name="image">Image the detections are drawn onto.</param>
        /// <param name="threshold">Confidence / class-probability threshold.</param>
        /// <param name="nmsThreshold">threshold for nms</param>
        /// <param name="nms">Enable Non-maximum suppression or not</param>
        private static void GetResult(IEnumerable <Mat> output, Mat image, float threshold, float nmsThreshold, bool nms = true)
        {
            //for nms
            List <int>    classIds      = new List <int>();
            List <float>  confidences   = new List <float>();
            List <float>  probabilities = new List <float>();
            List <Rect2d> boxes         = new List <Rect2d>();

            var w = image.width();
            var h = image.height();

            /*
             * YOLO3 COCO trainval output
             * 0 1 : center                    2 3 : w/h
             * 4 : confidence                  5 ~ 84 : class probability
             */
            const int prefix = 5;   //skip 0~4

            foreach (Mat prob in output)
            {
                for (int i = 0; i < prob.rows(); i++)
                {
                    var confidence = (float)prob.get(i, 4)[0];
                    if (confidence > threshold)
                    {
                        //get classes probability
                        Core.MinMaxLocResult minAndMax = Core.minMaxLoc(prob.row(i).colRange(prefix, prob.cols()));
                        int classes     = (int)minAndMax.maxLoc.x;
                        var probability = (float)prob.get(i, classes + prefix)[0];

                        if (probability > threshold) //more accuracy, you can cancel it
                        {
                            //get center and width/height (normalized coords scaled to pixels)
                            float centerX = (float)prob.get(i, 0)[0] * w;
                            float centerY = (float)prob.get(i, 1)[0] * h;
                            float width   = (float)prob.get(i, 2)[0] * w;
                            float height  = (float)prob.get(i, 3)[0] * h;

                            if (!nms)
                            {
                                // draw result (if don't use NMSBoxes)
                                Draw(image, classes, confidence, probability, centerX, centerY, width, height);
                                continue;
                            }

                            //put data to list for NMSBoxes
                            // BUGFIX: Rect2d takes the TOP-LEFT corner, but YOLO reports
                            // the box CENTER; convert so NMS computes correct overlaps.
                            classIds.Add(classes);
                            confidences.Add(confidence);
                            probabilities.Add(probability);
                            boxes.Add(new Rect2d(centerX - width / 2, centerY - height / 2, width, height));
                        }
                    }
                }
            }

            if (!nms)
            {
                return;
            }

            //using non-maximum suppression to reduce overlapping low confidence box
            MatOfRect2d bboxes  = new MatOfRect2d();
            MatOfFloat  scores  = new MatOfFloat();
            MatOfInt    indices = new MatOfInt();

            bboxes.fromList(boxes);
            scores.fromList(probabilities);

            Dnn.NMSBoxes(bboxes, scores, threshold, nmsThreshold, indices);

            int[] indicesA = indices.toArray();

            foreach (var i in indicesA)
            {
                var box = boxes[i];
                // Draw expects center coordinates; convert back from the
                // top-left form stored for NMS.
                Draw(image, classIds[i], confidences[i], probabilities[i], box.x + box.width / 2, box.y + box.height / 2, box.width, box.height);
            }
        }
        /// <summary>
        /// Estimates a hand pose from the camera frame: finds the largest color-matched
        /// contour, estimates a marker pose from its bounding rectangle via solvePnP,
        /// applies that pose to ARGameObject, then analyses the contour's convex hull
        /// and convexity defects. Mutates rgbaMat in place (blur + debug drawing).
        /// </summary>
        /// <param name="rgbaMat">Camera frame; blurred and drawn on in place.</param>
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            // Nothing to detect until the user has picked a target color.
            if (!isColorSelected)
            {
                return;
            }

            // NOTE(review): the contour list is fetched BEFORE Process() runs — this
            // assumes GetContours() returns a live reference that Process() fills in;
            // confirm against the detector implementation before reordering.
            List <MatOfPoint> contours = detector.GetContours();

            detector.Process(rgbaMat);

            //Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            // Select the contour whose minimum-area rectangle is largest (assumed to be the hand).
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            // Axis-aligned bounding rect of the chosen contour; drawn in white for debugging.
            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));

            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //(int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");

            // Corner points of the bounding rect. NOTE(review): these four locals are
            // computed but unused below — rectPoints is rebuilt from boundRect directly.
            Point bottomLeft  = new Point(boundRect.x, boundRect.y + boundRect.height);
            Point topLeft     = new Point(boundRect.x, boundRect.y);
            Point bottomRight = new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height);
            Point topRight    = new Point(boundRect.x + boundRect.width, boundRect.y);

            rectPoints = new MatOfPoint2f(new Point(boundRect.x, boundRect.y),                                      //topleft
                                          new Point(boundRect.x + boundRect.width, boundRect.y),                    //Top Right
                                          new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height), //Bottom Right
                                          new Point(boundRect.x, boundRect.y + boundRect.height)                    //Bottom Left
                                          );

            //double a = boundRect.br ().y - boundRect.tl ().y;
            //a = a * 0.7;
            //a = boundRect.tl ().y + a;

            //Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

            // Unit-square model corners matched to rectPoints (same TL/TR/BR/BL order) for solvePnP.
            List <Point3> m_markerCorners3dList = new List <Point3>();

            m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
            m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
            m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
            m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
            m_markerCorners3d.fromList(m_markerCorners3dList);

            //estimate pose
            Mat Rvec = new Mat();
            Mat Tvec = new Mat();
            Mat raux = new Mat();
            Mat taux = new Mat();

            Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);

            raux.convertTo(Rvec, CvType.CV_32F);
            taux.convertTo(Tvec, CvType.CV_32F);

            // Rodrigues converts the rotation vector into a 3x3 rotation matrix.
            rotMat = new Mat(3, 3, CvType.CV_64FC1);
            Calib3d.Rodrigues(Rvec, rotMat);

            // Build the 4x4 transformation matrix: rotation in the upper-left 3x3,
            // translation in the last column.
            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            //Debug.Log ("transformationM " + transformationM.ToString ());

            Rvec.Dispose();
            Tvec.Dispose();
            raux.Dispose();
            taux.Dispose();
            rotMat.Dispose();

            // invertYM/invertZM convert between OpenCV's and Unity's coordinate conventions.
            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
            //Debug.Log("arM " + ARM.ToString());

            if (ARGameObject != null)
            {
                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                // Schedule deactivation only once; Wait() presumably hides the object later — confirm.
                if (deactivateCoroutine == null)
                {
                    deactivateCoroutine = StartCoroutine(Wait(10.0f));
                }
                ARGameObject.SetActive(true);
            }

            //end pose estimation

            // Simplify the contour before hull/defect analysis (epsilon = 3 px).
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            // convexityDefects requires at least 3 hull points.
            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            // Convert hull indices back into contour points for drawing.
            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contour.toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <Point> listPoDefect = new List <Point>();

            // Defects are stored as flat int quadruples: (start, end, farthest-point, depth).
            if (convexDefect.rows() > 0)
            {
                List <int>   convexDefectList = convexDefect.toList();
                List <Point> contourList      = contour.toList();
                for (int j = 0; j < convexDefectList.Count; j = j + 4)
                {
                    Point farPoint = contourList[convexDefectList[j + 2]];
                    int   depth    = convexDefectList[j + 3];
                    // NOTE(review): the depth filter below is commented out, so listPoDefect
                    // stays empty and numberOfFingers is always 0 in this version.
                    //if (depth > threasholdSlider.value && farPoint.y < a)
                    //{
                    //    listPoDefect.Add(contourList[convexDefectList[j + 2]]);
                    //}
                    //Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
                }
            }


            Debug.Log("hull: " + hull.toList());
            if (convexDefect.rows() > 0)
            {
                Debug.Log("defects: " + convexDefect.toList());
            }

            //use these contours to do heart detection
            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            int defectsTotal = (int)convexDefect.total();

            Debug.Log("Defect total " + defectsTotal);

            // Finger count = number of accepted defects, clamped to 5.
            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            Debug.Log("numberOfFingers " + numberOfFingers);

            Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
Example #12
0
    //public delegate void Process(int[] tgrdeteced);
    /// <summary>
    /// Detects tangram pieces in a camera frame on a background thread (UniRx
    /// Observable.Start), then invokes <paramref name="prc"/> on the main thread with
    /// the detection result and the recognized shapes. Per color: HSV threshold,
    /// erode, find contours, filter by area, approximate polygons, expand from the
    /// centroid by a per-color ratio, and keep the candidate closest to image center.
    /// Shared Mats/fields are guarded by the mutex <c>mut</c>.
    /// </summary>
    /// <param name="t_rgbaMat">Input camera frame; resized into the working Mats.</param>
    /// <param name="prc">Callback receiving the detection result and shape list on the main thread.</param>
    void tagramDetect(Mat t_rgbaMat, Action <TangramResultModel, List <MyShape> > prc)
    {
        List <MyShape> lms = new List <MyShape>();

        System.Diagnostics.Stopwatch watch = null;

        long elapsedMs;
        TangramResultModel trm = null;

        // Heavy image processing runs off the main thread; results are captured in
        // trm/lms and delivered via ObserveOnMainThread below.
        Observable.Start(() =>
        {
            mut.WaitOne();
            Imgproc.resize(t_rgbaMat, rgbaMat, new Size(nW_goc, nH_goc));
            watch = System.Diagnostics.Stopwatch.StartNew();

            // Optional perspective correction before cropping to the working area.
            if (warp != null)
            {
                warp.Init(rgbaMat);
                Mat wMat = warp.warpPerspective(rgbaMat);
                rgbaMat  = wMat.submat(0, nH, 0, nW);
            }
            else
            {
                rgbaMat = rgbaMat.submat(0, nH, 0, nW);
            }

            // Debug visualization buffers, cleared each frame.
            all_thresh      = Mat.zeros(nH, nW, CvType.CV_8UC3);
            all_thresh_afct = Mat.zeros(nH, nW, CvType.CV_8UC3);
            dbMat           = Mat.zeros(nH, nW, CvType.CV_8UC3);
            all_thresh_af   = Mat.zeros(nH, nW, CvType.CV_8UC3);

            // Several brightness/contrast variants of the frame; different colors are
            // thresholded against different variants below.
            rgbaMat.copyTo(rgbMat);
            rgbMat.convertTo(rgbMat2, CvType.CV_8UC3, 0.8, 60);
            rgbMat2.copyTo(rgbMat2copy);
            rgbMat.convertTo(rgbMat3, CvType.CV_8UC3, 1, 60);
            rgbMat.convertTo(rgbMat4, CvType.CV_8UC3, 1.25, 35);
            rgbMat.convertTo(rgbMat, CvType.CV_8UC3, 1.25, 35);


            Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat2, hsvMat2, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat3, hsvMat3, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat3, hsvMat4, Imgproc.COLOR_RGB2HSV);

            watch.Stop();
            elapsedMs = watch.ElapsedMilliseconds;

            Mat markers = Mat.zeros(rgbaMat.size(), CvType.CV_32SC1);

            watch = System.Diagnostics.Stopwatch.StartNew();

            // Pass 1: build a binary mask per tangram color.
            for (int obj_i = 0; obj_i < ls_obj.Length; obj_i++)
            {
                var obj = ls_obj[obj_i];

                if (obj_i == (int)tgr.ORANGE | obj_i == (int)tgr.YELLOW | obj_i == (int)tgr.GREEN)
                {
                    Core.inRange(hsvMat2, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }
                else if (obj_i == (int)tgr.LIGHTBLUE)
                {
                    Core.inRange(hsvMat, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }
                else
                {
                    Core.inRange(hsvMat, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }


                // Red hue wraps around 0 in HSV, so a second low-hue range is merged in.
                if (obj_i == (int)tgr.RED)
                {
                    Core.inRange(hsvMat, new Scalar(0, 20, 45), new Scalar(5, 255, 255), thresholdMat2);
                    thresholdMat2.copyTo(thresholdMat, thresholdMat2);
                }


                thresholdMatArr[obj_i] = thresholdMat.clone();
            }

            //thresholdMatArr[(int)tgr.LIGHTBLUE].setTo(new Scalar(0), thresholdMatArr[(int)tgr.BLUE]);
            //thresholdMatArr[(int)tgr.LIGHTBLUE].setTo(new Scalar(0), thresholdMatArr[(int)tgr.GREEN]);


            // Pass 2: per color, clean the mask, extract contours and pick one shape.
            for (int obj_i = 0; obj_i < ls_obj.Length; obj_i++)
            {
                var obj = ls_obj[obj_i];

                all_cts.Clear();
                thresholdMat = thresholdMatArr[obj_i];
                if (toggle_db[obj_i] == true)
                {
                    all_thresh.setTo(obj.ColorRGB, thresholdMat);
                }

                // NOTE(review): `true |` makes the first condition always true, so every
                // color is eroded once; LIGHTBLUE/PURPLE are then eroded a second time.
                if (true | obj_i == (int)tgr.PURPLE | obj_i == (int)tgr.YELLOW | obj_i == (int)tgr.RED | obj_i == (int)tgr.GREEN | obj_i == (int)tgr.ORANGE)
                {
                    Imgproc.erode(thresholdMat, thresholdMat2, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)), new Point(-1, -1), 1);
                }
                if (obj_i == (int)tgr.LIGHTBLUE | obj_i == (int)tgr.PURPLE)
                {
                    Imgproc.erode(thresholdMat, thresholdMat2, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)), new Point(-1, -1), 1);
                }

                if (toggle_db[obj_i] == true)
                {
                    all_thresh_af.setTo(obj.ColorRGB, thresholdMat2);
                }
                all_thresh_afct.setTo(new Scalar(obj_i + 1), thresholdMat2);

                color_filter.Add(thresholdMat2.clone());

                Imgproc.findContours(thresholdMat2, all_cts, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
                Scalar c = obj.getColor();

                // Drop contours outside the expected per-color area band (with slack factors).
                for (int ct_i = 0; ct_i < all_cts.Count; ct_i++)
                {
                    double area = Imgproc.contourArea(all_cts[ct_i]);
                    // if (area < MIN_OBJECT_AREA)
                    if (area < MIN_OBJECT_AREAS[obj_i] * 0.55)
                    {
                        all_cts.RemoveAt(ct_i);
                        ct_i--;
                    }
                    if (area > MAX_OBJECT_AREAS[obj_i] * 1.3)
                    {
                        all_cts.RemoveAt(ct_i);
                        ct_i--;
                    }
                }

                // `chon` will hold the candidate shape closest to the image center.
                MyShape chon = null;
                MyShape ms   = new MyShape();
                float dt     = 1000000;

                for (int ct_i = 0; ct_i < all_cts.Count; ct_i++)
                {
                    var ct      = all_cts[ct_i];
                    var peri    = Imgproc.arcLength(new MatOfPoint2f(ct.toArray()), true);
                    var epsilon = 0.1 * peri;
                    if (obj_i == (int)tgr.ORANGE || obj_i == (int)tgr.YELLOW)
                    {
                        epsilon = 0.065 * peri;
                    }
                    Imgproc.approxPolyDP(new MatOfPoint2f(ct.toArray()), approx_ct, epsilon, true);

                    // Convex hull of the raw contour; hull indices are mapped back to points.
                    MatOfInt pts_cvh = new MatOfInt();
                    Imgproc.convexHull(ct, pts_cvh, true);
                    var cvh_numPts  = pts_cvh.toArray().Length;
                    Point[] cvh_pts = new Point[cvh_numPts];
                    var ct_pts      = ct.toArray();



                    for (int i = 0; i < cvh_numPts; i++)
                    {
                        var i1     = pts_cvh.toArray()[i];
                        var p1     = ct_pts[i1];
                        cvh_pts[i] = p1;

                        try
                        {
                            if (debug == true)
                            {
                                var i2 = pts_cvh.toArray()[(i + 1) % cvh_numPts];
                                var p2 = ct_pts[i2];
                                Imgproc.circle(rgbMat2, p1, 1, c, 2);
                            }
                        }
                        catch (Exception e)
                        {
                            Utilities.LogFormat("Here3:{0},{1},{2}", rgbMat2 == null, p1 == null, c == null);
                            Utilities.Log("Exception is {0}", e.ToString());
                            Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                        }
                    }


                    // Polygon-approximate the hull as well (looser epsilon for ORANGE).
                    MatOfPoint2f approx_cvh = new MatOfPoint2f();

                    var epsilon2 = peri * 0.1;
                    if (obj_i == (int)tgr.ORANGE)
                    {
                        epsilon2 = peri * 0.065;
                    }
                    Imgproc.approxPolyDP(new MatOfPoint2f(cvh_pts), approx_cvh, epsilon2, true);

                    var ct_ori            = new MatOfPoint(ct.toArray());
                    MatOfPoint approx_ct2 = new MatOfPoint(approx_ct.toArray());

                    List <MatOfPoint> approx_cvh2 = new List <MatOfPoint>();
                    approx_cvh2.Add(new MatOfPoint(approx_cvh.toArray()));

                    // Centroid of the approximated hull from image moments.
                    var mu    = Imgproc.moments(approx_cvh2[0], true);
                    cterTgr.x = mu.m10 / mu.m00;
                    cterTgr.y = mu.m01 / mu.m00;

                    // Tangram pieces are triangles (3 vertices) or quads (4 vertices).
                    if (approx_ct2.size().height == 3 | approx_ct2.size().height == 4)
                    {
                        var points    = approx_cvh2[0].toArray();
                        var numpoints = points.Length;

                        ms._id = obj_i;
                        ms.ps  = new Point[numpoints];


                        // Per-color expansion ratio compensating for erosion shrink.
                        double rat = 1.16;
                        if (obj_i == (int)tgr.PURPLE)
                        {
                            rat = 1.20;
                        }
                        else if (obj_i == (int)tgr.LIGHTBLUE)
                        {
                            rat = 1.20;
                        }
                        else if (obj_i == (int)tgr.RED | obj_i == (int)tgr.BLUE)
                        {
                            rat = 1.09;
                        }
                        else if (obj_i == (int)tgr.YELLOW)
                        {
                            rat = 1.10;
                        }
                        else if (obj_i == (int)tgr.ORANGE)
                        {
                            rat = 1.10;
                        }
                        else if (obj_i == (int)tgr.GREEN)
                        {
                            rat = 1.10;
                        }

                        // For triangles, find the hypotenuse (longest edge) index.
                        var ind_huyen = 0;
                        var max       = -1d;

                        if (numpoints == 3 || numpoints == 4)
                        {
                            for (int p_i = 0; p_i < numpoints; p_i++)
                            {
                                var p  = points[p_i];
                                var p2 = points[(p_i + 1) % numpoints];

                                // Push each vertex outward from the centroid by `rat`.
                                var vect = p - cterTgr;

                                vect = vect * rat;

                                var p_new     = cterTgr + vect;
                                points[p_i].x = (int)(p_new.x * 100) / 100f;
                                points[p_i].y = (int)(p_new.y * 100) / 100f;


                                if (numpoints == 4)
                                {
                                    ms.ps[p_i] = p_new;
                                }

                                if (numpoints == 3)
                                {
                                    var vt     = p2 - p;
                                    var length = vt.x * vt.x + vt.y * vt.y;
                                    if (length > max)
                                    {
                                        ind_huyen = p_i;
                                        max       = length;
                                    }
                                }
                            }
                        }

                        if (numpoints == 3)
                        {
                            // Order triangle vertices as: right angle first, then the two
                            // acute (hypotenuse) endpoints.
                            var i_nhon1 = ind_huyen;
                            var i_nhon2 = (ind_huyen + 1) % numpoints;
                            var i_vuong = (ind_huyen + 2) % numpoints;

                            ms.ps[0] = points[i_vuong];
                            ms.ps[1] = points[i_nhon1];
                            ms.ps[2] = points[i_nhon2];
                        }
                        else if (numpoints == 4)
                        {
                            if (obj_i == (int)tgr.ORANGE)
                            {
                                // Parallelogram: find the acute-angle vertex via the longer
                                // diagonal, then decide mirrored/flipped orientation by
                                // comparing the edge lengths adjacent to it.
                                var vt_cheo1   = ms.ps[0] - ms.ps[2];
                                var vt_cheo2   = ms.ps[1] - ms.ps[3];
                                var leng_cheo1 = vt_cheo1.x * vt_cheo1.x + vt_cheo1.y * vt_cheo1.y;
                                var leng_cheo2 = vt_cheo2.x * vt_cheo2.x + vt_cheo2.y * vt_cheo2.y;
                                var i_nhon     = 0;
                                if (leng_cheo2 > leng_cheo1)
                                {
                                    i_nhon = 1;
                                }

                                ms.ps[0] = points[i_nhon];
                                ms.ps[1] = points[(i_nhon + 1)];
                                ms.ps[2] = points[(i_nhon + 2)];
                                ms.ps[3] = points[(i_nhon + 3) % numpoints];

                                var i_prvNhon   = (i_nhon + 4 - 1) % numpoints;
                                var i_aftNhon   = i_nhon + 1;
                                var vt_prvNhon  = points[i_prvNhon] - points[i_nhon];
                                var vt_aftNhon  = points[i_aftNhon] - points[i_nhon];
                                var len_prvNhon = vt_prvNhon.x * vt_prvNhon.x + vt_prvNhon.y * vt_prvNhon.y;
                                var len_aftNhon = vt_aftNhon.x * vt_aftNhon.x + vt_aftNhon.y * vt_aftNhon.y;

                                Imgproc.line(dbMat, points[i_prvNhon], points[i_nhon], c, 1);

                                if (len_prvNhon > len_aftNhon)
                                {
                                    ms.isFlip = true;
                                    Imgproc.putText(dbMat, " IsFLIP", ms.ps[3], 1, 1, c, 1);
                                }
                                else
                                {
                                    ms.isFlip = false;
                                    Imgproc.putText(dbMat, " IsNOTFLIP", ms.ps[3], 1, 1, c, 1);
                                }
                            }
                        }

                        // Keep the candidate whose centroid is closest to the image center.
                        // NOTE(review): `dt` is never updated after a closer candidate is
                        // found, so the LAST valid candidate wins, not the closest — confirm.
                        var centerMat = new Point(rgbMat.width() / 2f, rgbMat.height() / 2f);
                        var vtLech    = centerMat - cterTgr;
                        var dt2       = vtLech.x * vtLech.x + vtLech.y * vtLech.y;
                        if (dt2 < dt)
                        {
                            chon = ms;
                        }
                    }
                    try
                    {
                        Imgproc.circle(rgbMat, cterTgr, 1, c, 1);
                        Imgproc.putText(rgbMat, mu.m00.ToString(), cterTgr, 1, 1, c, 1);
                    }
                    catch (Exception e)
                    {
                        Utilities.LogFormat("Here2:{0},{1},{2}", rgbMat == null, cterTgr == null, c == null);
                        Utilities.Log("Exception is {0}", e.ToString());
                        Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                    }

                    //if (approx_ct2.size().height == 3 | approx_ct2.size().height == 4) break;
                }

                // Record the chosen shape for this color and draw its outline.
                if (chon != null)
                {
                    lms.Add(chon);

                    var ps = chon.ps;
                    for (int i = 0; i < ps.Length; i++)
                    {
                        var p1 = ps[i];
                        var p2 = ps[(i + 1) % ps.Length];

                        try
                        {
                            Imgproc.line(rgbMat2, p1, p2, c, 1);
                            Imgproc.line(all_thresh_afct, p1, p2, new Scalar(255, 255, 255), 1);
                            Imgproc.line(dbMat, p1, p2, c, 1);
                            Imgproc.circle(dbMat, p1, 1, c);
                        }
                        catch (Exception e)
                        {
                            Utilities.LogFormat("Here1:{0},{1},{2}", rgbMat2 == null, p1 == null, p2 == null);
                            Utilities.Log("Exception is {0}", e.ToString());
                            Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                        }
                    }
                }

                watch.Stop();
                elapsedMs = watch.ElapsedMilliseconds;
            }

            // Classify the collected shapes into the final tangram result.
            TangramShape msl = new TangramShape();
            msl.datas        = lms;
            var json         = JsonUtility.ToJson(msl);

            watch = System.Diagnostics.Stopwatch.StartNew();
            trm   = tangramFeatureModelList.Detect(msl.datas.ToArray());
            watch.Stop();
            elapsedMs = watch.ElapsedMilliseconds;

            mut.ReleaseMutex();
        }).ObserveOnMainThread().Subscribe((rx) =>
        {
            // Back on the main thread: deliver the result, then (in debug mode) push
            // the intermediate Mats into UI textures. Unity texture APIs must be
            // called from the main thread.
            prc(trm, lms);
            if (debug == true)
            {
                mut.WaitOne();

                if (texture != null && debug == true)
                {
                    Utils.matToTexture2D(dbMat, texture);
                }
                if (dbText1 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat2copy, dbText1);
                }
                if (dbText2 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat3, dbText2);
                }
                if (dbText3 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat4, dbText3);
                }
                if (dbText4 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat, dbText4);
                }

                // Scale label values (obj_i + 1) up so the classes are visible as gray levels.
                all_thresh_afct = all_thresh_afct * 25;
                Imgproc.cvtColor(rgbMat2, rgbMat2, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(all_thresh, all_thresh, Imgproc.COLOR_RGBA2RGB);
                Mat a = new Mat(all_thresh.size(), CvType.CV_8UC3);
                Core.addWeighted(all_thresh, 0.2, rgbMat2, 0.8, 0, a);
                if (dbText5 != null && debug == true)
                {
                    Utils.matToTexture2D(a, dbText5);
                }
                if (dbText6 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh, dbText6);
                }
                if (dbText7 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh_afct, dbText7);
                }
                if (dbText8 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh_af, dbText8);
                }
                mut.ReleaseMutex();
            }
        });
    }
Example #13
0
    /// <summary>
    /// Hand pose estimation: finds the largest color-matched contour, draws its
    /// bounding box, then counts extended fingers via convex hull / convexity
    /// defects. Mutates rgbaMat in place (blur + debug drawing) and updates the
    /// pointaX/pointbX/pointbY and numberOfFingers fields.
    /// </summary>
    /// <param name="rgbaMat">Camera frame; blurred and drawn on in place.</param>
    public void handPoseEstimationProcess(Mat rgbaMat)
    {
        //Imgproc.blur(mRgba, mRgba, new Size(5,5));
        Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
        //Imgproc.medianBlur(mRgba, mRgba, 3);

        // Nothing to detect until the user has picked a target color.
        if (!isColorSelected)
        {
            return;
        }

        // NOTE(review): the contour list is fetched BEFORE process() runs — this
        // assumes getContours() returns a live reference that process() fills in;
        // confirm against the detector implementation before reordering.
        List <MatOfPoint> contours = detector.getContours();

        detector.process(rgbaMat);
        //Debug.Log(contours + " | " + contours.Count);
        //string[] output = contours.ToArray();

        // Debug-only loop; all statements are commented out.
        for (int i = 0; i < contours.Count; i++)
        {
            //Debug.Log("MatOfPoint2f " + new MatOfPoint2f(contours[i].toArray()) + " | " + i);
            //Debug.Log("MatOfPoint " + contours [i] + " | " + i);
            //Imgproc.circle(rgbaMat, contours[i], 6, new Scalar(0, 255, 0, 255), -1);


            //Debug.Log ("kotka" +  MatOfPoint.ReferenceEquals(x, y));
        }

        if (contours.Count <= 0)
        {
            return;
        }


        // Select the contour whose minimum-area rectangle is largest (assumed to be the hand).
        RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

        double boundWidth  = rect.size.width;
        double boundHeight = rect.size.height;
        int    boundPos    = 0;

        for (int i = 1; i < contours.Count; i++)
        {
            rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
            if (rect.size.width * rect.size.height > boundWidth * boundHeight)
            {
                boundWidth  = rect.size.width;
                boundHeight = rect.size.height;
                boundPos    = i;
            }
        }

        // Axis-aligned bounding box of the hand contour, with debug markers at its corners.
        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
        Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
        //tochkaX = boundRect.tl ().x;
        //tochkaY = boundRect.tl ().y;
        Imgproc.circle(rgbaMat, boundRect.tl(), 6, new Scalar(0, 255, 0, 255), -1);
        Imgproc.circle(rgbaMat, boundRect.br(), 6, new Scalar(0, 255, 0, 255), -1);
        pointbX = boundRect.br().x;
        pointbY = boundRect.br().y;
        pointaX = boundRect.x;
        // NOTE(review): likely a bug — this overwrites pointbY (set from br().y two
        // lines above) and leaves pointaY never assigned; probably intended
        // `pointaY = boundRect.y;`. Confirm against the field declarations.
        pointbY = boundRect.y;
        // `a` = y-coordinate 70% down the bounding box; defects below this line are
        // treated as wrist dips rather than finger gaps.
        double a = boundRect.br().y - boundRect.tl().y;

        a = a * 0.7;
        a = boundRect.tl().y + a;
        Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);
        // Simplify the contour before hull/defect analysis (epsilon = 3 px).
        MatOfPoint2f pointMat = new MatOfPoint2f();

        Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
        contours[boundPos] = new MatOfPoint(pointMat.toArray());
        MatOfInt  hull         = new MatOfInt();
        MatOfInt4 convexDefect = new MatOfInt4();

        Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
        // convexityDefects requires at least 3 hull points.
        if (hull.toArray().Length < 3)
        {
            return;
        }
        Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);
        // Convert hull indices back into contour points for drawing.
        List <MatOfPoint> hullPoints = new List <MatOfPoint>();
        List <Point>      listPo     = new List <Point>();

        for (int j = 0; j < hull.toList().Count; j++)
        {
            listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
        }
        MatOfPoint e = new MatOfPoint();

        e.fromList(listPo);
        hullPoints.Add(e);
        List <MatOfPoint> defectPoints = new List <MatOfPoint>();
        List <Point>      listPoDefect = new List <Point>();

        // Defects are stored as flat int quadruples: (start, end, farthest-point, depth).
        // Keep only deep defects (depth > 8700, fixed-point 8.8 per OpenCV docs)
        // above the wrist line `a` — these approximate the gaps between fingers.
        for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
        {
            Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
            int   depth    = convexDefect.toList()[j + 3];
            if (depth > 8700 && farPoint.y < a)
            {
                listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
            }
        }

        // NOTE(review): e2 is filled from listPo (hull points), not listPoDefect, and
        // defectPoints is never used afterwards — confirm whether this is intentional.
        MatOfPoint e2 = new MatOfPoint();

        e2.fromList(listPo);
        defectPoints.Add(e2);
        Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
        // Finger count = number of accepted defects, clamped to 5.
        this.numberOfFingers = listPoDefect.Count;
        if (this.numberOfFingers > 5)
        {
            this.numberOfFingers = 5;
        }
        foreach (Point p in listPoDefect)
        {
            Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
        }
    }
Example #14
0
        /// <summary>
        /// Detects the largest color-blob contour in the frame, draws its convex-hull
        /// points onto <paramref name="rgbaMat"/>, and links hull points to nearby face
        /// landmarks. Side effects: writes mRgba, handPoint, facePoint, Point1, Point2.
        /// </summary>
        /// <param name="rgbaMat">Camera frame (RGBA); drawn on in place.</param>
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            float DOWNSCALE_RATIO = 1.0f;

            if (enableDownScale)
            {
                mRgba = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                rgbaMat.copyTo(mRgba);
                DOWNSCALE_RATIO = 1.0f;
            }

            // Light blur to stabilise the color segmentation.
            Imgproc.GaussianBlur(mRgba, mRgba, new Size(3, 3), 1, 1);

            if (!isColorSelected)
            {
                return;
            }

            // NOTE(review): contours are fetched before Process(); this works only if the
            // detector returns its internal list, which Process() refills in place — confirm.
            List<MatOfPoint> contours = detector.GetContours();

            detector.Process(mRgba);

            if (contours.Count <= 0)
            {
                return;
            }

            // Pick the contour with the largest oriented bounding box (assumed to be the hand).
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth = rect.size.width;
            double boundHeight = rect.size.height;
            int boundPos = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
            Imgproc.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            // y coordinate 70% of the way down the bounding box; kept for parity with the
            // sibling methods, where defects below this line are treated as wrist dips.
            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            // Simplify the contour before hull/defect analysis.
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt hull = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            // Fewer than 3 hull points cannot produce convexity defects.
            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            // Hoisted: toList() copies the Mat on every call, so cache both lists once
            // instead of re-converting inside the loop (was accidentally O(n^2)).
            List<int> hullIndices = hull.toList();
            List<Point> contourPoints = contour.toList();
            List<Point> listPo = new List<Point>();

            for (int j = 0; j < hullIndices.Count; j++)
            {
                // Scale hull points back up to the original frame resolution.
                listPo.Add(contourPoints[hullIndices[j]] * DOWNSCALE_RATIO);
            }

            // Face landmarks converted once (loop-invariant; was rebuilt per hull point).
            List<Point> fLMscaled = OpenCVForUnityUtils.ConvertVector2ListToPointList(facePoints);

            // Draw every other hull point and link it to any face landmark within 8 px.
            for (int p = 0; p < listPo.Count; p++)
            {
                if (p % 2 != 0)
                {
                    continue;
                }

                Imgproc.circle(rgbaMat, listPo[p], 6, new Scalar(255, 0, 0, 255), -1);

                // Remember the last (hand point, face point) pair that came close.
                for (int q = 0; q < fLMscaled.Count; q++)
                {
                    if (ifLessThanDPoint(listPo[p], fLMscaled[q], 8))
                    {
                        handPoint = p;
                        facePoint = q;
                        print(Point1 + " " + Point2);
                    }
                }

                // Draw the stored hand-to-face link when this is the remembered hand point.
                if (p == handPoint && facePoint != 0)
                {
                    Point1 = listPo[p];
                    Point2 = fLMscaled[facePoint];
                    Imgproc.line(rgbaMat, Point1, Point2, new Scalar(255, 255, 255, 255));
                }
            }
        }
        // Detects the hand in the frame and draws it onto the image.
        /// <summary>
        /// Finds the largest blob of <paramref name="handColor"/>, draws wrist/palm/hand
        /// ranges onto <paramref name="rgbaMat"/>, and updates the static finger count.
        /// </summary>
        /// <param name="rgbaMat">Camera frame (RGBA); drawn on in place.</param>
        /// <param name="handColor">Color the blob detector should track.</param>
        private static void _handPoseEstimationProcess(Mat rgbaMat, Color handColor)
        {
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

            // Configure the detector with the target hand color.
            detector.setHsvColor(HGColorSpuiter.ColorToScalar(handColor));

            // NOTE(review): contours are fetched before process(); this works only if the
            // detector returns its internal list, which process() refills in place — confirm.
            List<MatOfPoint> contours = detector.getContours();

            detector.process(rgbaMat);
            if (contours.Count <= 0)
            {
                return;
            }

            // Find the contour with the largest oriented bounding box (assumed hand).
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth = rect.size.width;
            double boundHeight = rect.size.height;
            int boundPos = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos = i;
                }
            }

            OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
            // Draw the range down to the wrist.
            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(WristRangeColor), 2, 8, 0);

            // y coordinate 70% of the way down the box: defects below this are wrist dips,
            // not finger gaps.
            double a = boundRect.br().y - boundRect.tl().y;

            a = a * 0.7;
            a = boundRect.tl().y + a;

            // Draw the palm range.
            Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), HGColorSpuiter.ColorToScalar(PalmsRangeColor), 2, 8, 0);

            // Approximate the contour with fewer vertices within the given precision.
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
            contours[boundPos] = new MatOfPoint(pointMat.toArray());

            MatOfInt hull = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
            if (hull.toArray().Length < 3)
            {
                return;
            }
            Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

            // Hoisted: toList() copies the Mat on every call, so convert once instead of
            // inside the loops (was accidentally O(n^2)).
            List<int> hullIndices = hull.toList();
            List<Point> contourPoints = contours[boundPos].toList();
            List<int> defects = convexDefect.toList();

            // Hand outline: hull indices resolved to contour points.
            List<MatOfPoint> hullPoints = new List<MatOfPoint>();
            List<Point> listPo = new List<Point>();

            for (int j = 0; j < hullIndices.Count; j++)
            {
                listPo.Add(contourPoints[hullIndices[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            // Draw the hand outline.
            Imgproc.drawContours(rgbaMat, hullPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

            // Collect defect points recognised as gaps between fingers.
            // Each defect is 4 ints: start index, end index, farthest-point index, depth.
            List<Point> listPoDefect = new List<Point>();

            for (int j = 0; j < defects.Count; j = j + 4)
            {
                Point farPoint = contourPoints[defects[j + 2]];
                int depth = defects[j + 3];
                if (depth > depthThreashold && farPoint.y < a)
                {
                    listPoDefect.Add(farPoint);
                }
            }

            // (Removed: an unused MatOfPoint copy of listPo ("defectPoints") that was
            // built here but never drawn or read.)

            // Update the detected finger count, clamped to 5.
            numberOfFingers = listPoDefect.Count;
            if (numberOfFingers > 5)
            {
                numberOfFingers = 5;
            }

            // Draw a dot in each gap between fingers.
            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, HGColorSpuiter.ColorToScalar(BetweenFingersColor), -1);
            }
        }
Example #16
0
// Update is called once per frame
    /// <summary>
    /// Per-frame pipeline: grab the webcam frame, edge-detect, find convex hulls of
    /// external contours, keep square hulls of plausible size, rectify the detected
    /// square via perspectiveAlign(), and draw it onto the working image.
    /// </summary>
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            frame = webCamTextureToMatHelper.GetMat();
            frame.copyTo(img_orig);

            drawing = img_orig.clone();

            int lowThreshold = 50;
            const int ratio = 1;
            const int kernel_size = 3;

            Imgproc.cvtColor(img_orig, img_lab, Imgproc.COLOR_BGR2Lab);
            // Frame area, used below to filter out implausibly small/large hulls.
            double omrSize = img_orig.cols() * img_orig.rows();

            // Gray -> blur -> erode -> Canny edge detection.
            Imgproc.cvtColor(img_orig, img_gray, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.GaussianBlur(img_gray, img_gray, new Size(15, 15), 1.5, 1.5);
            Imgproc.erode(img_gray, img_gray, new Mat(), new Point(-1, -1), 1);
            Imgproc.Canny(img_gray, img_edges, lowThreshold, lowThreshold * ratio, kernel_size, false);

            // Shape detection: external contours of the edge image.
            List<MatOfPoint> contours = new List<MatOfPoint>();
            Mat hierarchy = new Mat();
            Imgproc.findContours(img_edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

            List<MatOfPoint> hulls = new List<MatOfPoint>();

            for (int i = 0; i < contours.Count; i++)
            {
                // Resolve convex-hull indices into actual contour points.
                MatOfInt hull_temp = new MatOfInt();
                Imgproc.convexHull(contours[i], hull_temp);
                int[] arrIndex = hull_temp.toArray();
                Point[] arrContour = contours[i].toArray();
                Point[] arrPoints = new Point[arrIndex.Length];

                for (int k = 0; k < arrIndex.Length; k++)
                {
                    arrPoints[k] = arrContour[arrIndex[k]];
                }

                MatOfPoint temp = new MatOfPoint();
                temp.fromArray(arrPoints);

                // Filter outliers: keep hulls covering between 1/3 and 4/5 of the frame.
                // (contourArea is now computed once instead of twice.)
                double hullArea = Imgproc.contourArea(temp);
                if (hullArea > omrSize / 3 && hullArea < (omrSize * 4) / 5)
                {
                    hulls.Add(temp);
                }
            }

            List<MatOfPoint2f> hull2f = new List<MatOfPoint2f>();
            for (int i = 0; i < hulls.Count; i++)
            {
                hull2f.Add(new MatOfPoint2f(hulls[i].toArray()));
            }

            for (int i = 0; i < hulls.Count; i++)
            {
                // Approximate each hull with a polygon; keep only square-ish ones.
                MatOfPoint2f approx = new MatOfPoint2f();

                Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
                List<Point> approx_polygon = approx.toList();
                if (!Scannerproc.isSquare(approx_polygon))
                {
                    continue;
                }

                // A square was found: remember its corners and rectify the image.
                nowRectPoints.Clear();
                nowRectPoints.AddRange(approx_polygon);
                perspectiveAlign();

                // (Removed: an unused center-of-mass computation (cx, cy) whose result
                // was never read.)

                Scannerproc.drawShape(drawing, approx_polygon, new Scalar(0, 255, 0));
            }

            if (showTextureOnScreen)
            {
                showCurrentTextureOnScreen();
            }
        }
    }
Example #17
0
        /*=============================================*
        * From per-contour vertices to a hand gesture
        *=============================================*/
        /// <summary>
        /// Analyses one candidate hand contour: draws its bounding box, the arm+hand
        /// hull, the palm (convexity-defect points), the hand range without the wrist,
        /// and finger lines, all onto <paramref name="rgbaMat"/>.
        /// </summary>
        /// <param name="rgbaMat">Rgba mat; drawn on in place.</param>
        /// <param name="contour">Candidate hand contour.</param>
        private static void _contourToHandGesture(Mat rgbaMat, MatOfPoint contour)
        {
            try
            {
                // Prepare vertex inspection.
                _pointOfVertices(rgbaMat, contour);

                // Bounding rectangle of the reference contour (drawn as a debug frame).
                OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
                Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(ContourRangeColor), 2, 8, 0);

                /*=============================================*
                 * Hand size including the arm
                 **=============================================*/
                MatOfInt hull = new MatOfInt();
                Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

                // Hoisted: toList() copies the Mat on every call; convert once for the
                // loops below (was accidentally O(n^2)).
                List<int> hullIndices = hull.toList();
                List<Point> contourPoints = contour.toList();

                // Collect hull points, merging vertices closer than 1/10 of the contour
                // size as measurement noise.
                List<Point> armPointList = new List<Point>();
                for (int j = 0; j < hullIndices.Count; j++)
                {
                    Point armPoint = contourPoints[hullIndices[j]];
                    bool addFlag = true;
                    foreach (Point point in armPointList)
                    {
                        double distance = Mathf.Sqrt((float)((armPoint.x - point.x) * (armPoint.x - point.x) + (armPoint.y - point.y) * (armPoint.y - point.y)));
                        if (distance <= Mathf.Min((float)boundRect.width, (float)boundRect.height) / 10)
                        {
                            addFlag = false;
                            break;
                        }
                    }
                    if (addFlag)
                    {
                        armPointList.Add(armPoint);
                    }
                }

                MatOfPoint armMatOfPoint = new MatOfPoint();
                armMatOfPoint.fromList(armPointList);
                List<MatOfPoint> armPoints = new List<MatOfPoint>();
                armPoints.Add(armMatOfPoint);

                // Draw the hand-plus-arm outline.
                Imgproc.drawContours(rgbaMat, armPoints, -1, HGColorSpuiter.ColorToScalar(ArmRangeColor), 3);

                // With fewer than 3 hull points the shape cannot be analysed further.
                if (hull.toArray().Length < 3)
                {
                    return;
                }

                /*=============================================*
                 * Palm size
                 **=============================================*/
                // Take only concave (defect) points from the convex hull for the palm.
                MatOfInt4 convexDefect = new MatOfInt4();
                Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

                // Each defect is 4 ints: start index, end index, farthest-point index, depth.
                List<int> defects = convexDefect.toList();
                List<Point> palmPointList = new List<Point>();
                for (int j = 0; j < defects.Count; j = j + 4)
                {
                    Point farPoint = contourPoints[defects[j + 2]];
                    int depth = defects[j + 3];
                    // NOTE(review): farPoint.y is compared against the rect HEIGHT, not a
                    // y coordinate; sibling methods use tl().y + 0.7 * height instead.
                    // Preserved as-is — confirm intent before changing.
                    if (depth > depthThreashold && farPoint.y < boundRect.br().y - boundRect.tl().y)
                    {
                        palmPointList.Add(farPoint);
                    }
                }

                MatOfPoint palmMatOfPoint = new MatOfPoint();
                palmMatOfPoint.fromList(palmPointList);
                List<MatOfPoint> palmPoints = new List<MatOfPoint>();
                palmPoints.Add(palmMatOfPoint);

                // Draw the palm outline.
                Imgproc.drawContours(rgbaMat, palmPoints, -1, HGColorSpuiter.ColorToScalar(PalmRangeColor), 3);

                // Guard: the steps below index the first and last palm point; return
                // early instead of throwing (and logging) when no defect qualified.
                if (palmPointList.Count == 0)
                {
                    return;
                }

                /*=============================================*
                 * Palm + fingertip size
                 **=============================================*/
                // Build the hand range (wrist excluded) from the arm points, replacing the
                // two wrist-side endpoints with the outermost palm points.
                List<Point> handPointList = new List<Point>();
                handPointList.AddRange(armPointList);
                handPointList.Reverse();
                handPointList.RemoveAt(0);
                handPointList.Insert(0, palmPointList[0]);
                handPointList.RemoveAt(handPointList.Count - 1);
                handPointList.Insert(handPointList.Count, palmPointList[palmPointList.Count - 1]);

                MatOfPoint handMatOfPoint = new MatOfPoint();
                handMatOfPoint.fromList(handPointList);
                List<MatOfPoint> handPoints = new List<MatOfPoint>();
                handPoints.Add(handMatOfPoint);

                Imgproc.drawContours(rgbaMat, handPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

                /*=============================================*
                 * Fingertip positions
                 **=============================================*/
                // Midpoint of each neighbouring pair of palm points (wrapping around).
                List<Point> palmCenterPoints = new List<Point>();
                for (int i = 0; i < palmPointList.Count; i++)
                {
                    Point palmPoint = palmPointList[i];
                    Point palmPointNext = (i + 1 < palmPointList.Count) ? palmPointList[i + 1] : palmPointList[0];

                    Point palmCenterPoint = new Point((palmPoint.x + palmPointNext.x) / 2, (palmPoint.y + palmPointNext.y) / 2);
                    palmCenterPoints.Add(palmCenterPoint);
                }

                // Connect each palm midpoint to the corresponding hand vertex (up to 5 fingers).
                for (int i = 0; i < palmCenterPoints.Count && i + 1 < handPointList.Count && i < 5; i++)
                {
                    Point palmPoint = palmCenterPoints[i];

                    List<Point> fingerList = new List<Point>();
                    fingerList.Add(palmPoint);
                    fingerList.Add(handPointList[i + 1]);

                    MatOfPoint fingerPoint = new MatOfPoint();
                    fingerPoint.fromList(fingerList);

                    List<MatOfPoint> fingerPoints = new List<MatOfPoint>();
                    fingerPoints.Add(fingerPoint);

                    Imgproc.drawContours(rgbaMat, fingerPoints, -1, HGColorSpuiter.ColorToScalar(FingerRangeColor), 3);
                }
            }
            catch (System.Exception e)
            {
                // Best-effort: hand-analysis failures are logged, not fatal.
                Debug.Log(e.Message);
            }
        }