// Use this for initialization.
// Demo: scatter random points on a black canvas, compute their convex hull,
// draw it, and show the result on this GameObject's material.
void Start()
{
    // Black 500x500 BGR canvas.
    Mat canvas = new Mat(500, 500, CvType.CV_8UC3, new Scalar(0, 0, 0));
    Debug.Log("imgMat.ToString() " + canvas.ToString());

    // Fill a point mat with random coordinates in [100, 400) and draw each
    // one as a small filled white dot.
    int pointCount = 50;
    MatOfPoint randomPoints = new MatOfPoint();
    randomPoints.alloc(pointCount);
    Core.randu(randomPoints, 100, 400);
    foreach (Point p in randomPoints.toArray())
    {
        Imgproc.circle(canvas, p, 2, new Scalar(255, 255, 255), -1);
    }

    // Convex hull returns indices into the point mat.
    MatOfInt hullIndices = new MatOfInt();
    Imgproc.convexHull(randomPoints, hullIndices);

    // Resolve the indices back to actual hull vertices.
    List<Point> allPoints = randomPoints.toList();
    List<int> indexList = hullIndices.toList();
    List<Point> hullVertices = new List<Point>();
    for (int j = 0; j < hullIndices.toList().Count; j++)
    {
        hullVertices.Add(allPoints[indexList[j]]);
    }

    // drawContours wants a list of contours, so wrap the hull in one.
    MatOfPoint hullMat = new MatOfPoint();
    hullMat.fromList(hullVertices);
    List<MatOfPoint> hullContours = new List<MatOfPoint>();
    hullContours.Add(hullMat);

    // Outline the hull in green, then convert BGR -> RGB for Unity display.
    Imgproc.drawContours(canvas, hullContours, -1, new Scalar(0, 255, 0), 2);
    Imgproc.cvtColor(canvas, canvas, Imgproc.COLOR_BGR2RGB);

    Texture2D texture = new Texture2D(canvas.cols(), canvas.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(canvas, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
// Use this for initialization.
// Same convex-hull demo as above, written against the older OpenCVForUnity
// API (drawing primitives live on Core, e.g. Core.circle).
void Start()
{
    // Black 500x500 BGR canvas.
    Mat demoMat = new Mat(500, 500, CvType.CV_8UC3, new Scalar(0, 0, 0));
    Debug.Log("imgMat dst ToString " + demoMat.ToString());

    // Random points in [100, 400), drawn as filled white dots.
    int sampleCount = 50;
    MatOfPoint samples = new MatOfPoint();
    samples.alloc(sampleCount);
    Core.randu(samples, 100, 400);
    Point[] samplePoints = samples.toArray();
    for (int i = 0; i < sampleCount; ++i)
    {
        Core.circle(demoMat, samplePoints[i], 2, new Scalar(255, 255, 255), -1);
    }

    // Hull of the sample cloud, returned as indices into `samples`.
    MatOfInt hullIdx = new MatOfInt();
    Imgproc.convexHull(samples, hullIdx);

    // Map indices back to vertex coordinates.
    List<Point> sampleList = samples.toList();
    List<int> idxList = hullIdx.toList();
    List<Point> vertices = new List<Point>();
    for (int j = 0; j < hullIdx.toList().Count; j++)
    {
        vertices.Add(sampleList[idxList[j]]);
    }

    // Wrap the hull vertices as a single contour and draw it in green.
    MatOfPoint vertexMat = new MatOfPoint();
    vertexMat.fromList(vertices);
    List<MatOfPoint> hullContour = new List<MatOfPoint>();
    hullContour.Add(vertexMat);
    Imgproc.drawContours(demoMat, hullContour, -1, new Scalar(0, 255, 0), 2);

    // BGR -> RGB, then push to the material's main texture.
    Imgproc.cvtColor(demoMat, demoMat, Imgproc.COLOR_BGR2RGB);
    Texture2D texture = new Texture2D(demoMat.cols(), demoMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(demoMat, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
// Analyze one contour: if its bounding box is large enough, simplify it,
// compute its convex hull, and (when the hull has exactly 4 vertices) try to
// register it as a match object via setMatchObject.
// Returns true when the contour passed the minimum-area filter, else false.
private bool analysisContoursRect(int index, List <MatOfPoint> contours, Mat result, List <MatchObject> matchObject)
{
    OpenCVForUnity.Rect _testDepthRect = Imgproc.boundingRect(contours[index]);
    // Minimum pixel area, derived from the configured size percentage and the match region.
    float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;
    if (_testDepthRect.area() > minAreaSize)
    {
        // Holders for hull data.
        MatOfInt hullInt = new MatOfInt();
        List <Point> hullPointList = new List <Point>();
        MatOfPoint hullPointMat = new MatOfPoint();
        List <MatOfPoint> hullPoints = new List <MatOfPoint>();
        MatOfInt4 defects = new MatOfInt4();

        // Simplify the contour: convert to float points, approximate with
        // epsilon=30, and write the result back into the contours list.
        MatOfPoint2f Temp2f = new MatOfPoint2f();
        //Convert contours(i) from MatOfPoint to MatOfPoint2f
        contours[index].convertTo(Temp2f, CvType.CV_32FC2);
        //Processing on mMOP2f1 which is in type MatOfPoint2f
        Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
        //Convert back to MatOfPoint and put the new values back into the contours list
        Temp2f.convertTo(contours[index], CvType.CV_32S);

        // Convex hull of the simplified contour (indices into the contour).
        Imgproc.convexHull(contours[index], hullInt);
        List <Point> pointMatList = contours[index].toList();
        List <int> hullIntList = hullInt.toList();
        // NOTE(review): fromList/Add run on EVERY iteration, so hullPoints
        // ends up holding N references to the same repeatedly-refilled mat.
        // Probably intended outside the loop — confirm against setMatchObject
        // before changing.
        for (int j = 0; j < hullInt.toList().Count; j++)
        {
            hullPointList.Add(pointMatList[hullIntList[j]]);
            hullPointMat.fromList(hullPointList);
            hullPoints.Add(hullPointMat);
        }

        // A 4-vertex hull is treated as a candidate rectangle.
        if (hullInt.toList().Count == 4)
        {
            if (!setMatchObject(index, pointMatList, contours, hullPoints, result, matchObject))
            {
                //Debug.Log("setMatchObject fail");
            }
        }

        // Release native memory / clear temporaries.
        defects.Dispose();
        hullPointList.Clear();
        hullPointMat.Dispose();
        hullInt.Dispose();
        hullPoints.Clear();
        return(true);
    }
    return(false);
}
// Detect the hand in the frame and draw it on the image:
// wrist/palm rectangles, convex-hull outline, and dots between fingers.
private static void _handPoseEstimationProcess(Mat rgbaMat, Color handColor)
{
    // Light blur to suppress color noise before segmentation.
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
    // Tell the detector which HSV color to segment.
    detector.setHsvColor(HGColorSpuiter.ColorToScalar(handColor));
    // NOTE(review): contours are fetched BEFORE detector.process(rgbaMat) runs,
    // so this frame is drawn using the previous frame's contours — confirm
    // whether that one-frame lag is intentional.
    List <MatOfPoint> contours = detector.getContours();
    detector.process(rgbaMat);
    if (contours.Count <= 0)
    {
        return;
    }

    // Pick the contour whose rotated bounding box has the largest area —
    // assumed to be the hand.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
    // Draw the full (wrist-inclusive) bounding box.
    Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(WristRangeColor), 2, 8, 0);

    // Palm cutoff: 70% of the way down the bounding box; defects below this
    // line are ignored as wrist artifacts.
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;
    // Draw the palm region.
    Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), HGColorSpuiter.ColorToScalar(PalmsRangeColor), 2, 8, 0);

    // Approximate the contour with fewer vertices (epsilon=3, closed).
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
    contours[boundPos] = new MatOfPoint(pointMat.toArray());

    // Convex hull + convexity defects of the simplified contour.
    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
    if (hull.toArray().Length < 3)
    {
        return;
    }
    Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

    // Gather the hull vertices and draw the hand outline.
    List <MatOfPoint> hullPoints = new List <MatOfPoint>();
    List <Point> listPo = new List <Point>();
    for (int j = 0; j < hull.toList().Count; j++)
    {
        listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
    }
    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.Add(e);
    Imgproc.drawContours(rgbaMat, hullPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

    // Defects deeper than the threshold and above the palm cutoff are the
    // gaps between fingers. Each defect is 4 ints: start, end, far, depth.
    List <MatOfPoint> defectPoints = new List <MatOfPoint>();
    List <Point> listPoDefect = new List <Point>();
    for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
    {
        Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
        int depth = convexDefect.toList()[j + 3];
        if (depth > depthThreashold && farPoint.y < a)
        {
            listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
        }
    }
    // NOTE(review): e2 is filled from listPo (hull points), not listPoDefect;
    // likely a copy/paste slip, but defectPoints is never used afterward so it
    // is dead code either way.
    MatOfPoint e2 = new MatOfPoint();
    e2.fromList(listPo);
    defectPoints.Add(e2);

    // Finger count = number of accepted defects, clamped to 5.
    numberOfFingers = listPoDefect.Count;
    if (numberOfFingers > 5)
    {
        numberOfFingers = 5;
    }
    // Mark each between-finger point.
    foreach (Point p in listPoDefect)
    {
        Imgproc.circle(rgbaMat, p, 6, HGColorSpuiter.ColorToScalar(BetweenFingersColor), -1);
    }
}
/// <summary>
/// Hand pose estimation: segments the selected color, finds the largest
/// contour, draws its hull, and counts fingers from convexity defects.
/// Legacy OpenCVForUnity API (drawing primitives on Core).
/// </summary>
/// <param name="rgbaMat">Camera frame, modified in place with the overlay.</param>
public void handPoseEstimationProcess(Mat rgbaMat)
{
    //Imgproc.blur(mRgba, mRgba, new Size(5,5));
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
    //Imgproc.medianBlur(mRgba, mRgba, 3);
    if (!isColorSelected)
    {
        return;
    }
    // NOTE(review): contours are fetched BEFORE detector.process(rgbaMat),
    // so the overlay uses the previous frame's contours — confirm intent.
    List <MatOfPoint> contours = detector.getContours();
    detector.process(rgbaMat);
    if (contours.Count <= 0)
    {
        return;
    }

    // Select the contour with the largest rotated-bounding-box area.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours [i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours [boundPos].toArray()));
    Core.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

    // Palm cutoff line: 70% of the way down the box; defects below it are
    // ignored (wrist area).
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;
    Core.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

    // Simplify the contour, then compute hull and convexity defects.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours [boundPos].toArray()), pointMat, 3, true);
    contours [boundPos] = new MatOfPoint(pointMat.toArray());
    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours [boundPos].toArray()), hull);
    if (hull.toArray().Length < 3)
    {
        return;
    }
    Imgproc.convexityDefects(new MatOfPoint(contours [boundPos].toArray()), hull, convexDefect);

    // Resolve hull indices to points for drawing.
    List <MatOfPoint> hullPoints = new List <MatOfPoint> ();
    List <Point> listPo = new List <Point> ();
    for (int j = 0; j < hull.toList().Count; j++)
    {
        listPo.Add(contours [boundPos].toList() [hull.toList() [j]]);
    }
    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.Add(e);

    // Accept defects deeper than the UI slider value and above the palm line.
    // Each defect is 4 ints: start, end, far, depth.
    List <MatOfPoint> defectPoints = new List <MatOfPoint> ();
    List <Point> listPoDefect = new List <Point> ();
    for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
    {
        Point farPoint = contours [boundPos].toList() [convexDefect.toList() [j + 2]];
        int depth = convexDefect.toList() [j + 3];
        if (depth > threasholdSlider.value && farPoint.y < a)
        {
            listPoDefect.Add(contours [boundPos].toList() [convexDefect.toList() [j + 2]]);
        }
    }
    // NOTE(review): e2 packs listPo (hull points), not listPoDefect — likely
    // a slip, though defectPoints is unused afterward (dead code).
    MatOfPoint e2 = new MatOfPoint();
    e2.fromList(listPo);
    defectPoints.Add(e2);

    // Draw the hull outline.
    Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

    // Finger count = accepted defects, clamped to 5, shown in the UI.
    this.numberOfFingers = listPoDefect.Count;
    if (this.numberOfFingers > 5)
    {
        this.numberOfFingers = 5;
    }
    numberOfFingersText.text = numberOfFingers.ToString();
    // Mark each between-finger point in magenta.
    foreach (Point p in listPoDefect)
    {
        Core.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
    }
}
/// <summary>
/// Hand pose estimation: segments the selected color, picks the largest
/// contour, draws its convex hull, and counts fingers from convexity defects.
/// </summary>
/// <param name="rgbaMat">Camera frame; the overlay is drawn into it in place.</param>
public void handPoseEstimationProcess(Mat rgbaMat)
{
    // Light blur to suppress color noise before contour extraction.
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

    if (!isColorSelected)
        return;

    List<MatOfPoint> contours = detector.getContours();
    detector.process(rgbaMat);

    if (contours.Count <= 0)
        return;

    // Choose the contour whose rotated bounding box covers the most area.
    int boundPos = 0;
    RotatedRect candidate = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double bestArea = candidate.size.width * candidate.size.height;
    for (int i = 1; i < contours.Count; i++)
    {
        candidate = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        double area = candidate.size.width * candidate.size.height;
        if (area > bestArea)
        {
            bestArea = area;
            boundPos = i;
        }
    }

    OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
    Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

    // Palm cutoff: 70% of the way down the bounding box. Defects below this
    // line belong to the wrist and are ignored.
    double palmCutoffY = boundRect.tl().y + (boundRect.br().y - boundRect.tl().y) * 0.7;
    Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, palmCutoffY), CONTOUR_COLOR, 2, 8, 0);

    // Simplify the winning contour, then take its hull and defects.
    MatOfPoint2f approx = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), approx, 3, true);
    contours[boundPos] = new MatOfPoint(approx.toArray());

    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);

    if (hull.toArray().Length < 3)
        return;

    Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

    // Resolve hull indices to vertex points for drawing.
    List<Point> contourPts = contours[boundPos].toList();
    List<int> hullIdx = hull.toList();
    List<Point> listPo = new List<Point>();
    foreach (int idx in hullIdx)
    {
        listPo.Add(contourPts[idx]);
    }
    MatOfPoint hullMat = new MatOfPoint();
    hullMat.fromList(listPo);
    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    hullPoints.Add(hullMat);

    // Each defect is 4 ints (start, end, far, depth). Keep far points that
    // are deep enough (UI slider) and above the palm cutoff.
    List<int> defectData = convexDefect.toList();
    List<Point> listPoDefect = new List<Point>();
    for (int j = 0; j < defectData.Count; j += 4)
    {
        Point farPoint = contourPts[defectData[j + 2]];
        int depth = defectData[j + 3];
        if (depth > threasholdSlider.value && farPoint.y < palmCutoffY)
        {
            listPoDefect.Add(farPoint);
        }
    }

    // Mirrors the original: this mat packs the hull points (not the defects)
    // and is never read afterward.
    List<MatOfPoint> defectPoints = new List<MatOfPoint>();
    MatOfPoint defectMat = new MatOfPoint();
    defectMat.fromList(listPo);
    defectPoints.Add(defectMat);

    // Draw the hand outline.
    Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

    // Finger count = accepted defects, clamped to 5, shown in the UI.
    this.numberOfFingers = listPoDefect.Count;
    if (this.numberOfFingers > 5)
        this.numberOfFingers = 5;

    numberOfFingersText.text = numberOfFingers.ToString();

    // Magenta dot between each pair of fingers.
    foreach (Point p in listPoDefect)
    {
        Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
    }
}
// Use depth-image contours to sample RGB/HSV colors from the color image.
// Draws all external depth contours into a CV_8UC1 result mat, gathers the
// bounding-box corners, sampled colors, and hull sizes of each sufficiently
// large contour, then hands everything to setColorMatchObject.
// Returns the contour-outline mat.
public Mat getContours(Mat srcColorMat, Mat srcDepthMat)
{
    // Work on copies so the caller's mats are untouched.
    Mat ColorMat = new Mat();
    Mat DepthMat = new Mat();
    Mat HsvMat = new Mat();
    srcColorMat.copyTo(ColorMat);
    srcDepthMat.copyTo(DepthMat);
    Imgproc.cvtColor(ColorMat, HsvMat, Imgproc.COLOR_BGR2HSV);
    List <ColorObject> colorObjects = new List <ColorObject>();
    // Single-channel canvas for the contour outlines.
    Mat resultMat = new Mat(DepthMat.height(), DepthMat.width(), CvType.CV_8UC1);
    Mat hierarchy = new Mat();
    List <Point> ConsistP = new List <Point>();
    List <MatOfPoint> contours = new List <MatOfPoint>();
    List <List <Point> > trianglePointList = new List <List <Point> >();
    // External contours only; NOTE: findContours modifies DepthMat (a copy).
    Imgproc.findContours(DepthMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    int numObjects = contours.Count;
    List <Scalar> clickRGB = new List <Scalar>();
    List <Scalar> clickHSV = new List <Scalar>();
    List <int> HullCountList = new List <int>();
    // Outline every contour in white on the result canvas.
    for (int i = 0; i < numObjects; i++)
    {
        Imgproc.drawContours(resultMat, contours, i, new Scalar(255), 1);
    }
    double[] GetRGB = new double[10]; // unused scratch (kept as-is)
    // Minimum pixel area derived from the configured size percentage.
    float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;
    if (numObjects > 0)
    {
        for (int index = 0; index < numObjects; index++)
        {
            OpenCVForUnity.Rect R0 = Imgproc.boundingRect(contours[index]);
            if (R0.area() > minAreaSize)
            {
                // Holders for hull data.
                MatOfInt hullInt = new MatOfInt();
                List <Point> hullPointList = new List <Point>();
                MatOfPoint hullPointMat = new MatOfPoint();
                List <MatOfPoint> hullPoints = new List <MatOfPoint>();
                MatOfInt4 defects = new MatOfInt4();
                // Simplify the contour (epsilon=30) in place.
                MatOfPoint2f Temp2f = new MatOfPoint2f();
                //Convert contours(i) from MatOfPoint to MatOfPoint2f
                contours[index].convertTo(Temp2f, CvType.CV_32FC2);
                //Processing on mMOP2f1 which is in type MatOfPoint2f
                Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
                //Convert back to MatOfPoint and put the new values back into the contours list
                Temp2f.convertTo(contours[index], CvType.CV_32S);
                // Convex hull of the simplified contour.
                Imgproc.convexHull(contours[index], hullInt);
                List <Point> pointMatList = contours[index].toList();
                List <int> hullIntList = hullInt.toList();
                // NOTE(review): fromList/Add on every iteration refills and
                // re-adds the same mat; hullPoints is cleared below without
                // being read, so this loop effectively only builds
                // hullPointList. Probably a copy/paste artifact — confirm.
                for (int j = 0; j < hullInt.toList().Count; j++)
                {
                    hullPointList.Add(pointMatList[hullIntList[j]]);
                    hullPointMat.fromList(hullPointList);
                    hullPoints.Add(hullPointMat);
                }
                // Record the 4 bounding-box corners (tl, br, tr, bl order).
                ConsistP.Add(new Point(R0.x, R0.y));
                ConsistP.Add(new Point(R0.x + R0.width, R0.y + R0.height));
                ConsistP.Add(new Point(R0.x + R0.width, R0.y));
                ConsistP.Add(new Point(R0.x, R0.y + R0.height));
                // Sample the object's color in both RGB and HSV space.
                clickRGB.Add(clickcolor(ColorMat, R0));
                clickHSV.Add(clickcolor(HsvMat, R0));
                HullCountList.Add(hullIntList.Count);
                trianglePointList.Add(pointMatList);
                // Release native memory / clear temporaries.
                defects.Dispose();
                hullPointList.Clear();
                hullPointMat.Dispose();
                hullInt.Dispose();
                hullPoints.Clear();
                //Debug.Log("ID = " + index + " Color = " + clickcolor(ColorMat, R0));
            }
        }
        // Match objects by the sampled colors.
        _matchColorObjectList = setColorMatchObject(ConsistP, trianglePointList, clickRGB, clickHSV, resultMat, HullCountList);
    }
    return(resultMat);
}
// Text detection + recognition demo (EAST detector + CRNN-style recognizer).
// Loads an image, detects rotated text boxes, recognizes each word, draws the
// results, and displays the image on this GameObject's material.
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    Mat img = Imgcodecs.imread(image_filepath, Imgcodecs.IMREAD_COLOR);
    if (img.empty())
    {
        // Fall back to a black placeholder so the rest of the pipeline still runs.
        Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
        img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
    }

    // Adjust Quad.transform.localScale to the image dimensions.
    gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the orthographic camera to the image's aspect ratio.
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = imageHeight / 2;
    }

    // Load both networks; either failing leaves the pair null.
    Net detector = null;
    Net recognizer = null;
    if (string.IsNullOrEmpty(detectionmodel_filepath) || string.IsNullOrEmpty(recognitionmodel_filepath))
    {
        Debug.LogError(detectionmodel_filepath + " or " + recognitionmodel_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    }
    else
    {
        detector = Dnn.readNet(detectionmodel_filepath);
        recognizer = Dnn.readNet(recognitionmodel_filepath);
    }

    if (detector == null || recognizer == null)
    {
        // Render the failure message onto the image itself.
        Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    }
    else
    {
        TickMeter tickMeter = new TickMeter();

        // EAST output layers: score map and geometry map.
        List <Mat> outs = new List <Mat>();
        List <string> outNames = new List <string>();
        outNames.Add("feature_fusion/Conv_7/Sigmoid");
        outNames.Add("feature_fusion/concat_3");

        // Create a 4D blob from the frame (mean-subtracted, RB swapped).
        Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(), inpHeight > 0 ? inpHeight : img.rows());
        Mat blob = Dnn.blobFromImage(img, 1.0, inpSize, new Scalar(123.68, 116.78, 103.94), true, false);

        // Run the detection model and time it.
        detector.setInput(blob);
        tickMeter.start();
        detector.forward(outs, outNames);
        tickMeter.stop();

        Mat scores = outs[0];
        Mat geometry = outs[1];

        // Decode predicted bounding boxes, then apply rotated NMS.
        List <RotatedRect> boxes = new List <RotatedRect>();
        List <float> confidences = new List <float>();
        decodeBoundingBoxes(scores, geometry, confThreshold, boxes, confidences);
        MatOfRotatedRect boxesMat = new MatOfRotatedRect(boxes.ToArray());
        MatOfFloat confidencesMat = new MatOfFloat(confidences.ToArray());
        MatOfInt indicesMat = new MatOfInt();
        Dnn.NMSBoxesRotated(boxesMat, confidencesMat, confThreshold, nmsThreshold, indicesMat);
        List <int> indices = indicesMat.toList();

        // Scale factor from network input size back to image size.
        // NOTE(review): if inpWidth/inpHeight are ints this is integer
        // division — confirm their declared type.
        Point ratio = new Point(img.cols() / inpWidth, img.rows() / inpHeight);

        // Render each surviving box and recognize its text.
        for (int i = 0; i < indices.Count; ++i)
        {
            RotatedRect box = boxes[indices[i]];
            Point[] vertices = new Point[4];
            box.points(vertices);
            // Map box corners back into image coordinates.
            for (int j = 0; j < 4; ++j)
            {
                vertices[j].x *= ratio.x;
                vertices[j].y *= ratio.y;
            }
            // Green quad around the detected text.
            for (int j = 0; j < 4; ++j)
            {
                Imgproc.line(img, vertices[j], vertices[(j + 1) % 4], new Scalar(0, 255, 0), 1);
            }
            if (recognizer != null)
            {
                // Rectify the quad to an axis-aligned crop, grayscale it,
                // and run the recognition model.
                Mat cropped = new Mat();
                fourPointsTransform(img, vertices, cropped);
                Imgproc.cvtColor(cropped, cropped, Imgproc.COLOR_BGR2GRAY);
                Mat blobCrop = Dnn.blobFromImage(cropped, 1.0 / 127.5, new Size(), Scalar.all(127.5));
                recognizer.setInput(blobCrop);
                tickMeter.start();
                Mat result = recognizer.forward();
                tickMeter.stop();
                string wordRecognized;
                decodeText(result, out wordRecognized);
                Imgproc.putText(img, wordRecognized, vertices[1], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 0, 0), 1, Imgproc.LINE_AA, false);
                Debug.Log(wordRecognized);
                // Release per-box native mats.
                cropped.Dispose();
                blobCrop.Dispose();
                result.Dispose();
            }
        }

        Debug.Log("Inference time, ms: " + tickMeter.getTimeMilli());

        // Release the network outputs and the networks themselves.
        for (int i = 0; i < outs.Count; i++)
        {
            outs[i].Dispose();
        }
        blob.Dispose();
        detector.Dispose();
        recognizer.Dispose();
    }

    // BGR -> RGB and display on the material.
    Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
    Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img, texture);
    gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    Utils.setDebugMode(false);
}
// Face-swap experiment: detect a face with dlib, draw its 68 landmarks and
// their convex hull, run triangulation/affine steps, then demonstrate a
// masked affine warp on a fixed triangle and show both images as sprites.
void Run()
{
    rgbaMat = Imgcodecs.imread(Application.dataPath + "/Resources/changer.jpg", 1);
    Imgproc.cvtColor(rgbaMat, rgbaMat, Imgproc.COLOR_BGR2RGBA);

    // 1. Face detection with the dlib landmark detector.
    List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect>();
    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();
    foreach (UnityEngine.Rect unityRect in result)
    {
        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
    }
    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
    List <List <Vector2> > landmarkPoints = new List <List <Vector2> >();
    // NOTE(review): detectResult[0] throws if no face was detected — no guard.
    OpenCVForUnity.Rect openCVRect = detectResult[0];
    UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
    // Extract the 68 landmark points from the detected face rect.
    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
    landmarkPoints.Add(points);

    // 2. Draw each landmark and collect it for the hull computation.
    pointList = new List <Point>();
    for (int i = 0; i < points.Count; i++)
    {
        Point pt = new Point(landmarkPoints[0][i].x, landmarkPoints[0][i].y);
        Imgproc.circle(rgbaMat, pt, 0, new Scalar(0, 255, 0, 255), 2, Imgproc.LINE_8, 0); // draw the 68 points
        pointList.Add(pt);
    }
    //Debug.Log(pointList.Count); //68

    // 3. Triangulation.
    TriangleDivide();
    // 4. Affine transform over each triangle.
    Affine();

    // 5. Display: convex hull of the landmarks, drawn in yellow.
    MatOfPoint pointsMat = new MatOfPoint();
    pointsMat.fromList(pointList);
    MatOfInt hullInt = new MatOfInt();
    Imgproc.convexHull(pointsMat, hullInt);
    List <Point> pointMatList = pointsMat.toList();
    List <int> hullIntList = hullInt.toList();
    List <Point> hullPointList = new List <Point>();
    for (int j = 0; j < hullInt.toList().Count; j++)
    {
        hullPointList.Add(pointMatList[hullIntList[j]]);
    }
    MatOfPoint hullPointMat = new MatOfPoint();
    hullPointMat.fromList(hullPointList); // e.g. 23x1
    List <MatOfPoint> hullPoints = new List <MatOfPoint>();
    hullPoints.Add(hullPointMat);
    Imgproc.drawContours(rgbaMat, hullPoints, -1, new Scalar(255, 255, 0, 255), 2);

    //------------------------------------------------//
    // Experimental scratch code below: builds a shifted copy of the
    // landmarks and warps a fixed triangle — kept for reference.
    //------------------------------------------------//
    dstPointList = new List <Point>();
    for (int i = 0; i < pointList.Count; i++)
    {
        Point pt = pointList[i] + new Point(100, 0);
        dstPointList.Add(pt);
    }
    // Leftover stub: body is entirely commented out in the original.
    for (int j = 0; j < 3; j++)
    {
        //srcTri.push_back(hullPointMat);
        //dstTri.push_back(hull2[corpd.index[j]]);
    }
    // Fixed demo triangles: draw pts1 into a mask, copy the image through the
    // mask, then affine-warp the masked image from srcTri to dstTri.
    Mat mask = Mat.zeros(rgbaMat.size(), CvType.CV_8UC1);
    Point p0 = new Point(0, 0);
    Point p1 = new Point(0, 256);
    Point p2 = new Point(256, 0);
    Point p3 = new Point(256, 0);
    Point p4 = new Point(512, 0);
    Point p5 = new Point(512, 256);
    Point p6 = new Point(256, 64);
    MatOfPoint pts1 = new MatOfPoint(new Point[3] { p0, p1, p2 });
    MatOfPoint pts2 = new MatOfPoint(new Point[3] { p3, p4, p5 }); // unused (kept as-is)
    MatOfPoint2f srcTri = new MatOfPoint2f(new Point[3] { p0, p1, p2 });
    MatOfPoint2f dstTri = new MatOfPoint2f(new Point[3] { p0, p1, p6 });
    List <MatOfPoint> contour = new List <MatOfPoint>() { pts1 };
    for (int i = 0; i < contour.Count; i++)
    {
        // Fill the triangle onto the mask.
        Imgproc.drawContours(mask, contour, i, new Scalar(255), -1);
    }
    rgbaMat.copyTo(mask, mask);
    Mat warpMat = Imgproc.getAffineTransform(srcTri, dstTri);
    Mat warpImage = Mat.zeros(mask.size(), mask.type());
    Imgproc.warpAffine(mask, warpImage, warpMat, warpImage.size());
    //------------------------------------------------//

    // Source image sprite.
    Texture2D t2d = new Texture2D(rgbaMat.width(), rgbaMat.height(), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D(rgbaMat, t2d);
    Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    srcImage.sprite = sp;
    srcImage.preserveAspect = true;
    // Warped image sprite.
    Texture2D dst_t2d = new Texture2D(warpImage.width(), warpImage.height(), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D(warpImage, dst_t2d);
    Sprite dst_sp = Sprite.Create(dst_t2d, new UnityEngine.Rect(0, 0, dst_t2d.width, dst_t2d.height), Vector2.zero);
    dstImage.sprite = dst_sp;
    dstImage.preserveAspect = true;
}
// Hand pose estimation with AR pose: segments the selected color, finds the
// largest contour, solves a PnP pose from its bounding box to place an AR
// object, then draws the convex hull and logs convexity-defect data.
private void HandPoseEstimationProcess(Mat rgbaMat)
{
    //Imgproc.blur(mRgba, mRgba, new Size(5,5));
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
    //Imgproc.medianBlur(mRgba, mRgba, 3);
    if (!isColorSelected)
    {
        return;
    }
    // NOTE(review): contours are fetched BEFORE detector.Process(rgbaMat),
    // so this frame uses the previous frame's contours — confirm intent.
    List <MatOfPoint> contours = detector.GetContours();
    detector.Process(rgbaMat);
    if (contours.Count <= 0)
    {
        return;
    }

    // Pick the contour with the largest rotated-bounding-box area.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    MatOfPoint contour = contours[boundPos];
    OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
    Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

    // Corner points of the bounding box (locals unused below; kept as-is).
    Point bottomLeft = new Point(boundRect.x, boundRect.y + boundRect.height);
    Point topLeft = new Point(boundRect.x, boundRect.y);
    Point bottomRight = new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height);
    Point topRight = new Point(boundRect.x + boundRect.width, boundRect.y);
    // 2D image points for solvePnP, ordered TL, TR, BR, BL.
    rectPoints = new MatOfPoint2f(new Point(boundRect.x, boundRect.y), //topleft
        new Point(boundRect.x + boundRect.width, boundRect.y), //Top Right
        new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height), //Bottom Right
        new Point(boundRect.x, boundRect.y + boundRect.height) //Bottom Left
        );

    // Matching 3D corners of a unit square marker, same order.
    List <Point3> m_markerCorners3dList = new List <Point3>();
    m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
    m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
    m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
    m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
    m_markerCorners3d.fromList(m_markerCorners3dList);

    // Estimate pose: rotation/translation of the marker square.
    Mat Rvec = new Mat();
    Mat Tvec = new Mat();
    Mat raux = new Mat();
    Mat taux = new Mat();
    Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);
    raux.convertTo(Rvec, CvType.CV_32F);
    taux.convertTo(Tvec, CvType.CV_32F);
    rotMat = new Mat(3, 3, CvType.CV_64FC1);
    Calib3d.Rodrigues(Rvec, rotMat);
    // Pack [R|t] into a 4x4 Unity matrix.
    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
    //Debug.Log ("transformationM " + transformationM.ToString ());
    Rvec.Dispose();
    Tvec.Dispose();
    raux.Dispose();
    taux.Dispose();
    rotMat.Dispose();

    // Convert from OpenCV to Unity camera space and place the AR object.
    ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
    if (ARGameObject != null)
    {
        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
        if (deactivateCoroutine == null)
        {
            deactivateCoroutine = StartCoroutine(Wait(10.0f));
        }
        ARGameObject.SetActive(true);
    }
    //end pose estimation

    // Simplify the contour, then compute hull and defects.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
    contour = new MatOfPoint(pointMat.toArray());
    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);
    if (hull.toArray().Length < 3)
    {
        return;
    }
    Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

    // Resolve hull indices to points for drawing.
    List <MatOfPoint> hullPoints = new List <MatOfPoint>();
    List <Point> listPo = new List <Point>();
    for (int j = 0; j < hull.toList().Count; j++)
    {
        listPo.Add(contour.toList()[hull.toList()[j]]);
    }
    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.Add(e);

    // NOTE(review): the depth/palm filter is commented out, so listPoDefect
    // stays empty and numberOfFingers is always 0 here.
    List <Point> listPoDefect = new List <Point>();
    if (convexDefect.rows() > 0)
    {
        List <int> convexDefectList = convexDefect.toList();
        List <Point> contourList = contour.toList();
        for (int j = 0; j < convexDefectList.Count; j = j + 4)
        {
            Point farPoint = contourList[convexDefectList[j + 2]];
            int depth = convexDefectList[j + 3];
            //if (depth > threasholdSlider.value && farPoint.y < a)
            //{
            //    listPoDefect.Add(contourList[convexDefectList[j + 2]]);
            //}
        }
    }
    Debug.Log("hull: " + hull.toList());
    if (convexDefect.rows() > 0)
    {
        Debug.Log("defects: " + convexDefect.toList());
    }

    //use these contours to do heart detection
    Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
    int defectsTotal = (int)convexDefect.total();
    Debug.Log("Defect total " + defectsTotal);

    // Finger count = accepted defects (always 0 while the filter above is
    // commented out), clamped to 5.
    this.numberOfFingers = listPoDefect.Count;
    if (this.numberOfFingers > 5)
    {
        this.numberOfFingers = 5;
    }
    Debug.Log("numberOfFingers " + numberOfFingers);
    Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
    numberOfFingersText.text = numberOfFingers.ToString();
    foreach (Point p in listPoDefect)
    {
        Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
    }
}
/// <summary>
/// Finds the largest color-blob contour, draws its bounding box and convex
/// hull, and counts raised fingers from the hull's convexity defects
/// (defect depth > 8700 located in the upper 70% of the bounding box).
/// </summary>
/// <param name="rgbaMat">Camera frame in RGBA; annotations are drawn on it in place.</param>
public void handPoseEstimationProcess(Mat rgbaMat)
{
    // Light blur to suppress sensor noise before contour analysis.
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

    if (!isColorSelected)
        return;

    // NOTE(review): contours are fetched before process() runs — preserved
    // from the original; confirm the detector hands back a live list.
    List<MatOfPoint> contours = detector.getContours();
    detector.process(rgbaMat);

    if (contours.Count <= 0)
        return;

    // Select the contour whose minimum-area rectangle is largest.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
    Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
    Imgproc.circle(rgbaMat, boundRect.tl(), 6, new Scalar(0, 255, 0, 255), -1);
    Imgproc.circle(rgbaMat, boundRect.br(), 6, new Scalar(0, 255, 0, 255), -1);

    pointbX = boundRect.br().x;
    pointbY = boundRect.br().y;
    pointaX = boundRect.x;
    // NOTE(review): this overwrites pointbY (set two lines above) with the
    // rect's top-left y — looks like a copy-paste slip for "pointaY =
    // boundRect.y". Confirm the intended field exists before changing;
    // behavior preserved here.
    pointbY = boundRect.y;

    // Cut-off line at 70% of the bounding-box height: defects below it
    // (toward the wrist) are not counted as fingers.
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;
    Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

    // Simplify the contour before hull/defect extraction.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
    contours[boundPos] = new MatOfPoint(pointMat.toArray());

    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);

    if (hull.toArray().Length < 3)
        return;

    Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

    // Hoisted out of the loops below: each toList() call marshals the whole
    // native Mat into a managed list, so calling it per iteration was
    // accidentally quadratic.
    List<Point> contourPoints = contours[boundPos].toList();
    List<int> hullIndices = hull.toList();
    List<int> defectIndices = convexDefect.toList();

    // Hull vertices as drawable contour.
    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    List<Point> listPo = new List<Point>();
    for (int j = 0; j < hullIndices.Count; j++)
    {
        listPo.Add(contourPoints[hullIndices[j]]);
    }
    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.Add(e);

    // Convexity defects come in (start, end, far, depth) quadruples;
    // "j + 3 < Count" guards against a truncated final quadruple.
    List<Point> listPoDefect = new List<Point>();
    for (int j = 0; j + 3 < defectIndices.Count; j = j + 4)
    {
        Point farPoint = contourPoints[defectIndices[j + 2]];
        int depth = defectIndices[j + 3];
        if (depth > 8700 && farPoint.y < a)
        {
            listPoDefect.Add(farPoint);
        }
    }

    Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

    this.numberOfFingers = listPoDefect.Count;
    if (this.numberOfFingers > 5)
    {
        this.numberOfFingers = 5;
    }

    foreach (Point p in listPoDefect)
    {
        Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
    }
}
/// <summary>
/// Locates the largest contour in the thresholded frame, finds the two
/// left-most finger-valley defect points, and positions, scales and rotates
/// the five ring objects around the midpoint between them.
/// </summary>
void Process()
{
    Mat hierarchy = new Mat();
    List<MatOfPoint> contours = new List<MatOfPoint>();
    MatOfInt hullInt = new MatOfInt();

    frameclone = frame_thresh_final.clone();
    Imgproc.findContours(frameclone, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);

    // Guard: findContours can legitimately return nothing (e.g. an empty
    // mask); indexing contours[0] would otherwise throw.
    if (contours.Count == 0)
        return;

    // Largest contour by point count.
    MatOfPoint maxitem = contours[0];
    n = 0;
    for (int i = 0; i < contours.Count; i++)
    {
        if (contours[i].total() > maxitem.total())
        {
            maxitem = contours[i];
            n = i;
        }
    }

    OpenCVForUnity.Rect bRect = Imgproc.boundingRect(maxitem);
    int bRect_height = bRect.height;

    // Fill the selected contour so later stages see a solid blob.
    Imgproc.drawContours(frame_thresh_final, contours, n, new Scalar(255, 255, 255), -1);

    Imgproc.convexHull(maxitem, hullInt);

    List<Point> maxitemPointList = maxitem.toList();
    List<int> hullIntList = hullInt.toList();
    List<Point> hullPointList = new List<Point>();
    for (int j = 0; j < hullIntList.Count; j++)
    {
        hullPointList.Add(maxitemPointList[hullIntList[j]]);
    }
    MatOfPoint hullPointMat = new MatOfPoint();
    hullPointMat.fromList(hullPointList);
    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    hullPoints.Add(hullPointMat);
    //Imgproc.drawContours (frame, hullPoints, -1, new Scalar (0, 255, 0), 2);

    MatOfInt4 convexityDef = new MatOfInt4();
    Imgproc.convexityDefects(maxitem, hullInt, convexityDef);
    List<int> conDefIntList = convexityDef.toList();

    List<Point> startpts = new List<Point>();
    List<Point> farpts = new List<Point>();
    List<Point> endpts = new List<Point>();

    // Track the two left-most (smallest x) accepted defect points.
    double defx1 = 1000, defx2 = 1000;
    int countx1 = 0, countx2 = 0;
    int tolerance = (int)(bRect_height / 5.5);
    int count = 0, index = 0;
    double angleTol = 95.0;
    // Sized to the number of defect quadruples so it can never overflow
    // (the original fixed int[100] could when count exceeded 100).
    int[] defarray = new int[Math.Max(100, conDefIntList.Count / 4)];

    for (int i = 0; i < conDefIntList.Count / 4; i++)
    {
        // Each defect quadruple: start index, end index, farthest index, depth.
        startpts.Add(maxitemPointList[conDefIntList[4 * i]]);
        endpts.Add(maxitemPointList[conDefIntList[4 * i + 1]]);
        farpts.Add(maxitemPointList[conDefIntList[4 * i + 2]]);
        Point s = startpts[i];
        Point e = endpts[i];
        Point f = farpts[i];

        // Keep only sharp, deep valleys (the gaps between fingers).
        if (GetAngle(s, f, e) < angleTol && GetDistance(s, f) > tolerance && GetDistance(e, f) > tolerance)
        {
            if (f.x < defx1)
            {
                defx2 = defx1;
                countx2 = countx1;
                defx1 = f.x;
                countx1 = count;
            }
            else if (f.x < defx2)
            {
                defx2 = f.x;
                countx2 = count;
            }
            defarray[count] = index;
            count++;
        }
        index++;
    }

    // Guard: with no defect points there is nothing to anchor the rings to;
    // the original would throw on farpts[defarray[countx1]].
    if (farpts.Count == 0)
        return;

    Point point1 = farpts[defarray[countx1]];
    Point point2 = farpts[defarray[countx2]];
    point1.y -= 5;

    // Midpoint between the two valleys, recentered to screen coordinates.
    double posX = (point1.x + point2.x) / 2.0;
    double posY = (point1.y + point2.y) / 2.0;
    posX_new = (float)(posX - 240);
    posY_new = (float)(-posY + 320);

    // Ring scales are proportional to the valley-to-valley distance.
    double dist = Math.Sqrt(Math.Pow(point1.x - point2.x, 2) + Math.Pow(point1.y - point2.y, 2));
    scale1 = dist * 500000 / 640.0;
    scale2 = dist * 700 / 640.0;
    scale3 = dist * 600 / 640.0;
    scale4 = dist * 15 / 640.0;
    scale5 = dist * 70 / 640.0;

    for (int i = 0; i < 5; i++)
    {
        ringObj[i].transform.position = new Vector3(posX_new, posY_new, 0.0f);
    }
    ringObj[0].transform.localScale = new Vector3((float)scale1, (float)scale1, (float)(scale1 * 1.5));
    ringObj[1].transform.localScale = new Vector3((float)scale2, (float)scale2, (float)(scale2));
    ringObj[2].transform.localScale = new Vector3((float)scale3, (float)scale3, (float)(scale3));
    ringObj[3].transform.localScale = new Vector3((float)scale4, (float)scale4, (float)(scale4));
    ringObj[4].transform.localScale = new Vector3((float)scale5, (float)scale5, (float)(scale5));

    // Tilt the rings to match the finger's in-plane rotation.
    Point point3 = new Point(point1.x, point2.y);
    angle_rot = GetAngle(point1, point2, point3);
    for (int i = 0; i < 5; i++)
    {
        ringObj[i].transform.RotateAround(new Vector3(posX_new, posY_new, 0.0f), Vector3.forward, (float)angle_rot);
    }
}
/// <summary>
/// Detects the largest color-blob contour (optionally on a down-scaled copy
/// of the frame), draws its bounding box and every other simplified hull
/// point, and links a hull point to the nearest tracked face landmark once
/// they come within 8 px of each other.
/// </summary>
/// <param name="rgbaMat">Full-resolution RGBA camera frame; drawn on in place.</param>
private void HandPoseEstimationProcess(Mat rgbaMat)
{
    float DOWNSCALE_RATIO = 1.0f;
    if (enableDownScale)
    {
        // Analyze a down-scaled copy; detected points are scaled back up below.
        mRgba = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
        DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
    }
    else
    {
        rgbaMat.copyTo(mRgba);
    }

    // Light blur to suppress sensor noise before contour analysis.
    Imgproc.GaussianBlur(mRgba, mRgba, new Size(3, 3), 1, 1);

    if (!isColorSelected)
        return;

    // NOTE(review): contours are fetched before Process() runs — preserved
    // from the original; confirm the detector hands back a live list.
    List<MatOfPoint> contours = detector.GetContours();
    detector.Process(mRgba);

    if (contours.Count <= 0)
        return;

    // Select the contour whose minimum-area rectangle is largest.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    MatOfPoint contour = contours[boundPos];

    OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
    Imgproc.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

    // Cut-off line at 70% of the bounding-box height (kept for parity with
    // the commented-out defect filtering below).
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;

    // Simplify the contour before hull/defect extraction.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
    contour = new MatOfPoint(pointMat.toArray());

    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

    if (hull.toArray().Length < 3)
        return;

    // Computed for parity with the original; the defect results are not
    // consumed (the filtering code that used them is disabled).
    Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

    // Hoisted out of the loop: each toList() call marshals the whole native
    // Mat, so per-iteration calls were accidentally quadratic.
    List<Point> contourPoints = contour.toList();
    List<int> hullIndices = hull.toList();

    // Hull points scaled back to full-frame coordinates.
    List<Point> listPo = new List<Point>();
    for (int j = 0; j < hullIndices.Count; j++)
    {
        listPo.Add(contourPoints[hullIndices[j]] * DOWNSCALE_RATIO);
    }

    // Face landmarks converted once — loop-invariant; the original
    // recomputed this conversion for every hull point.
    List<Point> fLMscaled = OpenCVForUnityUtils.ConvertVector2ListToPointList(facePoints);

    // Every other hull point is drawn and checked for face proximity.
    for (int p = 0; p < listPo.Count; p++)
    {
        if (p % 2 != 0)
            continue;

        Imgproc.circle(rgbaMat, listPo[p], 6, new Scalar(255, 0, 0, 255), -1);

        // Record the latest hull-point/landmark pair within 8 px.
        for (int q = 0; q < fLMscaled.Count; q++)
        {
            if (ifLessThanDPoint(listPo[p], fLMscaled[q], 8))
            {
                handPoint = p;
                facePoint = q;
                print(Point1 + " " + Point2);
            }
        }

        if (p == handPoint && facePoint != 0)
        {
            Point1 = listPo[p];
            Point2 = fLMscaled[facePoint];
            Imgproc.line(rgbaMat, Point1, Point2, new Scalar(255, 255, 255, 255));
        }
    }
}
/// <summary>
/// Tracks the topmost fingertip in the thresholded frame and uses it as a
/// pointer to draw an unlock pattern over four spheres; appends the digit of
/// any sphere the pointer enters and checks the pattern against the password
/// once enough digits have been entered.
/// </summary>
void Process()
{
    string imText = "DRAW PATTERN";
    Core.putText(frame_pot, imText, new Point(110, 50), Core.FONT_HERSHEY_COMPLEX, 1.0, new Scalar(255, 0, 0), 2);

    Mat hierarchy = new Mat();
    List<MatOfPoint> contours = new List<MatOfPoint>();
    MatOfInt hullInt = new MatOfInt();

    frameclone = frame_thresh_final.clone();
    Imgproc.findContours(frameclone, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);

    // Guard: an empty mask yields no contours; contours[0] would throw.
    if (contours.Count == 0)
        return;

    // Largest contour by point count.
    MatOfPoint maxitem = contours[0];
    n = 0;
    for (int i = 0; i < contours.Count; i++)
    {
        if (contours[i].total() > maxitem.total())
        {
            maxitem = contours[i];
            n = i;
        }
    }

    OpenCVForUnity.Rect bRect = Imgproc.boundingRect(maxitem);
    int bRect_height = bRect.height;
    int bRect_width = bRect.width;

    // Ignore blobs too small to be a hand.
    if (bRect_height < 200 || bRect_width < 200)
        return;

    Imgproc.convexHull(maxitem, hullInt);

    List<Point> maxitemPointList = maxitem.toList();
    List<int> hullIntList = hullInt.toList();
    List<Point> hullPointList = new List<Point>();
    for (int j = 0; j < hullIntList.Count; j++)
    {
        hullPointList.Add(maxitemPointList[hullIntList[j]]);
    }
    MatOfPoint hullPointMat = new MatOfPoint();
    hullPointMat.fromList(hullPointList);
    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    hullPoints.Add(hullPointMat);
    //Imgproc.drawContours (frame_pot, hullPoints, -1, new Scalar (0, 0, 255), 2);

    MatOfInt4 convexityDef = new MatOfInt4();
    Imgproc.convexityDefects(maxitem, hullInt, convexityDef);
    List<int> conDefIntList = convexityDef.toList();

    List<Point> startpts = new List<Point>();
    List<Point> farpts = new List<Point>();
    List<Point> endpts = new List<Point>();
    int tolerance = (int)(bRect_height / 6);

    int coordX = 10000, coordY = 10000;
    int x1 = (int)sphere1.transform.position.x;
    int y1 = (int)sphere1.transform.position.y;
    int x2 = (int)sphere2.transform.position.x;
    int y2 = (int)sphere2.transform.position.y;
    int x3 = (int)sphere3.transform.position.x;
    int y3 = (int)sphere3.transform.position.y;
    int x4 = (int)sphere4.transform.position.x;
    int y4 = (int)sphere4.transform.position.y;

    // The pointer is the topmost (smallest y) defect start point whose
    // valley is deep enough — i.e. the raised fingertip.
    Point pointer = new Point();
    for (int i = 0; i < conDefIntList.Count / 4; i++)
    {
        // Each defect quadruple: start index, end index, farthest index, depth.
        startpts.Add(maxitemPointList[conDefIntList[4 * i]]);
        endpts.Add(maxitemPointList[conDefIntList[4 * i + 1]]);
        farpts.Add(maxitemPointList[conDefIntList[4 * i + 2]]);
        Point s = startpts[i];
        Point f = farpts[i];
        if (GetDistance(s, f) > tolerance && s.y < coordY)
        {
            pointer = s;
            coordY = (int)s.y;
            coordX = (int)s.x;
        }
    }
    Core.circle(frame_pot, pointer, 15, new Scalar(255, 225, 0), -1);

    // Convert to the spheres' screen-centered coordinate space.
    coordX = coordX - 240;
    coordY = -coordY + 320;

    // A sphere is hit when the pointer lands inside its 50px half-width box.
    // Re-entering the sphere selected last does nothing.
    if (coordX > x1 - 50 && coordX < x1 + 50 && coordY > y1 - 50 && coordY < y1 + 50)
    {
        if (previous.Equals('1'))
            return;
        input += "1";
        AddLine(previous, '1');
        previous = '1';
        Material mat1 = sphere1.GetComponent<Renderer>().material;
        mat1.color = Color.yellow;
        StartCoroutine(WaitAndChangeColor("1"));
    }
    else if (coordX > x2 - 50 && coordX < x2 + 50 && coordY > y2 - 50 && coordY < y2 + 50)
    {
        if (previous.Equals('2'))
            return;
        input += "2";
        AddLine(previous, '2');
        previous = '2';
        Material mat2 = sphere2.GetComponent<Renderer>().material;
        mat2.color = Color.yellow;
        StartCoroutine(WaitAndChangeColor("2"));
    }
    else if (coordX > x3 - 50 && coordX < x3 + 50 && coordY > y3 - 50 && coordY < y3 + 50)
    {
        if (previous.Equals('3'))
            return;
        input += "3";
        AddLine(previous, '3');
        previous = '3';
        Material mat3 = sphere3.GetComponent<Renderer>().material;
        mat3.color = Color.yellow;
        StartCoroutine(WaitAndChangeColor("3"));
    }
    else if (coordX > x4 - 50 && coordX < x4 + 50 && coordY > y4 - 50 && coordY < y4 + 50)
    {
        if (previous.Equals('4'))
            return;
        input += "4";
        AddLine(previous, '4');
        previous = '4';
        Material mat4 = sphere4.GetComponent<Renderer>().material;
        mat4.color = Color.yellow;
        StartCoroutine(WaitAndChangeColor("4"));
    }

    // Once the pattern is the same length as the password, judge it.
    if (input.Length == password.Length)
    {
        auth = true;
        correct = input.Equals(password);
    }
}
/*=============================================*
 * From per-contour vertices to hand-gesture classification
 *=============================================*/
/// <summary>
/// Classifies a contour as a hand gesture by drawing, in order: the bounding
/// rectangle, the arm-inclusive convex hull, the palm region derived from
/// convexity defects, the hand outline with the wrist trimmed off, and the
/// fingertip segments. Any failure is caught and logged.
/// </summary>
/// <param name="rgbaMat">RGBA frame that all shapes are drawn onto.</param>
/// <param name="contour">Candidate hand contour.</param>
private static void _contourToHandGesture(Mat rgbaMat, MatOfPoint contour)
{
    try
    {
        // Prepare vertex inspection for this contour.
        _pointOfVertices(rgbaMat, contour);

        // Bounding rectangle of the reference contour (drawn for reference).
        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
        Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(ContourRangeColor), 2, 8, 0);

        /*=============================================*
         * Size of the hand including the arm
         *=============================================*/
        MatOfInt hull = new MatOfInt();
        Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

        // Hoisted out of the loops: each toList() call marshals the whole
        // native Mat, so per-iteration calls were accidentally quadratic.
        List<Point> contourPoints = contour.toList();
        List<int> hullIndices = hull.toList();

        // Collect hull vertices; vertices closer than 1/10 of the bounding
        // box to an already-kept vertex are merged as noise.
        List<Point> armPointList = new List<Point>();
        for (int j = 0; j < hullIndices.Count; j++)
        {
            Point armPoint = contourPoints[hullIndices[j]];
            bool addFlag = true;
            foreach (Point point in armPointList)
            {
                double distance = Mathf.Sqrt((float)((armPoint.x - point.x) * (armPoint.x - point.x) + (armPoint.y - point.y) * (armPoint.y - point.y)));
                if (distance <= Mathf.Min((float)boundRect.width, (float)boundRect.height) / 10)
                {
                    addFlag = false;
                    break;
                }
            }
            if (addFlag)
            {
                armPointList.Add(armPoint);
            }
        }

        MatOfPoint armMatOfPoint = new MatOfPoint();
        armMatOfPoint.fromList(armPointList);
        List<MatOfPoint> armPoints = new List<MatOfPoint>();
        armPoints.Add(armMatOfPoint);
        // Draw the arm-inclusive hand region.
        Imgproc.drawContours(rgbaMat, armPoints, -1, HGColorSpuiter.ColorToScalar(ArmRangeColor), 3);

        // A triangular hull carries too little information to classify further.
        if (hull.toArray().Length < 3)
            return;

        /*=============================================*
         * Size of the palm
         *=============================================*/
        // Keep only the concave (defect) points of the hull to get the palm.
        MatOfInt4 convexDefect = new MatOfInt4();
        Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

        // Defects come in (start, end, far, depth) quadruples.
        List<int> defectIndices = convexDefect.toList();
        List<Point> palmPointList = new List<Point>();
        for (int j = 0; j + 3 < defectIndices.Count; j = j + 4)
        {
            Point farPoint = contourPoints[defectIndices[j + 2]];
            int depth = defectIndices[j + 3];
            if (depth > depthThreashold && farPoint.y < boundRect.br().y - boundRect.tl().y)
            {
                palmPointList.Add(farPoint);
            }
        }
        MatOfPoint palmMatOfPoint = new MatOfPoint();
        palmMatOfPoint.fromList(palmPointList);
        List<MatOfPoint> palmPoints = new List<MatOfPoint>();
        palmPoints.Add(palmMatOfPoint);
        // Draw the palm region.
        Imgproc.drawContours(rgbaMat, palmPoints, -1, HGColorSpuiter.ColorToScalar(PalmRangeColor), 3);

        /*=============================================*
         * Size of the palm plus fingertips
         *=============================================*/
        // Trim the wrist off the arm hull, substituting the first and last
        // palm defect points at the two ends.
        // NOTE(review): throws when palmPointList is empty (swallowed and
        // logged by the catch below) — behavior preserved from the original.
        List<Point> handPointList = new List<Point>();
        handPointList.AddRange(armPointList);
        handPointList.Reverse();
        handPointList.RemoveAt(0);
        handPointList.Insert(0, palmPointList[0]);
        handPointList.RemoveAt(handPointList.Count - 1);
        handPointList.Insert(handPointList.Count, palmPointList[palmPointList.Count - 1]);
        MatOfPoint handMatOfPoint = new MatOfPoint();
        handMatOfPoint.fromList(handPointList);
        List<MatOfPoint> handPoints = new List<MatOfPoint>();
        handPoints.Add(handMatOfPoint);
        // Draw the wrist-trimmed hand region.
        Imgproc.drawContours(rgbaMat, handPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

        /*=============================================*
         * Fingertip positions
         *=============================================*/
        // Midpoints between adjacent palm defect points (wrapping around).
        List<Point> palmCenterPoints = new List<Point>();
        for (int i = 0; i < palmPointList.Count; i++)
        {
            Point palmPoint = palmPointList[i];
            Point palmPointNext = (i + 1 < palmPointList.Count) ? palmPointList[i + 1] : palmPointList[0];
            Point palmCenterPoint = new Point((palmPoint.x + palmPointNext.x) / 2, (palmPoint.y + palmPointNext.y) / 2);
            palmCenterPoints.Add(palmCenterPoint);
        }

        // Connect each palm midpoint to the nearest hand vertex (at most
        // five fingers).
        for (int i = 0; i < palmCenterPoints.Count && i + 1 < handPointList.Count && i < 5; i++)
        {
            List<Point> fingerList = new List<Point>();
            fingerList.Add(palmCenterPoints[i]);
            fingerList.Add(handPointList[i + 1]);
            MatOfPoint fingerPoint = new MatOfPoint();
            fingerPoint.fromList(fingerList);
            List<MatOfPoint> fingerPoints = new List<MatOfPoint>();
            fingerPoints.Add(fingerPoint);
            Imgproc.drawContours(rgbaMat, fingerPoints, -1, HGColorSpuiter.ColorToScalar(FingerRangeColor), 3);
        }
    }
    catch (System.Exception e)
    {
        Debug.Log(e.Message);
    }
}