/// <summary>
/// Detects a finger-like blob in <paramref name="rgbaImage"/> by HSV thresholding on a
/// quarter-resolution image and stores the polygon-approximated outline in FingerContour
/// (or null when nothing usable is found).
/// </summary>
/// <param name="rgbaImage">Full-resolution RGBA camera frame.</param>
public void ProcessFinger(Mat rgbaImage)
{
    // Two pyrDown calls => work at 1/4 width and 1/4 height for speed.
    Imgproc.pyrDown(rgbaImage, mPyrDownMat);
    Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);
    // Colour-space conversions into member mats.
    // NOTE(review): only mHsvMat is used below; mRGBAMat/mYCrCbMat are refreshed here
    // but not read in this method — presumably other code reads them; verify before removing.
    Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
    Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
    Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);
    // Binary mask of pixels inside the finger HSV range.
    Core.inRange(mHsvMat, fLowerBoundHSV, fUpperBoundHSV, fMaskHSV);
    fMask = fMaskHSV; // fMask aliases fMaskHSV (no copy).
    Imgproc.dilate(fMask, fDilatedMask, new Mat());
    List<MatOfPoint> contoursFinger = new List<MatOfPoint>();
    Imgproc.findContours(fDilatedMask, contoursFinger, fHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    if (contoursFinger.Count == 0)
    {
        FingerContour = null;
        return;
    }
    // Find max contour area
    double maxArea = 0;
    MatOfPoint biggestContour = null;
    foreach (MatOfPoint each in contoursFinger)
    {
        MatOfPoint wrapper = each;
        double area = Imgproc.contourArea(wrapper);
        if (area > maxArea)
        {
            maxArea = area;
            biggestContour = each;
        }
    }
    // Reject blobs smaller than 130 px^2 (at quarter resolution) as noise.
    if (maxArea < 130)
    {
        FingerContour = null;
        return;
    }
    //Debug.Log("Finger contour area" + maxArea.ToString());
    // Simplify the outline with a 3 px tolerance.
    MatOfPoint2f contours_res2f = new MatOfPoint2f();
    MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());
    Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
    FingerContour = new MatOfPoint(contours_res2f.toArray());
    contours_res2f.Dispose();
    biggestContour2f.Dispose();
    // Scale coordinates x4 back to full resolution when the contour is significant.
    if (Imgproc.contourArea(FingerContour) > mMinContourArea * maxArea)
    {
        Core.multiply(FingerContour, new Scalar(4, 4), FingerContour);
    }
}
/// <summary>
/// Scans <paramref name="contours"/> for quadrilateral outlines and records each one as a
/// Cubies entry (bounding-box origin plus the colour for <paramref name="index"/>),
/// drawing its bounding rectangle onto <paramref name="imgMat"/>.
/// </summary>
/// <param name="contours">Candidate contours to classify.</param>
/// <param name="imgMat">Image the detection rectangles are drawn on.</param>
/// <param name="index">Index into colorsList naming the colour of these cubies.</param>
/// <param name="cubies">Output list the detected cubies are appended to.</param>
private void GetCubies(List<MatOfPoint> contours, Mat imgMat, int index, List<Cubies> cubies)
{
    MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    MatOfPoint approx = new MatOfPoint();
    try
    {
        foreach (var contour in contours)
        {
            matOfPoint2f.fromList(contour.toList());
            // 10% of the arc length is a coarse tolerance: only strongly rectangular
            // outlines survive with exactly 4 vertices.
            Imgproc.approxPolyDP(matOfPoint2f, approxCurve, 0.1 * Imgproc.arcLength(matOfPoint2f, true), true);
            try
            {
                approxCurve.convertTo(approx, CvType.CV_32S);
                OpenCVForUnity.Rect rect = Imgproc.boundingRect(approx);
                if (approx.total() == 4)
                {
                    cubies.Add(new Cubies(rect.x, rect.y, colorsList[index]));
                    Imgproc.rectangle(imgMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 40, 150), 2);
                }
            }
            catch (ArgumentOutOfRangeException e)
            {
                // FIX: previously swallowed silently, hiding bad contours/indices.
                // Keep the best-effort skip, but surface the problem for diagnosis.
                Debug.Log("GetCubies: skipped contour - " + e.Message);
            }
        }
    }
    finally
    {
        // FIX: OpenCV Mats wrap native memory; release the temporaries deterministically
        // instead of waiting for finalizers.
        matOfPoint2f.Dispose();
        approxCurve.Dispose();
        approx.Dispose();
    }
    print("Number of cubies: " + cubies.Count);
}
/// <summary>
/// Searches <paramref name="frame"/> for a quadrilateral blob inside the configured HSV
/// range (the keyboard). On success stores its bounding box in TrackWindow, the polygon in
/// Approx and the raw contour in Contour, and returns true; otherwise returns false.
/// Candidates touching a 5 px border margin are rejected.
/// </summary>
/// <param name="frame">BGR camera frame.</param>
/// <param name="isKeyboardFound">Overwritten with the result (also returned).</param>
public bool GetPosition(Mat frame, bool isKeyboardFound)
{
    Mat frameProc = new Mat(); //frame.rows(), frame.cols(), CvType.CV_16UC3
    Mat frameMask = new Mat();
    Mat hierarchy = new Mat();
    Imgproc.cvtColor(frame, frameProc, Imgproc.COLOR_BGR2HSV);
    // HSV window taken from the tuning fields.
    Scalar lowerB = new Scalar(HueLower, SatLower, ValLower);
    Scalar upperB = new Scalar(HueUpper, SatUpper, ValUpper);
    Core.inRange(frameProc, lowerB, upperB, frameMask);
    // Keep only the in-range pixels of the original frame (frameProc now holds BGR again).
    Core.bitwise_and(frame, frame, frameProc, frameMask);
    //Imgproc.bilateralFilter(frameProc, frameProc, 9, 50, 100);
    // 2 == Imgproc.MORPH_OPEN: erode-then-dilate to drop small speckles.
    Imgproc.morphologyEx(frameProc, frameProc, 2, Mat.ones(5, 5, CvType.CV_8U));
    // Imgproc.dilate(frameProc, frameProc, Mat.ones(5, 5, CvType.CV_8U)); //Mat.ones(5, 5, CvType.CV_8U), anchor: new Point(-1, -1), iteration:2
    Imgproc.cvtColor(frameProc, frameProc, Imgproc.COLOR_BGR2GRAY);
    List<MatOfPoint> contoursList = new List<MatOfPoint>();
    Imgproc.findContours(frameProc, contoursList, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    int count = 0;
    foreach (MatOfPoint contour in contoursList)
    {
        MatOfPoint2f approx = new MatOfPoint2f();
        MatOfPoint2f contourf = new MatOfPoint2f(contour.toArray());
        // 1% of arc length: fine-grained polygon approximation.
        Imgproc.approxPolyDP(contourf, approx, 0.01 * Imgproc.arcLength(contourf, true), true);
        //print(approx.dump());
        // Accept only quadrilaterals above the minimum area.
        if (approx.rows() == 4 && Imgproc.contourArea(contour) >= min_area)
        {
            count++;
            // Only the first qualifying quad is considered; later ones are skipped.
            if (count >= 2)
            {
                continue;
            }
            else
            {
                OpenCVForUnity.CoreModule.Rect track_win = Imgproc.boundingRect(approx);
                // TrackWindow layout: [x, y, width, height].
                TrackWindow = new int[] { track_win.x, track_win.y, track_win.width, track_win.height };
                // Reject windows within 5 px of the frame edge.
                // NOTE(review): x-extent is compared against frame.height() and y-extent
                // against frame.width() — looks swapped unless the frame is transposed
                // upstream; TODO confirm against the caller.
                if (frame.height() - 5 < TrackWindow[0] + TrackWindow[2] && TrackWindow[0] + TrackWindow[2] <= frame.height() || 0 <= TrackWindow[0] && TrackWindow[0] < 5 || frame.width() - 5 < TrackWindow[1] + TrackWindow[3] && TrackWindow[1] + TrackWindow[3] <= frame.width() || 0 <= TrackWindow[1] && TrackWindow[1] < 5)
                {
                    continue;
                }
                else
                {
                    Approx = approx;
                    Contour = contour;
                    return (isKeyboardFound = true);
                }
            }
        }
    }
    return (isKeyboardFound = false);
}
/// <summary>
/// Debug pipeline: blurs, thresholds and finds contours of <paramref name="textParam"/>,
/// writing a snapshot image to disk after each stage, then logs the vertex count of each
/// polygon-approximated contour. Shape classification is not implemented yet, so this
/// always returns false.
/// </summary>
/// <param name="textParam">Grayscale source texture to analyse.</param>
/// <returns>Always false (placeholder until classification is implemented).</returns>
public bool verificaImagemContorno(Texture2D textParam)
{
    // FIX: snapshots are written to *.png paths, so encode them as PNG
    // (EncodeToJPG previously produced JPEG bytes under a .png extension).
    var bytes = textParam.EncodeToPNG();
    File.WriteAllBytes("imagem1_tratamento.png", bytes);
    Texture2D camFoto = textParam;

    // Grayscale working image. CV_8UC1
    Mat img1Mat = new Mat(camFoto.height, camFoto.width, CvType.CV_8UC1);
    Utils.texture2DToMat(camFoto, img1Mat);
    Imgproc.GaussianBlur(img1Mat, img1Mat, new Size(5, 5), 0);

    Texture2D tex3 = new Texture2D(img1Mat.cols(), img1Mat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img1Mat, tex3);
    bytes = tex3.EncodeToPNG();
    File.WriteAllBytes("imagem1_tratamento_gaussian.png", bytes);

    // Hard binary threshold at 120.
    Imgproc.threshold(img1Mat, img1Mat, 120, 255, Imgproc.THRESH_BINARY);
    tex3 = new Texture2D(img1Mat.cols(), img1Mat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img1Mat, tex3);
    bytes = tex3.EncodeToPNG();
    File.WriteAllBytes("imagem1_tratamento_threshold.png", bytes);

    List<MatOfPoint> srcContours = new List<MatOfPoint>();
    Mat srcHierarchy = new Mat();
    Imgproc.findContours(img1Mat, srcContours, srcHierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

    // (FIX: removed unused counters totalB/totalQ/totalR/totalP.)
    // Draw every contour in gray so the snapshot shows what was found.
    for (int i = 0; i < srcContours.Count; i++)
    {
        Imgproc.drawContours(img1Mat, srcContours, i, new Scalar(100, 100, 100), 2, 8, srcHierarchy, 0, new Point());
    }
    tex3 = new Texture2D(img1Mat.cols(), img1Mat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img1Mat, tex3);
    bytes = tex3.EncodeToPNG();
    File.WriteAllBytes("imagem1_tratamento_findcountors.png", bytes);

    // Log the vertex count of each contour's polygonal approximation
    // (1% of arc length tolerance); classification is still TODO.
    for (int i = 0; i < srcContours.Count; i++)
    {
        MatOfPoint2f mont = new MatOfPoint2f(srcContours[i].toArray());
        var aprox = new MatOfPoint2f();
        Imgproc.approxPolyDP(mont, aprox, 0.01 * Imgproc.arcLength(mont, true), true);
        Debug.Log(aprox.size());
        // FIX: release per-iteration native mats.
        mont.Dispose();
        aprox.Dispose();
    }
    return (false);
}
/// <summary>
/// Rescales <paramref name="contour"/> from the pyrDown-ed quarter resolution back to the
/// full image size (x4), then replaces its points with a polygon approximation (3 px
/// tolerance) so the vertices are easier to identify.
/// </summary>
/// <param name="rgbaMat">Unused; kept for signature compatibility with callers.</param>
/// <param name="contour">Contour to rescale and simplify in place.</param>
private static void _pointOfVertices(Mat rgbaMat, MatOfPoint contour)
{
    // Multiply by 4 to undo the two pyrDown steps applied upstream.
    Core.multiply(contour, new Scalar(4, 4), contour);
    // The raw contour vertices are scattered, so resample with approxPolyDP.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
    // FIX: the original re-assigned the 'contour' parameter to a new MatOfPoint, which is
    // invisible to the caller — the approximation was silently discarded. Write the
    // approximated points back into the caller's Mat instead.
    contour.fromArray(pointMat.toArray());
    pointMat.Dispose();
}
// Analyse one contour: if its bounding box is large enough, simplify it, build its convex
// hull, and when the hull has exactly 4 points try to register it as a match object.
// Returns true when the contour passed the size gate (regardless of match success).
private bool analysisContoursRect(int index, List<MatOfPoint> contours, Mat result, List<MatchObject> matchObject)
{
    OpenCVForUnity.Rect _testDepthRect = Imgproc.boundingRect(contours[index]);
    // Minimum size is a fraction of the match area (per-mille factor times W x H).
    float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;
    if (_testDepthRect.area() > minAreaSize)
    {
        // Holders for hull/point data.
        MatOfInt hullInt = new MatOfInt();
        List<Point> hullPointList = new List<Point>();
        MatOfPoint hullPointMat = new MatOfPoint();
        List<MatOfPoint> hullPoints = new List<MatOfPoint>();
        MatOfInt4 defects = new MatOfInt4();
        // Simplify the contour points.
        MatOfPoint2f Temp2f = new MatOfPoint2f();
        //Convert contours(i) from MatOfPoint to MatOfPoint2f
        contours[index].convertTo(Temp2f, CvType.CV_32FC2);
        //Processing on mMOP2f1 which is in type MatOfPoint2f
        // 30 px epsilon: very coarse, keeps only dominant corners.
        Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
        //Convert back to MatOfPoint and put the new values back into the contours list
        Temp2f.convertTo(contours[index], CvType.CV_32S);
        // Compute the convex hull around the contour (indices into the point list).
        Imgproc.convexHull(contours[index], hullInt);
        List<Point> pointMatList = contours[index].toList();
        List<int> hullIntList = hullInt.toList();
        // NOTE(review): every hullPoints entry is the SAME hullPointMat instance,
        // re-filled each iteration — verify setMatchObject expects that.
        for (int j = 0; j < hullInt.toList().Count; j++)
        {
            hullPointList.Add(pointMatList[hullIntList[j]]);
            hullPointMat.fromList(hullPointList);
            hullPoints.Add(hullPointMat);
        }
        // A 4-point hull is a rectangle candidate.
        if (hullInt.toList().Count == 4)
        {
            if (!setMatchObject(index, pointMatList, contours, hullPoints, result, matchObject))
            {
                //Debug.Log("setMatchObject fail");
            }
        }
        // Release native memory.
        defects.Dispose();
        hullPointList.Clear();
        hullPointMat.Dispose();
        hullInt.Dispose();
        hullPoints.Clear();
        return (true);
    }
    return (false);
}
/// <summary>
/// Estimates the hand area from the largest contour and writes the result into
/// <paramref name="resultSetter"/>: axis-aligned centre/size, rotation from the oriented
/// bounding box, then convexity-defect analysis for finger gaps.
/// </summary>
private void EstimateHand(Mat mat, List<MatOfPoint> contours, RecordHandDetectResult resultSetter)
{
    // Nothing detected on this side: mark invalid and stop.
    if (contours.Count == 0)
    {
        resultSetter.HasValidHandArea = false;
        return;
    }

    var largest = SelectLargestContour(contours);
    var bounds = Imgproc.boundingRect(largest);

    // Defects below 70% of the bounding-box height are ignored: the wrist notch shows
    // up near the bottom of the image and must not be mistaken for a finger gap.
    double defectMinY = bounds.y + bounds.height * 0.7;

    // The raw contour vertices are ragged; simplify with a 3 px tolerance first.
    var approx2f = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(largest.toArray()), approx2f, 3, true);
    largest = new MatOfPoint(approx2f.toArray());

    var orientedBox = Imgproc.minAreaRect(approx2f);
    var orientedCenter = orientedBox.center;
    var orientedSize = orientedBox.size;

    // Report the axis-aligned bounding box for centre/size (integer division is
    // intentional: Rect fields are ints).
    resultSetter.HandAreaCenter = new Vector2(bounds.x + bounds.width / 2, bounds.y + bounds.height / 2);
    resultSetter.HandAreaSize = new Vector2(bounds.width, bounds.height);
    resultSetter.HandAreaRotation = (float)orientedBox.angle;
    // Using the oriented box (orientedCenter/orientedSize) instead would be possible,
    // but its angle behaves unintuitively past 45 degrees.

    Imgproc.convexHull(largest, _hullIndices);
    var hullIdx = _hullIndices.toArray();
    // A degenerate hull (fewer than 3 points) should not normally happen; treat as invalid.
    if (hullIdx.Length < 3)
    {
        resultSetter.HasValidHandArea = false;
        return;
    }

    UpdateConvexityDefection(largest, _hullIndices, defectMinY, resultSetter);
}
/// <summary>
/// Classifies the polygon approximated from contour <paramref name="c"/>. Currently only
/// triangles are recognised; anything else yields "Não identificado".
/// </summary>
/// <param name="c">Contour (float points) to classify.</param>
/// <returns>"Triângulo" for a 3-vertex approximation, otherwise "Não identificado".</returns>
public string Detect(MatOfPoint2f c)
{
    var shape = "Não identificado";
    var peri = Imgproc.arcLength(c, true);
    // 4% of the perimeter: the usual approxPolyDP tolerance for shape detection.
    var epsilon = 0.04 * peri;
    var aprox = new MatOfPoint2f();
    Imgproc.approxPolyDP(c, aprox, epsilon, true);
    if (aprox.rows() == 3)
    {
        // FIX: the triangle was logged but 'shape' was never assigned, so callers
        // always received "Não identificado" even on a successful detection.
        shape = "Triângulo";
        UnityEngine.Debug.Log("Triângulo");
        //webCamTextureToMatHelper.Dispose();
    }
    // Other shapes (square/rectangle via aspect ratio, pentagon, circle) are still TODO.
    aprox.Dispose();
    return (shape);
}
/// <summary>
/// Fills <paramref name="contours"/> with the convex quadrilaterals found in
/// <paramref name="image"/>: for each external contour, the convex hull is approximated
/// with 1% arc-length tolerance and kept only if exactly 4 vertices remain.
/// </summary>
/// <param name="image">Binary image to search (modified by findContours).</param>
/// <param name="contours">Output list; cleared first, then filled with 4-point contours.</param>
private void Find4PointContours(Mat image, List<MatOfPoint> contours)
{
    contours.Clear();
    List<MatOfPoint> tmp_contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(image, tmp_contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    // FIX: hierarchy is unused after findContours; release its native memory now.
    hierarchy.Dispose();
    foreach (var cnt in tmp_contours)
    {
        MatOfInt hull = new MatOfInt();
        Imgproc.convexHull(cnt, hull, false);

        // Gather the hull's points from the contour via the returned indices.
        Point[] cnt_arr = cnt.toArray();
        int[] hull_arr = hull.toArray();
        Point[] pts = new Point[hull_arr.Length];
        for (int i = 0; i < hull_arr.Length; i++)
        {
            pts[i] = cnt_arr[hull_arr[i]];
        }

        MatOfPoint2f ptsFC2 = new MatOfPoint2f(pts);
        MatOfPoint2f approxFC2 = new MatOfPoint2f();
        MatOfPoint approxSC2 = new MatOfPoint();

        double arclen = Imgproc.arcLength(ptsFC2, true);
        Imgproc.approxPolyDP(ptsFC2, approxFC2, 0.01 * arclen, true);
        approxFC2.convertTo(approxSC2, CvType.CV_32S);

        // FIX: dispose per-iteration native mats (previously leaked until GC finalization).
        hull.Dispose();
        ptsFC2.Dispose();
        approxFC2.Dispose();

        // Keep only quadrilaterals; release rejected candidates.
        if (approxSC2.size().area() != 4)
        {
            approxSC2.Dispose();
            continue;
        }
        contours.Add(approxSC2);
    }
}
// Contour-based segmentation: edge-detect the image, find all contours, highlight them,
// and fill the largest one. NOTE: operates IN PLACE on original_image (imgSource aliases
// it) and returns the same Mat.
private static Mat MyFindLargestRectangle(Mat original_image)
{
    Mat imgSource = original_image;
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);
    Imgproc.Canny(imgSource, imgSource, 50, 50);
    Imgproc.GaussianBlur(imgSource, imgSource, new Size(5, 5), 5);

    List<MatOfPoint> contours = new List<MatOfPoint>();
    Imgproc.findContours(imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // FIX: the original indexed contours[0] unconditionally, which throws
    // ArgumentOutOfRangeException on an image with no contours (e.g. all-black after Canny).
    if (contours.Count == 0)
    {
        return imgSource;
    }

    double maxArea = 0;
    int maxAreaIdx = -1;
    MatOfPoint largest_contour = contours[0];
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    for (int idx = 0; idx < contours.Count; idx++)
    {
        MatOfPoint temp_contour = contours[idx];
        double contourarea = Imgproc.contourArea(temp_contour);
        // Track the largest contour (must beat the current max by more than 1 px^2).
        if (contourarea - maxArea > 1)
        {
            maxArea = contourarea;
            largest_contour = temp_contour;
            maxAreaIdx = idx;
            // Approximation of the current best (5% of point-count tolerance);
            // kept for parity with the original, though approxCurve is not used below.
            MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
            int contourSize = (int)temp_contour.total();
            Imgproc.approxPolyDP(new_mat, approxCurve, contourSize * 0.05, true);
        }
    }

    // Outline everything, fill the winner, then re-outline it thickly.
    Imgproc.drawContours(imgSource, contours, -1, new Scalar(255, 0, 0), 1);
    Imgproc.fillConvexPoly(imgSource, largest_contour, new Scalar(255, 255, 255));
    Imgproc.drawContours(imgSource, contours, maxAreaIdx, new Scalar(0, 0, 255), 3);

    return (imgSource);
}
/// <summary>
/// Hand pose estimation: picks the contour with the largest rotated bounding box,
/// simplifies it, builds its convex hull, and counts fingers from convexity defects
/// (deep gaps above the wrist cut-off line). Draws the hull, boxes and defect points
/// onto <paramref name="rgbaMat"/> and updates numberOfFingersText.
/// </summary>
public void handPoseEstimationProcess(Mat rgbaMat)
{
    //Imgproc.blur(mRgba, mRgba, new Size(5,5));
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
    //Imgproc.medianBlur(mRgba, mRgba, 3);
    if (!isColorSelected)
    {
        return;
    }
    // NOTE(review): contours are fetched BEFORE detector.process() runs — this only works
    // if getContours() returns a list the detector refreshes in place; TODO confirm.
    List<MatOfPoint> contours = detector.getContours();
    detector.process(rgbaMat);
    // Debug.Log ("Contours count: " + contours.Count);
    if (contours.Count <= 0)
    {
        return;
    }
    // Select the contour with the largest minimum-area (rotated) bounding box.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }
    OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
    Core.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
    // Debug.Log (
    //  " Row start [" + (int)boundRect.tl ().y + "] row end [" + (int)boundRect.br ().y +
    //  "] Col start [" + (int)boundRect.tl ().x + "] Col end [" + (int)boundRect.br ().x + "]");
    // Wrist cut-off: defects below 70% of the bounding-box height are ignored.
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;
    // Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");
    //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
    Core.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);
    // Simplify the contour (3 px tolerance) before hull/defect analysis.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
    contours[boundPos] = new MatOfPoint(pointMat.toArray());
    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
    // A hull needs at least 3 points.
    if (hull.toArray().Length < 3)
    {
        return;
    }
    Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);
    // Collect the hull's points for drawing.
    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    List<Point> listPo = new List<Point>();
    for (int j = 0; j < hull.toList().Count; j++)
    {
        listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
    }
    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.Add(e);
    List<MatOfPoint> defectPoints = new List<MatOfPoint>();
    List<Point> listPoDefect = new List<Point>();
    // convexityDefects packs 4 ints per defect: start idx, end idx, farthest-point idx, depth.
    for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
    {
        Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
        int depth = convexDefect.toList()[j + 3];
        // Keep only deep defects above the wrist cut-off line.
        if (depth > threasholdSlider.value && farPoint.y < a)
        {
            listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
        }
        // Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
    }
    MatOfPoint e2 = new MatOfPoint();
    e2.fromList(listPo);
    defectPoints.Add(e2);
    // Debug.Log ("hull: " + hull.toList ());
    // Debug.Log ("defects: " + convexDefect.toList ());
    Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
    // int defectsTotal = (int)convexDefect.total();
    // Debug.Log ("Defect total " + defectsTotal);
    // One counted defect per finger gap, capped at 5.
    this.numberOfFingers = listPoDefect.Count;
    if (this.numberOfFingers > 5)
    {
        this.numberOfFingers = 5;
    }
    // Debug.Log ("numberOfFingers " + numberOfFingers);
    // Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
    numberOfFingersText.text = numberOfFingers.ToString();
    foreach (Point p in listPoDefect)
    {
        Core.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
    }
}
/// <summary>
/// Finds marker candidates among the contours: keeps convex quadrilaterals with
/// sufficiently long sides, orders their corners anti-clockwise, removes near-duplicate
/// candidates (keeping the larger perimeter), and writes the survivors into
/// <paramref name="detectedMarkers"/>.
/// </summary>
/// <param name="contours">Contours.</param>
/// <param name="detectedMarkers">Detected markers (cleared and refilled).</param>
void findCandidates(List<MatOfPoint> contours, List<Marker> detectedMarkers)
{
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    List<Marker> possibleMarkers = new List<Marker>();

    // For each contour, analyze if it is a parallelepiped likely to be the marker
    for (int i = 0; i < contours.Count; i++)
    {
        // Approximate to a polygon (epsilon scales with the contour's point count).
        double eps = contours[i].toArray().Length * 0.05;
        Imgproc.approxPolyDP(new MatOfPoint2f(contours[i].toArray()), approxCurve, eps, true);
        Point[] approxCurveArray = approxCurve.toArray();

        // We interested only in polygons that contains only four points
        if (approxCurveArray.Length != 4)
        {
            continue;
        }

        // And they have to be convex
        if (!Imgproc.isContourConvex(new MatOfPoint(approxCurveArray)))
        {
            continue;
        }

        // Ensure that the distance between consecutive points is large enough
        float minDist = float.MaxValue;
        for (int p = 0; p < 4; p++)
        {
            Point side = new Point(approxCurveArray[p].x - approxCurveArray[(p + 1) % 4].x, approxCurveArray[p].y - approxCurveArray[(p + 1) % 4].y);
            float squaredSideLength = (float)side.dot(side);
            minDist = Mathf.Min(minDist, squaredSideLength);
        }

        // Check that distance is not very small (minDist holds a SQUARED length).
        if (minDist < m_minContourLengthAllowed)
        {
            continue;
        }

        // All tests are passed. Save marker candidate:
        Marker m = new Marker();
        m.points = new MatOfPoint();
        List<Point> markerPointsList = new List<Point>();
        for (int p = 0; p < 4; p++)
        {
            markerPointsList.Add(new Point(approxCurveArray[p].x, approxCurveArray[p].y));
        }

        // Sort the points in anti-clockwise order
        // Trace a line between the first and second point.
        // If the third point is at the right side, then the points are anti-clockwise
        // (cross product sign test).
        Point v1 = new Point(markerPointsList[1].x - markerPointsList[0].x, markerPointsList[1].y - markerPointsList[0].y);
        Point v2 = new Point(markerPointsList[2].x - markerPointsList[0].x, markerPointsList[2].y - markerPointsList[0].y);
        double o = (v1.x * v2.y) - (v1.y * v2.x);
        if (o < 0.0) //if the third point is in the left side, then sort in anti-clockwise order
        {
            Point tmp = markerPointsList[1];
            markerPointsList[1] = markerPointsList[3];
            markerPointsList[3] = tmp;
        }
        m.points.fromList(markerPointsList);
        possibleMarkers.Add(m);
    }
    approxCurve.Dispose();
    //Debug.Log ("possibleMarkers " + possibleMarkers.Count);

    // Remove these elements which corners are too close to each other.
    // First detect candidates for removal:
    // (each Point stores a PAIR OF INDICES (i, j) into possibleMarkers, not coordinates)
    List<Point> tooNearCandidates = new List<Point>();
    for (int i = 0; i < possibleMarkers.Count; i++)
    {
        Marker m1 = possibleMarkers[i];
        Point[] m1PointsArray = m1.points.toArray();
        //calculate the average distance of each corner to the nearest corner of the other marker candidate
        for (int j = i + 1; j < possibleMarkers.Count; j++)
        {
            Marker m2 = possibleMarkers[j];
            Point[] m2PointsArray = m2.points.toArray();
            float distSquared = 0;
            for (int c = 0; c < 4; c++)
            {
                Point v = new Point(m1PointsArray[c].x - m2PointsArray[c].x, m1PointsArray[c].y - m2PointsArray[c].y);
                distSquared += (float)v.dot(v);
            }
            distSquared /= 4;
            // 100 == 10 px average corner distance, squared.
            if (distSquared < 100)
            {
                tooNearCandidates.Add(new Point(i, j));
            }
        }
    }

    // Mark for removal the element of the pair with smaller perimeter
    List<bool> removalMask = new List<bool>(possibleMarkers.Count);
    for (int i = 0; i < possibleMarkers.Count; i++)
    {
        removalMask.Add(false);
    }
    for (int i = 0; i < tooNearCandidates.Count; i++)
    {
        float p1 = perimeter(possibleMarkers[(int)tooNearCandidates[i].x].points);
        float p2 = perimeter(possibleMarkers[(int)tooNearCandidates[i].y].points);
        int removalIndex;
        if (p1 > p2)
        {
            removalIndex = (int)tooNearCandidates[i].x;
        }
        else
        {
            removalIndex = (int)tooNearCandidates[i].y;
        }
        removalMask[removalIndex] = true;
    }

    // Return candidates
    detectedMarkers.Clear();
    for (int i = 0; i < possibleMarkers.Count; i++)
    {
        if (!removalMask[i])
        {
            detectedMarkers.Add(possibleMarkers[i]);
        }
    }
}
// Use the depth image's contours to sample colours from the RGB image: for each
// sufficiently large depth contour, record its bounding-box corners, its RGB/HSV sample
// colours and its hull point count, then hand everything to the colour matcher.
// Returns a CV_8UC1 mat with the depth contours drawn (also used by the matcher).
public Mat getContours(Mat srcColorMat, Mat srcDepthMat)
{
    Mat ColorMat = new Mat();
    Mat DepthMat = new Mat();
    Mat HsvMat = new Mat();
    srcColorMat.copyTo(ColorMat);
    srcDepthMat.copyTo(DepthMat);
    Imgproc.cvtColor(ColorMat, HsvMat, Imgproc.COLOR_BGR2HSV);
    List<ColorObject> colorObjects = new List<ColorObject>();
    Mat resultMat = new Mat(DepthMat.height(), DepthMat.width(), CvType.CV_8UC1);
    Mat hierarchy = new Mat();
    List<Point> ConsistP = new List<Point>();
    List<MatOfPoint> contours = new List<MatOfPoint>();
    List<List<Point>> trianglePointList = new List<List<Point>>();
    Imgproc.findContours(DepthMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    int numObjects = contours.Count;
    List<Scalar> clickRGB = new List<Scalar>();
    List<Scalar> clickHSV = new List<Scalar>();
    List<int> HullCountList = new List<int>();
    // Draw all depth contours onto the result mask.
    for (int i = 0; i < numObjects; i++)
    {
        Imgproc.drawContours(resultMat, contours, i, new Scalar(255), 1);
    }
    double[] GetRGB = new double[10];
    // Minimum size is a fraction of the match area.
    float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;
    if (numObjects > 0)
    {
        for (int index = 0; index < numObjects; index++)
        {
            OpenCVForUnity.Rect R0 = Imgproc.boundingRect(contours[index]);
            if (R0.area() > minAreaSize)
            {
                // Holders for hull/point data.
                MatOfInt hullInt = new MatOfInt();
                List<Point> hullPointList = new List<Point>();
                MatOfPoint hullPointMat = new MatOfPoint();
                List<MatOfPoint> hullPoints = new List<MatOfPoint>();
                MatOfInt4 defects = new MatOfInt4();
                // Simplify the contour points.
                MatOfPoint2f Temp2f = new MatOfPoint2f();
                //Convert contours(i) from MatOfPoint to MatOfPoint2f
                contours[index].convertTo(Temp2f, CvType.CV_32FC2);
                //Processing on mMOP2f1 which is in type MatOfPoint2f
                // 30 px epsilon: very coarse, keeps only dominant corners.
                Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
                //Convert back to MatOfPoint and put the new values back into the contours list
                Temp2f.convertTo(contours[index], CvType.CV_32S);
                // Compute the convex hull around the contour.
                Imgproc.convexHull(contours[index], hullInt);
                List<Point> pointMatList = contours[index].toList();
                List<int> hullIntList = hullInt.toList();
                for (int j = 0; j < hullInt.toList().Count; j++)
                {
                    hullPointList.Add(pointMatList[hullIntList[j]]);
                    hullPointMat.fromList(hullPointList);
                    hullPoints.Add(hullPointMat);
                }
                // Record the 4 bounding-box corners: tl, br, tr, bl (in that order).
                ConsistP.Add(new Point(R0.x, R0.y));
                ConsistP.Add(new Point(R0.x + R0.width, R0.y + R0.height));
                ConsistP.Add(new Point(R0.x + R0.width, R0.y));
                ConsistP.Add(new Point(R0.x, R0.y + R0.height));
                // Sample the object's colour in both RGB and HSV space.
                clickRGB.Add(clickcolor(ColorMat, R0));
                clickHSV.Add(clickcolor(HsvMat, R0));
                HullCountList.Add(hullIntList.Count);
                trianglePointList.Add(pointMatList);
                // Release native memory.
                defects.Dispose();
                hullPointList.Clear();
                hullPointMat.Dispose();
                hullInt.Dispose();
                hullPoints.Clear();
                //Debug.Log("ID = " + index + " Color = " + clickcolor(ColorMat, R0));
            }
        }
        // Match objects by colour.
        _matchColorObjectList = setColorMatchObject(ConsistP, trianglePointList, clickRGB, clickHSV, resultMat, HullCountList);
    }
    return (resultMat);
}
/// <summary>
/// Detects skin-coloured regions in <paramref name="rgbaImage"/> at quarter resolution
/// using the intersection of HSV, RGB and YCrCb range masks. Chooses a hand contour
/// (largest, or the second-largest under a left-side heuristic), stores its polygon
/// approximation in handContour and its area in handContourSize, and refills mContours
/// with all significant contours scaled back to full resolution.
/// </summary>
public void ProcessSkin(Mat rgbaImage)
{
    // Two pyrDown calls => work at 1/4 width and 1/4 height for speed.
    Imgproc.pyrDown(rgbaImage, mPyrDownMat);
    Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);
    // Three colour spaces for the three skin masks.
    Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
    Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
    Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);
    Core.inRange(mHsvMat, mLowerBoundHSV, mUpperBoundHSV, mMaskHSV);
    Core.inRange(mPyrDownMat, mLowerBoundRGB, mUpperBoundRGB, mMaskRGB);
    Core.inRange(mYCrCbMat, mLowerBoundYCrCb, mUpperBoundYCrCb, mMaskYCrCb);
    // A pixel counts as skin only when all three masks agree ('&' is the Mat bitwise AND).
    mMask = mMaskYCrCb & mMaskHSV & mMaskRGB;
    Imgproc.dilate(mMask, mDilatedMask, new Mat());
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    if (contours.Count == 0)
    {
        return;
    }
    // Find max contour area (and track the runner-up in the same pass).
    double maxArea = 0;
    double secondMaxArea = 0;
    MatOfPoint biggestContour = null;
    MatOfPoint secondBiggestContour = null;
    foreach (MatOfPoint each in contours)
    {
        MatOfPoint wrapper = each;
        double area = Imgproc.contourArea(wrapper);
        if (area > maxArea)
        {
            secondMaxArea = maxArea;
            secondBiggestContour = biggestContour;
            maxArea = area;
            biggestContour = each;
        }
        else if (area > secondMaxArea)
        {
            secondMaxArea = area;
            secondBiggestContour = each;
        }
    }
    handContourSize = maxArea;
    // Prefer the runner-up when its average X is smaller (it lies further left —
    // presumably to discard a face/arm blob; verify against the caller) and it is
    // at least 30% of the largest area.
    if ((biggestContour != null) && (secondBiggestContour != null) && (ComputeAVGXForContour(biggestContour) >= ComputeAVGXForContour(secondBiggestContour)) && (secondMaxArea >= 0.3 * maxArea))
    {
        biggestContour = secondBiggestContour;
        handContourSize = secondMaxArea;
    }
    // NOTE(review): if every contour has zero area, biggestContour stays null and
    // toArray() below throws — TODO confirm whether findContours can produce that here.
    // Simplify the chosen contour with a 3 px tolerance.
    MatOfPoint2f contours_res2f = new MatOfPoint2f();
    MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());
    Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
    handContour = new MatOfPoint(contours_res2f.toArray());
    contours_res2f.Dispose();
    biggestContour2f.Dispose();
    // Scale the hand contour back to full resolution (x4) when significant.
    if (Imgproc.contourArea(handContour) > mMinContourArea * maxArea)
    {
        Core.multiply(handContour, new Scalar(4, 4), handContour);
    }
    // Filter contours by area and resize to fit the original image size
    mContours.Clear();
    foreach (MatOfPoint each in contours)
    {
        MatOfPoint contour = each;
        if (Imgproc.contourArea(contour) > mMinContourArea * maxArea)
        {
            Core.multiply(contour, new Scalar(4, 4), contour);
            mContours.Add(contour);
        }
    }
}
/// <summary>
/// Hand pose estimation with optional down-scaling: selects the contour with the largest
/// rotated bounding box, simplifies it, builds its convex hull, then draws every other
/// hull point and connects a tracked hull point to a nearby face landmark when they come
/// within 8 px of each other. (Finger counting via convexity defects is commented out.)
/// </summary>
private void HandPoseEstimationProcess(Mat rgbaMat)
{
    // rgbaMat.copyTo(mRgba);
    float DOWNSCALE_RATIO = 1.0f;
    if (enableDownScale)
    {
        // Work on a down-scaled copy; hull points are scaled back up via DOWNSCALE_RATIO.
        mRgba = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
        DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
    }
    else
    {
        // mRgba = rgbaMat;
        rgbaMat.copyTo(mRgba);
        DOWNSCALE_RATIO = 1.0f;
    }
    // Imgproc.blur(mRgba, mRgba, new Size(5,5));
    Imgproc.GaussianBlur(mRgba, mRgba, new Size(3, 3), 1, 1);
    // Imgproc.medianBlur(mRgba, mRgba, 3);
    if (!isColorSelected)
    {
        return;
    }
    // NOTE(review): contours are fetched BEFORE detector.Process() runs — this only works
    // if GetContours() returns a list the detector refreshes in place; TODO confirm.
    List<MatOfPoint> contours = detector.GetContours();
    detector.Process(mRgba);
    // Debug.Log ("Contours count: " + contours.Count);
    if (contours.Count <= 0)
    {
        return;
    }
    // Select the contour with the largest minimum-area (rotated) bounding box.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }
    MatOfPoint contour = contours[boundPos];
    OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
    Imgproc.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
    // Debug.Log (
    //  " Row start [" + (int)boundRect.tl ().y + "] row end [" + (int)boundRect.br ().y +
    //  "] Col start [" + (int)boundRect.tl ().x + "] Col end [" + (int)boundRect.br ().x + "]");
    // Wrist cut-off line at 70% of the bounding-box height (used by the commented-out
    // defect analysis below).
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;
    // Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");
    // Imgproc.rectangle(mRgba, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);
    // Simplify the contour (3 px tolerance) before hull analysis.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
    contour = new MatOfPoint(pointMat.toArray());
    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);
    // A hull needs at least 3 points.
    if (hull.toArray().Length < 3)
    {
        return;
    }
    Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);
    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    List<Point> listPo = new List<Point>();
    // Collect hull points, scaled back to the full-resolution frame.
    for (int j = 0; j < hull.toList().Count; j++)
    {
        listPo.Add(contour.toList()[hull.toList()[j]] * DOWNSCALE_RATIO);
    }
    /*
     * MatOfPoint e = new MatOfPoint();
     * e.fromList(listPo);
     * hullPoints.Add(e);
     *
     * List<Point> listPoDefect = new List<Point>();
     *
     * if (convexDefect.rows() > 0)
     * {
     *     List<int> convexDefectList = convexDefect.toList();
     *     List<Point> contourList = contour.toList();
     *     for (int j = 0; j < convexDefectList.Count; j = j + 4)
     *     {
     *         Point farPoint = contourList[convexDefectList[j + 2]];
     *         int depth = convexDefectList[j + 3];
     *         if (depth > threshholdDetect && farPoint.y < a)
     *         {
     *             listPoDefect.Add(contourList[convexDefectList[j + 2]]);
     *             Imgproc.line(rgbaMat, farPoint, listPo[convexDefectList[j + 2]], new Scalar(255, 0, 0, 255),1,1);
     *         }
     *         // Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
     *     }
     * }*/
    // Debug.Log ("hull: " + hull.toList ());
    // if (convexDefect.rows () > 0) {
    //     Debug.Log ("defects: " + convexDefect.toList ());
    // }
    //Imgproc.drawContours (rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
    // Draw every second hull point and test it against the face landmarks.
    for (int p = 0; p < listPo.Count; p++)
    {
        if (p % 2 == 0)
        {
            Imgproc.circle(rgbaMat, listPo[p], 6, new Scalar(255, 0, 0, 255), -1);
            // Imgproc.putText(rgbaMat,p.ToString(),listPo[p],1,1,new Scalar(255,0,0,255));
            // check if close
            List<Point> fLMscaled = OpenCVForUnityUtils.ConvertVector2ListToPointList(facePoints);
            for (int q = 0; q < fLMscaled.Count; q++)
            {
                // Remember the (hull point, face landmark) pair once they come within 8 px.
                if (ifLessThanDPoint(listPo[p], fLMscaled[q], 8))
                {
                    //Point1 = listPo[p];
                    //Point2 = fLMscaled[q];
                    handPoint = p;
                    facePoint = q;
                    print(Point1 + " " + Point2);
                }
            }
            // Keep drawing the line between the remembered pair on subsequent frames.
            if (p == handPoint && facePoint != 0)
            {
                Point1 = listPo[p];
                Point2 = fLMscaled[facePoint];
                Imgproc.line(rgbaMat, Point1, Point2, new Scalar(255, 255, 255, 255));
            }
        }
    }
    // int defectsTotal = (int)convexDefect.total();
    // Debug.Log ("Defect total " + defectsTotal);
    /* numberOfFingers = listPoDefect.Count;
     * if (numberOfFingers > 5)
     *     numberOfFingers = 5;/
     *
     * // Debug.Log ("numberOfFingers " + numberOfFingers);
     *
     * // Imgproc.putText (rgbaMat, "" + numberOfFingers, new Point (rgbaMat.cols () / 2, rgbaMat.rows () / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
     *
     *
     * /* foreach (Point p in listPoDefect) {
     *
     *     Point tempp = GetNearestL(p, listPo);
     *     tempp = ConvertDownscale(tempp, DOWNSCALE_RATIO);
     *     Point p2 = ConvertDownscale(p, DOWNSCALE_RATIO);
     *
     *     Imgproc.circle (rgbaMat, tempp, 6, new Scalar (0, 0, 255, 255), -1);
     *     Imgproc.circle(rgbaMat, p2, 6, new Scalar(255, 0, 255, 255), -1);
     * }*/
}
/// <summary>
/// Hand pose estimation pass: blurs the input frame, locates the largest
/// color-segmented contour reported by <c>detector</c>, draws its bounding
/// box and convex hull, and counts finger-like convexity defects.
/// </summary>
/// <param name="rgbaMat">Camera frame (RGBA); drawn on in place.</param>
public void handPoseEstimationProcess(Mat rgbaMat)
{
    //Imgproc.blur(mRgba, mRgba, new Size(5,5));
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);
    //Imgproc.medianBlur(mRgba, mRgba, 3);

    if (!isColorSelected)
    {
        return;
    }

    // NOTE(review): contours are fetched BEFORE process() runs, so this frame
    // is analyzed with the previous frame's contours — confirm intentional.
    List<MatOfPoint> contours = detector.getContours();
    detector.process(rgbaMat);

    if (contours.Count <= 0)
    {
        return;
    }

    // Pick the contour with the largest min-area-rect footprint.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));
    Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
    Imgproc.circle(rgbaMat, boundRect.tl(), 6, new Scalar(0, 255, 0, 255), -1);
    Imgproc.circle(rgbaMat, boundRect.br(), 6, new Scalar(0, 255, 0, 255), -1);

    pointbX = boundRect.br().x;
    pointbY = boundRect.br().y;
    pointaX = boundRect.x;
    // BUG(review): this overwrites pointbY assigned two lines above; it almost
    // certainly should assign a "pointaY" field — confirm the field name and fix.
    pointbY = boundRect.y;

    // Keep only the top 70% of the box: defects below this line are wrist/arm.
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;
    Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

    // Simplify the winning contour before hull/defect analysis.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
    contours[boundPos] = new MatOfPoint(pointMat.toArray());

    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
    if (hull.toArray().Length < 3)
    {
        return; // convexityDefects needs at least a triangle
    }
    Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

    // Hoisted out of the loops below: each toList() call copies the whole Mat.
    List<int> hullIndices = hull.toList();
    List<Point> contourPoints = contours[boundPos].toList();
    List<int> defectIndices = convexDefect.toList();

    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    List<Point> listPo = new List<Point>();
    for (int j = 0; j < hullIndices.Count; j++)
    {
        listPo.Add(contourPoints[hullIndices[j]]);
    }
    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.Add(e);

    // Each defect record is 4 ints: start idx, end idx, farthest-point idx, depth.
    // (The old write-only defectPoints/e2 locals were dead code and were removed.)
    List<Point> listPoDefect = new List<Point>();
    for (int j = 0; j < defectIndices.Count; j = j + 4)
    {
        Point farPoint = contourPoints[defectIndices[j + 2]];
        int depth = defectIndices[j + 3];
        if (depth > 8700 && farPoint.y < a) // depth threshold tuned empirically
        {
            listPoDefect.Add(farPoint);
        }
    }

    Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

    this.numberOfFingers = listPoDefect.Count;
    if (this.numberOfFingers > 5)
    {
        this.numberOfFingers = 5;
    }

    foreach (Point p in listPoDefect)
    {
        Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
    }
}
/// <summary>
/// Detects the answer-grid squares on an aligned OMR sheet, then scans each
/// grid's 20 answer rows and overlays the recognized answer numbers.
/// </summary>
/// <param name="align">Aligned sheet image (RGB); drawn on in place.</param>
public void getAnswerNumber(Mat align)
{
    // Edge map: gray -> Canny -> dilate (closes small gaps in grid borders).
    Mat align_gray = new Mat(), align_edges = new Mat();
    Imgproc.cvtColor(align, align_gray, Imgproc.COLOR_RGB2GRAY);
    Imgproc.Canny(align_gray, align_edges, 50, 50);
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2 + 1, 2 + 1), new Point(1, 1));
    Imgproc.dilate(align_edges, align_edges, element);

    //Shape detection
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(align_edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

    // Convex hulls of all contours, keeping only those whose area matches the
    // expected answer-grid size (90k..110k px — tuned to the sheet layout).
    List<MatOfPoint> hulls = new List<MatOfPoint>();
    for (int i = 0; i < contours.Count; i++)
    {
        MatOfInt hull_temp = new MatOfInt();
        Imgproc.convexHull(contours[i], hull_temp);
        int[] arrIndex = hull_temp.toArray();
        Point[] arrContour = contours[i].toArray();
        Point[] arrPoints = new Point[arrIndex.Length];
        for (int k = 0; k < arrIndex.Length; k++)
        {
            arrPoints[k] = arrContour[arrIndex[k]];
        }
        MatOfPoint temp = new MatOfPoint();
        temp.fromArray(arrPoints);
        //Filter outliers — area computed once instead of twice.
        double hullArea = Imgproc.contourArea(temp);
        if (hullArea > 90000 && hullArea < 110000)
        {
            hulls.Add(temp);
        }
    }

    List<MatOfPoint2f> hull2f = new List<MatOfPoint2f>();
    for (int i = 0; i < hulls.Count; i++)
    {
        hull2f.Add(new MatOfPoint2f(hulls[i].toArray()));
    }

    List<Rect> rects = new List<Rect>();
    for (int i = 0; i < hulls.Count; i++)
    {
        //Approximate polygon
        MatOfPoint2f approx = new MatOfPoint2f();
        Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
        List<Point> approx_polygon = approx.toList();
        approx_polygon = Scannerproc.filterPolygon(approx_polygon);
        if (Scannerproc.isSquare(approx_polygon))
        {
            Rect r = Imgproc.boundingRect(new MatOfPoint(approx_polygon.ToArray()));

            // Skip squares whose top-left corner lies within 100px of one we
            // already accepted (duplicate detections of the same grid).
            bool isContain = false;
            for (int k = 0; k < rects.Count; k++)
            {
                if (Scannerproc.distanceTwoPoints(rects[k].tl(), r.tl()) < 100)
                {
                    isContain = true;
                }
            }
            if (!isContain)
            {
                rects.Add(r);
                // 21 horizontal strips; row 0 is the header, rows 1..20 hold answers.
                for (int j = 1; j < 21; j++)
                {
                    Rect roi = new Rect((int)r.tl().x + (int)((r.width * 1.3) / 6),
                                        (int)r.tl().y + (r.height / 21) * j,
                                        (int)((r.width * 4.7) / 6),
                                        r.height / 21);
                    int num = getAnswerNumber(align, roi);
                    if (num != 0)
                    {
                        Imgproc.putText(align, " " + num, new Point(roi.x - 40, roi.y + 25), 1, 2, new Scalar(255, 0, 0, 255), 3, Core.LINE_AA, false);
                        Imgproc.rectangle(align, roi.tl(), roi.br(), new Scalar(0, 255, 0, 255), 2);
                    }
                }
            }
        }
        // NOTE(review): removed the old center-of-mass computation here — its
        // result fed only a commented-out debug circle, and it divided by
        // approx_polygon.Count, which could be zero after filterPolygon.
    }

    // All four grids found: stop the detection loop.
    if (rects.Count == 4)
    {
        nowDetected = false;
    }
}
// Update is called once per frame.
// Grabs the current webcam frame, finds large convex quadrilaterals
// (candidate OMR sheets) via Canny + contours, and perspective-aligns
// each square found.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        frame = webCamTextureToMatHelper.GetMat();
        frame.copyTo(img_orig);
        drawing = img_orig.clone();

        int lowThreshold = 50;// (int)200;// slider.value;
        const int ratio = 1;
        const int kernel_size = 3;

        Imgproc.cvtColor(img_orig, img_lab, Imgproc.COLOR_BGR2Lab);
        double omrSize = img_orig.cols() * img_orig.rows();

        // Preprocess: gray -> blur -> erode -> Canny edge map.
        Imgproc.cvtColor(img_orig, img_gray, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.GaussianBlur(img_gray, img_gray, new Size(15, 15), 1.5, 1.5);
        Imgproc.erode(img_gray, img_gray, new Mat(), new Point(-1, -1), 1);
        Imgproc.Canny(img_gray, img_edges, lowThreshold, lowThreshold * ratio, kernel_size, false);

        //Shape detection
        List<MatOfPoint> contours = new List<MatOfPoint>();
        Mat hierarchy = new Mat();
        Imgproc.findContours(img_edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        // Keep convex hulls covering between 1/3 and 4/5 of the frame area
        // (the sheet should dominate the view but not fill it entirely).
        List<MatOfPoint> hulls = new List<MatOfPoint>();
        for (int i = 0; i < contours.Count; i++)
        {
            MatOfInt hull_temp = new MatOfInt();
            Imgproc.convexHull(contours[i], hull_temp);
            int[] arrIndex = hull_temp.toArray();
            Point[] arrContour = contours[i].toArray();
            Point[] arrPoints = new Point[arrIndex.Length];
            for (int k = 0; k < arrIndex.Length; k++)
            {
                arrPoints[k] = arrContour[arrIndex[k]];
            }
            MatOfPoint temp = new MatOfPoint();
            temp.fromArray(arrPoints);
            //Filter outliers — area computed once instead of twice.
            double hullArea = Imgproc.contourArea(temp);
            if (hullArea > omrSize / 3 && hullArea < (omrSize * 4) / 5)
            {
                hulls.Add(temp);
            }
        }

        List<MatOfPoint2f> hull2f = new List<MatOfPoint2f>();
        for (int i = 0; i < hulls.Count; i++)
        {
            hull2f.Add(new MatOfPoint2f(hulls[i].toArray()));
        }

        for (int i = 0; i < hulls.Count; i++)
        {
            //Approximate polygon
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
            List<Point> approx_polygon = approx.toList();
            // approx_polygon = Scannerproc.filterPolygon(approx_polygon);
            if (!Scannerproc.isSquare(approx_polygon))
            {
                continue; // guard clause replaces the old redundant if/else
            }
            nowRectPoints.Clear();
            nowRectPoints.AddRange(approx_polygon);
            perspectiveAlign();
            // NOTE: removed dead center-of-mass computation (result was never used).
            Scannerproc.drawShape(drawing, approx_polygon, new Scalar(0, 255, 0));
        }

        if (showTextureOnScreen)
        {
            showCurrentTextureOnScreen();
        }
    }
}
// Finds the largest 4-corner contour in baseTexture, perspective-warps it to
// a 512x512 square, then binarizes and downsamples to 28x28 for the
// classifier shown in targetRawImage.
void FormatImageSquare()
{
    Mat mainMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
    Mat grayMat = new Mat();

    //Convert Texture2d to Matrix
    Utils.texture2DToMat(baseTexture, mainMat);
    //copy main matrix to grayMat
    mainMat.copyTo(grayMat);

    //Convert color to gray
    Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
    //Blur
    Imgproc.GaussianBlur(grayMat, grayMat, new Size(25, 25), 0);
    //contrast
    Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);
    //extract edge
    Imgproc.Canny(grayMat, grayMat, 50, 50);

    //prepare for the finding contours
    List<MatOfPoint> contours = new List<MatOfPoint>();
    //find the contour from canny edge image
    Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    List<MatOfPoint> tempTargets = new List<MatOfPoint>();
    for (int i = 0; i < contours.Count; i++)
    {
        MatOfPoint cp = contours[i];
        MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
        double p = Imgproc.arcLength(cn, true);

        MatOfPoint2f approx = new MatOfPoint2f();
        //convert to polygon
        Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);

        //find a contour with four points and large area
        int minContourArea = 10000;
        // Hoisted: approx.toArray() used to be re-marshalled on every access below.
        Point[] corners = approx.toArray();
        if (corners.Length == 4 && Imgproc.contourArea(approx) > minContourArea)
        {
            MatOfPoint approxPt = new MatOfPoint();
            approx.convertTo(approxPt, CvType.CV_32S);

            // Reject quads with any corner angle >= 135 degrees (too skewed
            // to be the document). Note: despite the old name "maxCosine",
            // Vector2.Angle returns degrees, so this tracks the max angle.
            float maxAngle = 0;
            for (int j = 2; j < 5; j++)
            {
                Vector2 v1 = new Vector2((float)(corners[j % 4].x - corners[j - 1].x),
                                         (float)(corners[j % 4].y - corners[j - 1].y));
                Vector2 v2 = new Vector2((float)(corners[j - 2].x - corners[j - 1].x),
                                         (float)(corners[j - 2].y - corners[j - 1].y));
                float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                maxAngle = Mathf.Max(maxAngle, angle);
            }
            if (maxAngle < 135f)
            {
                tempTargets.Add(approxPt);
            }
        }
    }

    if (tempTargets.Count > 0)
    {
        //Get the first contour
        MatOfPoint approxPt = tempTargets[0];
        //Making Source Mat
        Mat srcPointMat = Converters.vector_Point_to_Mat(approxPt.toList(), CvType.CV_32F);

        //Making Destination Mat /*change these values*/
        List<Point> dstPoints = new List<Point>();
        dstPoints.Add(new Point(512, 0));
        dstPoints.Add(new Point(0, 0));
        dstPoints.Add(new Point(0, 512));
        dstPoints.Add(new Point(512, 512));
        Mat dstPointMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);

        //Make Perp transform
        Mat M = Imgproc.getPerspectiveTransform(srcPointMat, dstPointMat);
        Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
        //Crop and warp the image
        Imgproc.warpPerspective(mainMat, warpedMat, M, new Size(512, 512), Imgproc.INTER_LINEAR);
        warpedMat.convertTo(warpedMat, CvType.CV_8UC3);

        //Convert color to gray
        Imgproc.cvtColor(warpedMat, warpedMat, Imgproc.COLOR_BGR2GRAY);
        //contrast
        Imgproc.threshold(warpedMat, warpedMat, 0, 255, Imgproc.THRESH_OTSU);
        //resize to the classifier's 28x28 input
        Imgproc.resize(warpedMat, warpedMat, new Size(28, 28));

        //Create an empty final texture
        finalTexture = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
        //Convert material to texture2d
        Utils.matToTexture2D(warpedMat, finalTexture);
        targetRawImage.texture = finalTexture;
    }
}
// Hand pose estimation pass with AR pose solve: finds the largest colored
// contour, solves a planar PnP pose from its bounding box, positions the AR
// object accordingly, then draws the contour's convex hull.
private void HandPoseEstimationProcess(Mat rgbaMat)
{
    //Imgproc.blur(mRgba, mRgba, new Size(5,5));
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
    //Imgproc.medianBlur(mRgba, mRgba, 3);

    if (!isColorSelected)
    {
        return;
    }

    // NOTE(review): contours are fetched BEFORE Process() runs, so this frame
    // is analyzed with the previous frame's contours — confirm intentional.
    List<MatOfPoint> contours = detector.GetContours();
    detector.Process(rgbaMat);

    if (contours.Count <= 0)
    {
        return;
    }

    // Select the contour with the largest min-area-rect footprint.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    MatOfPoint contour = contours[boundPos];
    OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
    Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

    // Image-space corners of the bounding box, clockwise from top-left.
    // (The old unused bottomLeft/topLeft/bottomRight/topRight locals were removed.)
    rectPoints = new MatOfPoint2f(new Point(boundRect.x, boundRect.y), //topleft
                                  new Point(boundRect.x + boundRect.width, boundRect.y), //Top Right
                                  new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height), //Bottom Right
                                  new Point(boundRect.x, boundRect.y + boundRect.height) //Bottom Left
                                  );

    // Matching model-space corners of a unit square marker centered at origin.
    List<Point3> m_markerCorners3dList = new List<Point3>();
    m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
    m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
    m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
    m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
    m_markerCorners3d.fromList(m_markerCorners3dList);

    //estimate pose
    Mat Rvec = new Mat();
    Mat Tvec = new Mat();
    Mat raux = new Mat();
    Mat taux = new Mat();
    Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);
    raux.convertTo(Rvec, CvType.CV_32F);
    taux.convertTo(Tvec, CvType.CV_32F);

    rotMat = new Mat(3, 3, CvType.CV_64FC1);
    Calib3d.Rodrigues(Rvec, rotMat);

    // Build the 4x4 camera-from-marker transform from R|t.
    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

    Rvec.Dispose();
    Tvec.Dispose();
    raux.Dispose();
    taux.Dispose();
    rotMat.Dispose();

    // Y/Z axis flips convert OpenCV's coordinate system to Unity's.
    ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;

    if (ARGameObject != null)
    {
        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
        if (deactivateCoroutine == null)
        {
            deactivateCoroutine = StartCoroutine(Wait(10.0f));
        }
        ARGameObject.SetActive(true);
    }
    //end pose estimation

    // Simplify the contour, then compute its convex hull and defects.
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
    contour = new MatOfPoint(pointMat.toArray());

    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);
    if (hull.toArray().Length < 3)
    {
        return; // convexityDefects needs at least a triangle
    }
    Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

    // Hoisted out of the loop: each toList() call copies the whole Mat.
    List<int> hullIndices = hull.toList();
    List<Point> contourPoints = contour.toList();

    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    List<Point> listPo = new List<Point>();
    for (int j = 0; j < hullIndices.Count; j++)
    {
        listPo.Add(contourPoints[hullIndices[j]]);
    }
    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.Add(e);

    // NOTE(review): the defect-filtering loop was removed because its entire
    // body was commented out — listPoDefect therefore always stays empty and
    // numberOfFingers is always 0 until the depth-threshold filter is restored.
    List<Point> listPoDefect = new List<Point>();

    Debug.Log("hull: " + hull.toList());
    if (convexDefect.rows() > 0)
    {
        Debug.Log("defects: " + convexDefect.toList());
    }

    //use these contours to do heart detection
    Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

    int defectsTotal = (int)convexDefect.total();
    Debug.Log("Defect total " + defectsTotal);

    this.numberOfFingers = listPoDefect.Count;
    if (this.numberOfFingers > 5)
    {
        this.numberOfFingers = 5;
    }
    Debug.Log("numberOfFingers " + numberOfFingers);

    Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
    numberOfFingersText.text = numberOfFingers.ToString();

    foreach (Point p in listPoDefect)
    {
        Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
    }
}
// Reads the tangram photo at 'path', segments the seven tangram pieces by HSV
// color range, reduces each piece to a labeled polygon (MyShape), and returns
// the whole shape set serialized as JSON.
// NOTE(review): ofsetx/ofsety are subtracted from every point at the end —
// presumably compensating for a 4096px center-crop done elsewhere; confirm
// against the caller.
string getFeatureTangram(string path)
{
    Mat rgbMat = Imgcodecs.imread(path);
    var width = rgbMat.width();
    var height = rgbMat.height();
    // Offsets for images larger than 4096px in either dimension.
    var ofsetx = 0;
    var ofsety = 0;
    if (width > 4096) { ofsetx = (width - 4096) / 2; }
    if (height > 4096) { ofsety = (height - 4096) / 2; }
    var rat = (float)rgbMat.width() / (float)rgbMat.height(); // aspect ratio for debug views

    Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_RGBA2BGR);
    Mat rgbMat2 = new Mat(rgbMat.size(), rgbMat.type()); // annotation overlay (debug)

    // Fit all debug RawImages to the photo's aspect ratio.
    if (debug == true)
    {
        mainDebug.GetComponent<AspectRatioFitter>().aspectRatio = rat;
        debug1.GetComponent<AspectRatioFitter>().aspectRatio = rat;
        debug2.GetComponent<AspectRatioFitter>().aspectRatio = rat;
        debug3.GetComponent<AspectRatioFitter>().aspectRatio = rat;
        debug4.GetComponent<AspectRatioFitter>().aspectRatio = rat;
        debug5.GetComponent<AspectRatioFitter>().aspectRatio = rat;
        debug6.GetComponent<AspectRatioFitter>().aspectRatio = rat;
        debug7.GetComponent<AspectRatioFitter>().aspectRatio = rat;
        debug8.GetComponent<AspectRatioFitter>().aspectRatio = rat;
    }

    Mat hsvMat = new Mat();
    Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
    Debug.Log(rgbMat.width());

    // Allocate debug textures sized to maxWidth x (maxWidth / aspect).
    if (debug == true)
    {
        maxHeight = (int)(maxWidth / rat);
        texture = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        dbTxt1 = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        dbTxt2 = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        dbTxt3 = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        dbTxt4 = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        dbTxt5 = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        dbTxt6 = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        dbTxt7 = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        dbTxt8 = new Texture2D(maxWidth, maxHeight, TextureFormat.RGBA32, false);
        mainDebug.texture = texture;
        debug1.texture = dbTxt1;
        debug2.texture = dbTxt2;
        debug3.texture = dbTxt3;
        debug4.texture = dbTxt4;
        debug5.texture = dbTxt5;
        debug6.texture = dbTxt6;
        debug7.texture = dbTxt7;
        debug8.texture = dbTxt8;
    }
    if (debug)
    {
        Mat a = new Mat();
        Imgproc.resize(rgbMat, a, new Size(maxWidth, maxHeight));
        Utils.matToTexture2D(a, dbTxt4);
    }

    Mat threshold = new Mat();
    Mat threshold2 = new Mat();
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    MatOfPoint2f mop2f = new MatOfPoint2f(); // shared scratch buffer, reused per contour
    TangramShape blackShape = new TangramShape();
    List<MyShape> ls_shapes = new List<MyShape>();
    blackShape.datas = ls_shapes;
    bool[] OK = new bool[7]; // per-color "successfully recognized" flags

    // One pass per tangram piece color.
    for (var obj_i = 0; obj_i < 7; obj_i++)
    {
        var obj = ls_obj[obj_i];
        Core.inRange(hsvMat, obj.HSVmin, obj.HSVmax, threshold);
        // Red hue wraps around 0 in HSV, so merge in a second (lower) range.
        if (obj_i == (int)tgr.RED)
        {
            Core.inRange(hsvMat, obj.lower_HSVMin, obj.lower_HSVMax, threshold2);
            threshold2.copyTo(threshold, threshold2);
        }
        if (obj_i == (int)tgr.YELLOW)
        {
            if (debug)
            {
                matToTexture(threshold, dbTxt3);
            }
        }
        contours.Clear();
        Imgproc.findContours(threshold, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        // Drop blobs below the minimum piece area (index walk compensates for RemoveAt).
        for (int ct_i = 0; ct_i < contours.Count; ct_i++)
        {
            if (Imgproc.contourArea(contours[ct_i]) < MIN_OBJECT_AREA)
            {
                contours.RemoveAt(ct_i);
                ct_i--;
            }
        }
        Scalar c = hsv2rgb(obj.getColor());
        for (int ct_i = 0; ct_i < contours.Count; ct_i++)
        {
            var ct = contours[ct_i];
            var peri = Imgproc.arcLength(new MatOfPoint2f(ct.toArray()), true);
            Imgproc.approxPolyDP(new MatOfPoint2f(ct.toArray()), mop2f, 0.05 * peri, true);
            {
                MyShape ms = new MyShape();
                var points = mop2f.toArray();
                var index = -1;
                var max = -1d;
                var numPoints = points.Length;
                ms._id = obj_i;
                ms.ps = new Point[numPoints];
                if (numPoints == 3)
                {
                    // Triangle piece: find the longest edge (the hypotenuse).
                    OK[obj_i] = true;
                    for (var p_i = 0; p_i < numPoints; p_i++)
                    {
                        //Debug.LogFormat("p1 = {0}, p2 = {1}", p_i % numPoints, (p_i + 1) % numPoints);
                        var p1 = points[p_i % numPoints];
                        var p2 = points[(p_i + 1) % numPoints];
                        var vt = p2 - p1;
                        float len = (float)(vt.x * vt.x + vt.y * vt.y); // squared length (no sqrt needed)
                        if (len > max)
                        {
                            index = p_i;
                            max = len;
                        }
                    }
                    // Canonical ordering: ps[0] = right-angle corner, ps[1]/ps[2] = acute corners.
                    var i_nhon1 = index;
                    var i_nhon2 = (index + 1) % numPoints;
                    var i_vuong = (index + 2) % numPoints;
                    ms.ps[0] = points[i_vuong];
                    ms.ps[1] = points[i_nhon1];
                    ms.ps[2] = points[i_nhon2];
                    Imgproc.putText(rgbMat2, "1", points[i_nhon1], 1, 20, c, 10);
                    Imgproc.putText(rgbMat2, "2", points[i_nhon2], 1, 20, c, 10);
                    Imgproc.putText(rgbMat2, "0", points[i_vuong], 1, 20, c, 10);
                }
                else if (numPoints == 4)
                {
                    if (obj_i == (int)tgr.YELLOW)
                    {
                        // Yellow = the square piece: corner order is kept as detected.
                        OK[obj_i] = true;
                        Debug.Log("Xin chao the mau vang");
                        ms.ps[0] = points[0];
                        ms.ps[1] = points[1];
                        ms.ps[2] = points[2];
                        ms.ps[3] = points[3];
                    }
                    else if (obj_i == (int)tgr.ORANGE)
                    {
                        // Orange = the parallelogram: orient by its longer diagonal,
                        // then decide mirror state from the adjacent edge lengths.
                        OK[obj_i] = true;
                        Debug.Log("Xin chao the gioi");
                        var vt_cheo1 = points[0] - points[2];
                        var vt_cheo2 = points[1] - points[3];
                        var len_cheo1 = vt_cheo1.x * vt_cheo1.x + vt_cheo1.y * vt_cheo1.y;
                        var len_cheo2 = vt_cheo2.x * vt_cheo2.x + vt_cheo2.y * vt_cheo2.y;
                        var i_nhon = 0; // index of an acute corner (endpoint of the longer diagonal)
                        if (len_cheo2 > len_cheo1)
                        {
                            i_nhon = 1;
                        }
                        ms.ps[0] = points[i_nhon];
                        ms.ps[1] = points[(i_nhon + 1)];
                        ms.ps[2] = points[(i_nhon + 2)];
                        ms.ps[3] = points[(i_nhon + 3) % numPoints];
                        var i_prvNhon = (i_nhon + 4 - 1) % numPoints;
                        var i_aftNhon = i_nhon + 1;
                        var vt_prvNhon = points[i_prvNhon] - points[i_nhon];
                        var vt_aftNhon = points[i_aftNhon] - points[i_nhon];
                        //Imgproc.line(rgbMat2, points[i_prvNhon], points[i_nhon], c, 10);
                        var len_prvNhon = vt_prvNhon.x * vt_prvNhon.x + vt_prvNhon.y * vt_prvNhon.y;
                        var len_aftNhon = vt_aftNhon.x * vt_aftNhon.x + vt_aftNhon.y * vt_aftNhon.y;
                        if (len_prvNhon > len_aftNhon)
                        {
                            ms.isFlip = true;
                            Imgproc.putText(rgbMat2, " IsFLIP", ms.ps[3], 1, 20, c, 10);
                        }
                        else
                        {
                            ms.isFlip = false;
                            Imgproc.putText(rgbMat2, " IsNOTFLIP", ms.ps[3], 1, 20, c, 10);
                        }
                        Debug.Log(ms.ps.Length);
                        Debug.Log((i_nhon + 3) % numPoints);
                        if (debug == true)
                        {
                            Imgproc.putText(rgbMat2, "0", ms.ps[0], 1, 20, c, 10);
                            Imgproc.putText(rgbMat2, "1", ms.ps[1], 1, 20, c, 10);
                            Imgproc.putText(rgbMat2, "2", ms.ps[2], 1, 20, c, 10);
                            Imgproc.putText(rgbMat2, "3", ms.ps[3], 1, 20, c, 10);
                        }
                    }
                }
                // NOTE(review): ms is added even when numPoints is not 3/4 (or a
                // 4-point shape is neither yellow nor orange), leaving ms.ps
                // entries null and OK[obj_i] unset — confirm this is intended.
                ls_shapes.Add(ms);
            }
        }
    }

    // Report any color whose piece was never recognized.
    for (var ok_i = 0; ok_i < 7; ok_i++)
    {
        if (OK[ok_i] == false)
        {
            Debug.LogError("Sai mau: " + ok_i);
        }
    }

    // Debug overlays: reference markers plus scaled-down views of each stage.
    if (debug)
    {
        Imgproc.circle(rgbMat2, new Point(1851, 3172), 20, yellow.getColor(), 10);
        Imgproc.circle(rgbMat2, new Point(1245, 2565), 20, yellow.getColor(), 10);
        Imgproc.circle(rgbMat2, new Point(883, 2925), 20, red.getColor(), 10);
        Imgproc.circle(rgbMat2, new Point(2100, 1709), 20, red.getColor(), 10);
        Mat a = new Mat();
        Imgproc.resize(rgbMat, a, new Size(maxWidth, maxHeight));
        Utils.matToTexture2D(a, texture);
        Imgproc.resize(hsvMat, a, new Size(maxWidth, maxHeight));
        Utils.matToTexture2D(a, dbTxt1);
        Imgproc.resize(rgbMat2, a, new Size(maxWidth, maxHeight));
        Utils.matToTexture2D(a, dbTxt2);
    }

    // Shift every detected point back by the crop offsets.
    for (int i = 0; i < blackShape.datas.Count; i++)
    {
        for (int j = 0; j < blackShape.datas[i].ps.Length; j++)
        {
            blackShape.datas[i].ps[j].x -= ofsetx;
            blackShape.datas[i].ps[j].y -= ofsety;
        }
    }
    var json = JsonUtility.ToJson(blackShape);
    return(json);
}
//public delegate void Process(int[] tgrdeteced);
// Asynchronous tangram detection. On a worker thread (Observable.Start):
// warps/normalizes the frame, thresholds each of the 7 piece colors, keeps
// the polygon closest to the frame center per color, and classifies the set
// via tangramFeatureModelList.Detect. The result is delivered to 'prc' on
// the main thread. Mutex 'mut' serializes access to the shared field Mats.
void tagramDetect(Mat t_rgbaMat, Action<TangramResultModel, List<MyShape>> prc)
{
    List<MyShape> lms = new List<MyShape>();
    System.Diagnostics.Stopwatch watch = null;
    long elapsedMs; // NOTE(review): written at each stage but never read — timing is effectively disabled
    TangramResultModel trm = null;
    Observable.Start(() => {
        mut.WaitOne();
        Imgproc.resize(t_rgbaMat, rgbaMat, new Size(nW_goc, nH_goc));
        watch = System.Diagnostics.Stopwatch.StartNew();
        // Optional perspective correction, then crop to the nW x nH work area.
        if (warp != null)
        {
            warp.Init(rgbaMat);
            Mat wMat = warp.warpPerspective(rgbaMat);
            rgbaMat = wMat.submat(0, nH, 0, nW);
        }
        else
        {
            rgbaMat = rgbaMat.submat(0, nH, 0, nW);
        }
        // Fresh debug canvases for this frame.
        all_thresh = Mat.zeros(nH, nW, CvType.CV_8UC3);
        all_thresh_afct = Mat.zeros(nH, nW, CvType.CV_8UC3);
        dbMat = Mat.zeros(nH, nW, CvType.CV_8UC3);
        all_thresh_af = Mat.zeros(nH, nW, CvType.CV_8UC3);
        // Several brightness/contrast variants; each gets its own HSV conversion
        // so different colors can be thresholded on the variant that suits them.
        rgbaMat.copyTo(rgbMat);
        rgbMat.convertTo(rgbMat2, CvType.CV_8UC3, 0.8, 60);
        rgbMat2.copyTo(rgbMat2copy);
        rgbMat.convertTo(rgbMat3, CvType.CV_8UC3, 1, 60);
        rgbMat.convertTo(rgbMat4, CvType.CV_8UC3, 1.25, 35);
        rgbMat.convertTo(rgbMat, CvType.CV_8UC3, 1.25, 35);
        Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
        Imgproc.cvtColor(rgbMat2, hsvMat2, Imgproc.COLOR_RGB2HSV);
        Imgproc.cvtColor(rgbMat3, hsvMat3, Imgproc.COLOR_RGB2HSV);
        // NOTE(review): hsvMat4 is converted from rgbMat3 (same as hsvMat3),
        // not rgbMat4 — confirm whether that is intentional.
        Imgproc.cvtColor(rgbMat3, hsvMat4, Imgproc.COLOR_RGB2HSV);
        watch.Stop();
        elapsedMs = watch.ElapsedMilliseconds;
        Mat markers = Mat.zeros(rgbaMat.size(), CvType.CV_32SC1);
        watch = System.Diagnostics.Stopwatch.StartNew();
        // Pass 1: build a binary mask per color.
        for (int obj_i = 0; obj_i < ls_obj.Length; obj_i++)
        {
            var obj = ls_obj[obj_i];
            // Orange/yellow/green threshold on the darkened variant (hsvMat2);
            // everything else on the brightened hsvMat.
            if (obj_i == (int)tgr.ORANGE | obj_i == (int)tgr.YELLOW | obj_i == (int)tgr.GREEN)
            {
                Core.inRange(hsvMat2, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
            }
            else if (obj_i == (int)tgr.LIGHTBLUE)
            {
                Core.inRange(hsvMat, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
            }
            else
            {
                Core.inRange(hsvMat, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
            }
            // Red hue wraps around 0: merge in the low-hue band.
            if (obj_i == (int)tgr.RED)
            {
                Core.inRange(hsvMat, new Scalar(0, 20, 45), new Scalar(5, 255, 255), thresholdMat2);
                thresholdMat2.copyTo(thresholdMat, thresholdMat2);
            }
            thresholdMatArr[obj_i] = thresholdMat.clone();
        }
        //thresholdMatArr[(int)tgr.LIGHTBLUE].setTo(new Scalar(0), thresholdMatArr[(int)tgr.BLUE]);
        //thresholdMatArr[(int)tgr.LIGHTBLUE].setTo(new Scalar(0), thresholdMatArr[(int)tgr.GREEN]);
        // Pass 2: per color — erode, find contours, pick the best polygon.
        for (int obj_i = 0; obj_i < ls_obj.Length; obj_i++)
        {
            var obj = ls_obj[obj_i];
            all_cts.Clear();
            thresholdMat = thresholdMatArr[obj_i];
            if (toggle_db[obj_i] == true)
            {
                all_thresh.setTo(obj.ColorRGB, thresholdMat);
            }
            // NOTE(review): the "true |" makes this condition always true, so
            // every color gets eroded — the listed colors are vestigial.
            if (true | obj_i == (int)tgr.PURPLE | obj_i == (int)tgr.YELLOW | obj_i == (int)tgr.RED | obj_i == (int)tgr.GREEN | obj_i == (int)tgr.ORANGE)
            {
                Imgproc.erode(thresholdMat, thresholdMat2, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)), new Point(-1, -1), 1);
            }
            if (obj_i == (int)tgr.LIGHTBLUE | obj_i == (int)tgr.PURPLE)
            {
                Imgproc.erode(thresholdMat, thresholdMat2, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)), new Point(-1, -1), 1);
            }
            if (toggle_db[obj_i] == true)
            {
                all_thresh_af.setTo(obj.ColorRGB, thresholdMat2);
            }
            all_thresh_afct.setTo(new Scalar(obj_i + 1), thresholdMat2); // label map: pixel value = color index + 1
            color_filter.Add(thresholdMat2.clone());
            Imgproc.findContours(thresholdMat2, all_cts, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
            Scalar c = obj.getColor();
            // Drop blobs outside the expected area band for this piece.
            for (int ct_i = 0; ct_i < all_cts.Count; ct_i++)
            {
                double area = Imgproc.contourArea(all_cts[ct_i]);
                // if (area < MIN_OBJECT_AREA)
                if (area < MIN_OBJECT_AREAS[obj_i] * 0.55)
                {
                    all_cts.RemoveAt(ct_i);
                    ct_i--;
                }
                if (area > MAX_OBJECT_AREAS[obj_i] * 1.3)
                {
                    all_cts.RemoveAt(ct_i);
                    ct_i--;
                }
            }
            MyShape chon = null;           // the candidate chosen for this color ("chon" = chosen)
            MyShape ms = new MyShape();
            float dt = 1000000;            // NOTE(review): never updated, so the distance test below is effectively unconditional
            for (int ct_i = 0; ct_i < all_cts.Count; ct_i++)
            {
                var ct = all_cts[ct_i];
                var peri = Imgproc.arcLength(new MatOfPoint2f(ct.toArray()), true);
                // Looser approximation for the two 4-sided pieces.
                var epsilon = 0.1 * peri;
                if (obj_i == (int)tgr.ORANGE || obj_i == (int)tgr.YELLOW)
                {
                    epsilon = 0.065 * peri;
                }
                Imgproc.approxPolyDP(new MatOfPoint2f(ct.toArray()), approx_ct, epsilon, true);
                // Convex hull of the raw contour; its polygon gives the corner points.
                MatOfInt pts_cvh = new MatOfInt();
                Imgproc.convexHull(ct, pts_cvh, true);
                var cvh_numPts = pts_cvh.toArray().Length;
                Point[] cvh_pts = new Point[cvh_numPts];
                var ct_pts = ct.toArray();
                for (int i = 0; i < cvh_numPts; i++)
                {
                    var i1 = pts_cvh.toArray()[i];
                    var p1 = ct_pts[i1];
                    cvh_pts[i] = p1;
                    try
                    {
                        if (debug == true)
                        {
                            var i2 = pts_cvh.toArray()[(i + 1) % cvh_numPts];
                            var p2 = ct_pts[i2];
                            Imgproc.circle(rgbMat2, p1, 1, c, 2);
                        }
                    }
                    catch (Exception e)
                    {
                        Utilities.LogFormat("Here3:{0},{1},{2}", rgbMat2 == null, p1 == null, c == null);
                        Utilities.Log("Exception is {0}", e.ToString());
                        Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                    }
                }
                MatOfPoint2f approx_cvh = new MatOfPoint2f();
                var epsilon2 = peri * 0.1;
                if (obj_i == (int)tgr.ORANGE)
                {
                    epsilon2 = peri * 0.065;
                }
                Imgproc.approxPolyDP(new MatOfPoint2f(cvh_pts), approx_cvh, epsilon2, true);
                var ct_ori = new MatOfPoint(ct.toArray());
                MatOfPoint approx_ct2 = new MatOfPoint(approx_ct.toArray());
                List<MatOfPoint> approx_cvh2 = new List<MatOfPoint>();
                approx_cvh2.Add(new MatOfPoint(approx_cvh.toArray()));
                // Centroid of the hull polygon via image moments.
                var mu = Imgproc.moments(approx_cvh2[0], true);
                cterTgr.x = mu.m10 / mu.m00;
                cterTgr.y = mu.m01 / mu.m00;
                // Only triangles and quads are valid tangram outlines.
                if (approx_ct2.size().height == 3 | approx_ct2.size().height == 4)
                {
                    var points = approx_cvh2[0].toArray();
                    var numpoints = points.Length;
                    ms._id = obj_i;
                    ms.ps = new Point[numpoints];
                    // Per-color expansion ratio: inflate corners away from the
                    // centroid to undo the erosion shrinkage above.
                    double rat = 1.16;
                    if (obj_i == (int)tgr.PURPLE) { rat = 1.20; }
                    else if (obj_i == (int)tgr.LIGHTBLUE) { rat = 1.20; }
                    else if (obj_i == (int)tgr.RED | obj_i == (int)tgr.BLUE) { rat = 1.09; }
                    else if (obj_i == (int)tgr.YELLOW) { rat = 1.10; }
                    else if (obj_i == (int)tgr.ORANGE) { rat = 1.10; }
                    else if (obj_i == (int)tgr.GREEN) { rat = 1.10; }
                    var ind_huyen = 0; // index of the longest edge ("huyen" = hypotenuse)
                    var max = -1d;
                    if (numpoints == 3 || numpoints == 4)
                    {
                        for (int p_i = 0; p_i < numpoints; p_i++)
                        {
                            var p = points[p_i];
                            var p2 = points[(p_i + 1) % numpoints];
                            // Push each corner outward from the centroid by 'rat',
                            // rounding to 2 decimal places.
                            var vect = p - cterTgr;
                            vect = vect * rat;
                            var p_new = cterTgr + vect;
                            points[p_i].x = (int)(p_new.x * 100) / 100f;
                            points[p_i].y = (int)(p_new.y * 100) / 100f;
                            if (numpoints == 4)
                            {
                                ms.ps[p_i] = p_new;
                            }
                            if (numpoints == 3)
                            {
                                var vt = p2 - p;
                                var length = vt.x * vt.x + vt.y * vt.y; // squared edge length
                                if (length > max)
                                {
                                    ind_huyen = p_i;
                                    max = length;
                                }
                            }
                        }
                    }
                    if (numpoints == 3)
                    {
                        // Canonical triangle ordering: ps[0] = right-angle corner.
                        var i_nhon1 = ind_huyen;
                        var i_nhon2 = (ind_huyen + 1) % numpoints;
                        var i_vuong = (ind_huyen + 2) % numpoints;
                        ms.ps[0] = points[i_vuong];
                        ms.ps[1] = points[i_nhon1];
                        ms.ps[2] = points[i_nhon2];
                    }
                    else if (numpoints == 4)
                    {
                        if (obj_i == (int)tgr.ORANGE)
                        {
                            // Parallelogram: orient by longer diagonal, then detect
                            // mirror state from adjacent edge lengths.
                            var vt_cheo1 = ms.ps[0] - ms.ps[2];
                            var vt_cheo2 = ms.ps[1] - ms.ps[3];
                            var leng_cheo1 = vt_cheo1.x * vt_cheo1.x + vt_cheo1.y * vt_cheo1.y;
                            var leng_cheo2 = vt_cheo2.x * vt_cheo2.x + vt_cheo2.y * vt_cheo2.y;
                            var i_nhon = 0;
                            if (leng_cheo2 > leng_cheo1)
                            {
                                i_nhon = 1;
                            }
                            ms.ps[0] = points[i_nhon];
                            ms.ps[1] = points[(i_nhon + 1)];
                            ms.ps[2] = points[(i_nhon + 2)];
                            ms.ps[3] = points[(i_nhon + 3) % numpoints];
                            var i_prvNhon = (i_nhon + 4 - 1) % numpoints;
                            var i_aftNhon = i_nhon + 1;
                            var vt_prvNhon = points[i_prvNhon] - points[i_nhon];
                            var vt_aftNhon = points[i_aftNhon] - points[i_nhon];
                            var len_prvNhon = vt_prvNhon.x * vt_prvNhon.x + vt_prvNhon.y * vt_prvNhon.y;
                            var len_aftNhon = vt_aftNhon.x * vt_aftNhon.x + vt_aftNhon.y * vt_aftNhon.y;
                            Imgproc.line(dbMat, points[i_prvNhon], points[i_nhon], c, 1);
                            if (len_prvNhon > len_aftNhon)
                            {
                                ms.isFlip = true;
                                Imgproc.putText(dbMat, " IsFLIP", ms.ps[3], 1, 1, c, 1);
                            }
                            else
                            {
                                ms.isFlip = false;
                                Imgproc.putText(dbMat, " IsNOTFLIP", ms.ps[3], 1, 1, c, 1);
                            }
                        }
                    }
                    // Prefer the shape closest to the frame center.
                    var centerMat = new Point(rgbMat.width() / 2f, rgbMat.height() / 2f);
                    var vtLech = centerMat - cterTgr;
                    var dt2 = vtLech.x * vtLech.x + vtLech.y * vtLech.y;
                    if (dt2 < dt)
                    {
                        chon = ms;
                    }
                }
                try
                {
                    Imgproc.circle(rgbMat, cterTgr, 1, c, 1);
                    Imgproc.putText(rgbMat, mu.m00.ToString(), cterTgr, 1, 1, c, 1);
                }
                catch (Exception e)
                {
                    Utilities.LogFormat("Here2:{0},{1},{2}", rgbMat == null, cterTgr == null, c == null);
                    Utilities.Log("Exception is {0}", e.ToString());
                    Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                }
                //if (approx_ct2.size().height == 3 | approx_ct2.size().height == 4) break;
            }
            // Draw the chosen polygon on the debug canvases.
            if (chon != null)
            {
                lms.Add(chon);
                var ps = chon.ps;
                for (int i = 0; i < ps.Length; i++)
                {
                    var p1 = ps[i];
                    var p2 = ps[(i + 1) % ps.Length];
                    try
                    {
                        Imgproc.line(rgbMat2, p1, p2, c, 1);
                        Imgproc.line(all_thresh_afct, p1, p2, new Scalar(255, 255, 255), 1);
                        Imgproc.line(dbMat, p1, p2, c, 1);
                        Imgproc.circle(dbMat, p1, 1, c);
                    }
                    catch (Exception e)
                    {
                        Utilities.LogFormat("Here1:{0},{1},{2}", rgbMat2 == null, p1 == null, p2 == null);
                        Utilities.Log("Exception is {0}", e.ToString());
                        Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                    }
                }
            }
            watch.Stop();
            elapsedMs = watch.ElapsedMilliseconds;
        }
        // Classify the full shape set.
        TangramShape msl = new TangramShape();
        msl.datas = lms;
        var json = JsonUtility.ToJson(msl);
        watch = System.Diagnostics.Stopwatch.StartNew();
        trm = tangramFeatureModelList.Detect(msl.datas.ToArray());
        watch.Stop();
        elapsedMs = watch.ElapsedMilliseconds;
        mut.ReleaseMutex();
    }).ObserveOnMainThread().Subscribe((rx) => {
        // Main thread: deliver the result, then refresh the debug textures.
        prc(trm, lms);
        if (debug == true)
        {
            mut.WaitOne();
            if (texture != null && debug == true) { Utils.matToTexture2D(dbMat, texture); }
            if (dbText1 != null && debug == true) { Utils.matToTexture2D(rgbMat2copy, dbText1); }
            if (dbText2 != null && debug == true) { Utils.matToTexture2D(rgbMat3, dbText2); }
            if (dbText3 != null && debug == true) { Utils.matToTexture2D(rgbMat4, dbText3); }
            if (dbText4 != null && debug == true) { Utils.matToTexture2D(rgbMat, dbText4); }
            all_thresh_afct = all_thresh_afct * 25; // spread label values across the gray range for visibility
            Imgproc.cvtColor(rgbMat2, rgbMat2, Imgproc.COLOR_RGBA2RGB);
            Imgproc.cvtColor(all_thresh, all_thresh, Imgproc.COLOR_RGBA2RGB);
            Mat a = new Mat(all_thresh.size(), CvType.CV_8UC3);
            Core.addWeighted(all_thresh, 0.2, rgbMat2, 0.8, 0, a);
            if (dbText5 != null && debug == true) { Utils.matToTexture2D(a, dbText5); }
            if (dbText6 != null && debug == true) { Utils.matToTexture2D(all_thresh, dbText6); }
            if (dbText7 != null && debug == true) { Utils.matToTexture2D(all_thresh_afct, dbText7); }
            if (dbText8 != null && debug == true) { Utils.matToTexture2D(all_thresh_af, dbText8); }
            mut.ReleaseMutex();
        }
    });
}
void Start()
{
    // Convert the source texture to an OpenCV Mat and show it while processing.
    Mat mainMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
    Mat grayMat = new Mat();
    sourceRawImage.texture = baseTexture;
    Utils.texture2DToMat(baseTexture, mainMat);

    // Grayscale -> blur -> binarize -> edge-detect pipeline.
    mainMat.copyTo(grayMat);
    Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
    Imgproc.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);
    // FIX: maxval was 225 — a THRESH_BINARY mask conventionally saturates to 255;
    // 225 looks like a transposition typo.
    Imgproc.threshold(grayMat, grayMat, 110, 255, Imgproc.THRESH_BINARY);
    Imgproc.Canny(grayMat, grayMat, 20, 190);

    List<MatOfPoint> contours = new List<MatOfPoint>();
    Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Keep round-ish contours (more than 8 approx vertices) of moderate area.
    int num = 0;
    List<MatOfPoint> contours_list = new List<MatOfPoint>();
    for (int i = 0; i < contours.Count; i++)
    {
        MatOfPoint cp = contours[i];
        MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
        double p = Imgproc.arcLength(cn, true);
        MatOfPoint2f approx = new MatOfPoint2f();
        Imgproc.approxPolyDP(cn, approx, 0.01 * p, true);
        double area = Imgproc.contourArea(contours[i]);
        if ((area > 30 && area < 100) && approx.toArray().Length > 8)
        {
            contours_list.Add(contours[i]);
            num = num + 1;
            Debug.Log(area);
        }
    }

    // FIX: the original loop drew the whole list once per element
    // (contourIdx -1 draws every contour), i.e. O(n^2) redundant draws;
    // a single call renders the identical result.
    if (contours_list.Count > 0)
    {
        Imgproc.drawContours(mainMat, contours_list, -1, new Scalar(0, 255, 0), 4);
    }
    // NOTE(review): the contours are drawn onto mainMat, but the texture shown
    // below is built from grayMat, so the green overlay is never displayed —
    // confirm whether grayMat or mainMat should be exported here.

    Debug.Log("Number : " + num);
    // NOTE(review): displays num - 1 — presumably compensating for one known
    // spurious detection; confirm against the capture setup.
    info.text += (num - 1).ToString();

    Texture2D finaltexture = new Texture2D(grayMat.cols(), grayMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(grayMat, finaltexture);
    sourceRawImage.texture = finaltexture;
}
// Detects a hand of the given color in the frame and draws the wrist/palm
// bounds, the convex hull, and the between-finger defect points onto rgbaMat.
private static void _handPoseEstimationProcess(Mat rgbaMat, Color handColor)
{
    Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

    // Configure the blob detector with the target hand color.
    detector.setHsvColor(HGColorSpuiter.ColorToScalar(handColor));
    // FIX: process() must run before getContours(); the original fetched the
    // contour list of the PREVIOUS frame (always empty on the first call).
    detector.process(rgbaMat);
    List<MatOfPoint> contours = detector.getContours();
    if (contours.Count <= 0)
    {
        return;
    }

    // Pick the contour with the largest rotated bounding box — assumed to be the hand.
    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;
    for (int i = 1; i < contours.Count; i++)
    {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight)
        {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }
    OpenCVForUnity.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours[boundPos].toArray()));

    // Draw the full hand-to-wrist bounding box.
    Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), HGColorSpuiter.ColorToScalar(WristRangeColor), 2, 8, 0);

    // The palm region is the top 70% of the bounding box; draw it.
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;
    Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), HGColorSpuiter.ColorToScalar(PalmsRangeColor), 2, 8, 0);

    // Simplify the hand contour to fewer vertices (3px tolerance).
    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours[boundPos].toArray()), pointMat, 3, true);
    contours[boundPos] = new MatOfPoint(pointMat.toArray());

    // Convex hull and convexity defects of the hand contour.
    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours[boundPos].toArray()), hull);
    if (hull.toArray().Length < 3)
    {
        return; // convexityDefects requires a hull of at least 3 points
    }
    Imgproc.convexityDefects(new MatOfPoint(contours[boundPos].toArray()), hull, convexDefect);

    // Collect the hull points and draw the hand outline.
    List<MatOfPoint> hullPoints = new List<MatOfPoint>();
    List<Point> listPo = new List<Point>();
    for (int j = 0; j < hull.toList().Count; j++)
    {
        listPo.Add(contours[boundPos].toList()[hull.toList()[j]]);
    }
    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.Add(e);
    Imgproc.drawContours(rgbaMat, hullPoints, -1, HGColorSpuiter.ColorToScalar(HandRangeColor), 3);

    // Keep defect far-points that are deep enough and above the wrist line:
    // these are the gaps between fingers. (Defects come in [start, end, far,
    // depth] quadruples, hence the stride of 4.)
    List<MatOfPoint> defectPoints = new List<MatOfPoint>();
    List<Point> listPoDefect = new List<Point>();
    for (int j = 0; j < convexDefect.toList().Count; j = j + 4)
    {
        Point farPoint = contours[boundPos].toList()[convexDefect.toList()[j + 2]];
        int depth = convexDefect.toList()[j + 3];
        if (depth > depthThreashold && farPoint.y < a)
        {
            listPoDefect.Add(contours[boundPos].toList()[convexDefect.toList()[j + 2]]);
        }
    }
    MatOfPoint e2 = new MatOfPoint();
    // FIX: was fromList(listPo) — a copy/paste from the hull block above;
    // the defect-point list is what belongs here.
    e2.fromList(listPoDefect);
    defectPoints.Add(e2);

    // One gap between fingers per defect; cap the finger count at 5.
    numberOfFingers = listPoDefect.Count;
    if (numberOfFingers > 5)
    {
        numberOfFingers = 5;
    }

    // Mark each between-finger point.
    foreach (Point p in listPoDefect)
    {
        Imgproc.circle(rgbaMat, p, 6, HGColorSpuiter.ColorToScalar(BetweenFingersColor), -1);
    }
}
// Locates the largest paper-like quadrilateral in the frame, outlines it on
// mainMat, warps it to an upright 200x300 image shown on targetRawImage, and
// returns mainMat.
private Mat findPaper(Mat mainMat)
{
    // NOTE(review): converts the *field* grayMat in place, ignoring the
    // mainMat parameter — this relies on a caller having loaded grayMat
    // beforehand (and on it still being 3-channel). It looks like it may have
    // been meant to read from mainMat; confirm against the calling code.
    Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);
    // blur image
    Imgproc.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);

    // Manual 3-level posterize on the raw pixel buffer:
    // dark -> 0, mid-tone -> 100, bright -> 255.
    grayMat.get(0, 0, grayPixels);
    for (int i = 0; i < grayPixels.Length; i++)
    {
        maskPixels[i] = 0; // mask buffer cleared; never set again below
        if (grayPixels[i] < 70)
        {
            grayPixels[i] = 0;
        }
        else if (70 <= grayPixels[i] && grayPixels[i] < 120)
        {
            grayPixels[i] = 100;
        }
        else
        {
            grayPixels[i] = 255;
        }
    }
    grayMat.put(0, 0, grayPixels);

    // Otsu thresholding makes the image black and white,
    // then Canny extracts the edge image for contour finding.
    Imgproc.threshold(grayMat, grayMat, 0, 255, Imgproc.THRESH_OTSU);
    Imgproc.Canny(grayMat, grayMat, 50, 50);

    // prepare for finding contours
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Imgproc.findContours(grayMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    List<MatOfPoint> tmpTargets = new List<MatOfPoint>();
    for (int i = 0; i < contours.Count; i++)
    {
        MatOfPoint cp = contours[i];
        MatOfPoint2f cn = new MatOfPoint2f(cp.toArray());
        double p = Imgproc.arcLength(cn, true);
        MatOfPoint2f approx = new MatOfPoint2f();
        // Convert the contour to a readable polygon
        // (3% of perimeter tolerance; larger values allow more skew).
        Imgproc.approxPolyDP(cn, approx, 0.03 * p, true);
        // Keep only quadrilaterals.
        if (approx.toArray().Length == 4)
        {
            MatOfPoint approxPt = new MatOfPoint();
            approx.convertTo(approxPt, CvType.CV_32S);
            float maxCosine = 0; // NOTE(review): despite the name, holds the max corner *angle* in degrees
            float rate = 0;      // shorter/longer edge ratio; only used by a disabled condition below
            float min_length = 100000000000000;
            for (int j = 2; j < 5; j++)
            {
                // The two edges meeting at vertex j-1.
                Vector2 v1 = new Vector2((float)(approx.toArray()[j % 4].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j % 4].y - approx.toArray()[j - 1].y));
                Vector2 v2 = new Vector2((float)(approx.toArray()[j - 2].x - approx.toArray()[j - 1].x), (float)(approx.toArray()[j - 2].y - approx.toArray()[j - 1].y));
                float v1_length = Mathf.Sqrt(v1.x * v1.x + v1.y * v1.y);
                float v2_length = Mathf.Sqrt(v2.x * v2.x + v2.y * v2.y);
                min_length = Mathf.Min(Mathf.Min((float)(v1_length), (float)v2_length), min_length);
                if (v1_length > v2_length)
                {
                    rate = v2_length / v1_length;
                }
                else
                {
                    rate = v1_length / v2_length;
                }
                float angle = Mathf.Abs(Vector2.Angle(v1, v2));
                maxCosine = Mathf.Max(maxCosine, angle);
            }
            // Accept quads whose shortest edge is long enough and whose
            // corners are not too degenerate. (A "&& rate >= 0.6" clause was
            // previously part of this condition but is disabled.)
            if (min_length > 100 && maxCosine < 135f)
            {
                tmpTargets.Add(approxPt);
            }
        }
    }
    if (tmpTargets.Count > 0)
    {
        // (Dead code removed from comments: a block here once drew bounding
        // rectangles with corner markers around every candidate quad.)

        // Use only the largest candidate quad.
        int largestPaper = findLargestContour(tmpTargets);
        paperCornerMatOfPoint = tmpTargets[largestPaper];

        // Draw the paper boundary on the camera frame.
        Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[1], new Scalar(0, 255, 0), 3);
        Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[0], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
        Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[2], paperCornerMatOfPoint.toList()[3], new Scalar(0, 255, 0), 3);
        Imgproc.line(mainMat, paperCornerMatOfPoint.toList()[1], paperCornerMatOfPoint.toList()[2], new Scalar(0, 255, 0), 3);

        // Extract the target from the frame and correct its perspective:
        // map the four detected corners onto a fixed 200x300 upright rectangle.
        // NOTE(review): assumes the detected corners arrive in the matching
        // order (top-left, bottom-left, bottom-right, top-right) — confirm,
        // otherwise the warp will be mirrored/rotated.
        Mat srcPointsMat = Converters.vector_Point_to_Mat(paperCornerMatOfPoint.toList(), CvType.CV_32F);
        List<Point> dstPoints = new List<Point>();
        dstPoints.Add(new Point(0, 0));
        dstPoints.Add(new Point(0, 300));
        dstPoints.Add(new Point(200, 300));
        dstPoints.Add(new Point(200, 0));
        Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);

        // Make perspective transform and warp.
        Mat m = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
        Mat warpedMat = new Mat(mainMat.size(), mainMat.type());
        Imgproc.warpPerspective(mainMat, warpedMat, m, new Size(200, 300), Imgproc.INTER_LINEAR);
        warpedMat.convertTo(warpedMat, CvType.CV_8UC3);

        // Show the rectified paper on the target image.
        Texture2D finalTargetTextue = new Texture2D(warpedMat.width(), warpedMat.height(), TextureFormat.RGB24, false);
        Utils.matToTexture2D(warpedMat, finalTargetTextue);
        targetRawImage.texture = finalTargetTextue;
    }
    return (mainMat);
}
// Determines which of the five answer bubbles inside region r of the aligned
// sheet is marked. Returns 1..5 for the detected column, or 0 when no
// candidate mark is found.
public int getAnswerNumber(Mat align, Rect r)
{
    // Crop the answer row and edge-detect it.
    Mat roi = new Mat(align, r);
    Mat roi_gray = new Mat(), roi_edges = new Mat();
    Imgproc.cvtColor(roi, roi_gray, Imgproc.COLOR_RGB2GRAY);
    Imgproc.Canny(roi_gray, roi_edges, 200, 200);
    // (A dilation step with an elliptical kernel was tried here and disabled.)

    // Shape detection: find all contours in the edge image.
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(roi_edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

    // Replace each contour by its convex hull, keeping only hulls whose area
    // is in the expected bubble-size range (filters noise and large frames).
    List<MatOfPoint> hulls = new List<MatOfPoint>();
    for (int i = 0; i < contours.Count; i++)
    {
        MatOfInt hull_temp = new MatOfInt();
        Imgproc.convexHull(contours[i], hull_temp);
        // convexHull returns indices into the contour; resolve them to points.
        int[] arrIndex = hull_temp.toArray();
        Point[] arrContour = contours[i].toArray();
        Point[] arrPoints = new Point[arrIndex.Length];
        for (int k = 0; k < arrIndex.Length; k++)
        {
            arrPoints[k] = arrContour[arrIndex[k]];
        }
        MatOfPoint temp = new MatOfPoint();
        temp.fromArray(arrPoints);
        // Filter outliers
        if (Imgproc.contourArea(temp) > 40 && Imgproc.contourArea(temp) < 200)
        {
            hulls.Add(temp);
        }
    }

    // Convert hulls to MatOfPoint2f for approxPolyDP.
    List<MatOfPoint2f> hull2f = new List<MatOfPoint2f>();
    for (int i = 0; i < hulls.Count; i++)
    {
        MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
        hull2f.Add(newPoint);
    }
    for (int i = 0; i < hulls.Count; i++)
    {
        // Approximate polygon (1% of perimeter tolerance).
        MatOfPoint2f approx = new MatOfPoint2f();
        Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
        List<Point> approx_polygon = approx.toList();
        approx_polygon = Scannerproc.filterPolygon(approx_polygon);
        double area = Imgproc.contourArea(approx); // computed but unused

        // Centroid of the polygon vertices.
        // NOTE(review): divides by approx_polygon.Count without guarding
        // against an empty polygon — confirm Scannerproc.filterPolygon can
        // never return an empty list.
        int cx = 0, cy = 0;
        for (int k = 0; k < approx_polygon.Count; k++)
        {
            cx += (int)approx_polygon[k].x;
            cy += (int)approx_polygon[k].y;
        }
        cx /= approx_polygon.Count;
        cy /= approx_polygon.Count;
        // (Disabled debug code here drew the centroid and dumped the ROI to disk.)

        // The five answer columns sit at 10%, 30%, 50%, 70%, 90% of the row
        // width; choose the one closest to the detected centroid.
        Point pos1 = new Point((roi.width() * 1) / 10, cy);
        Point pos2 = new Point((roi.width() * 3) / 10, cy);
        Point pos3 = new Point((roi.width() * 5) / 10, cy);
        Point pos4 = new Point((roi.width() * 7) / 10, cy);
        Point pos5 = new Point((roi.width() * 9) / 10, cy);
        Point nowPos = new Point(cx, cy);
        double[] dist = new double[5];
        dist[0] = Scannerproc.distanceTwoPoints(pos1, nowPos);
        dist[1] = Scannerproc.distanceTwoPoints(pos2, nowPos);
        dist[2] = Scannerproc.distanceTwoPoints(pos3, nowPos);
        dist[3] = Scannerproc.distanceTwoPoints(pos4, nowPos);
        dist[4] = Scannerproc.distanceTwoPoints(pos5, nowPos);
        int id = -1;
        double min_dist = 999999;
        for (int t = 0; t < 5; t++)
        {
            if (dist[t] < min_dist)
            {
                min_dist = dist[t];
                id = t;
            }
        }
        // NOTE(review): returns unconditionally on the first iteration, so
        // only hulls[0] is ever examined — presumably "first mark wins", but
        // the loop shape suggests all hulls may have been meant to vote;
        // confirm.
        return (id + 1);
        //return plusPoints(tl, new Point(cx, cy));
    }
    return (0);
}
// Update is called once per frame.
// Scans the webcam frame for the 9 stickers of a Rubik's cube face, samples
// their colors, and forwards them to ColorTracker; the annotated frame is
// shown on this object's material.
void Update()
{
    Resources.UnloadUnusedAssets(); //Fixes the memory leak

    // Grab the current webcam frame into a texture, then into a Mat.
    // NOTE(review): a new Texture2D is allocated every frame and never
    // destroyed — the UnloadUnusedAssets call above is papering over that.
    imgTexture = new Texture2D(webcamTexture.width, webcamTexture.height);
    imgTexture.SetPixels(webcamTexture.GetPixels());
    imgTexture.Apply();
    Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, imgMat);
    Mat maskMat = new Mat();   // NOTE(review): unused in this method
    Mat maskMatOP = new Mat(); // NOTE(review): unused in this method
    Mat grayMat = new Mat();

    // 1x1 rectangular kernel: effectively a no-op dilation left in from tuning.
    Imgproc.dilate(imgMat, imgMat, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(1, 1)));
    //Grayscale the picture
    Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGB2GRAY);
    //Blur the picture
    Imgproc.GaussianBlur(grayMat, grayMat, new Size(3, 3), 1);
    Imgproc.equalizeHist(grayMat, grayMat);

    //Find Edges
    Mat edgesOfPicture = new Mat();
    Imgproc.Canny(grayMat, edgesOfPicture, 75, 225);
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(edgesOfPicture, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    List<Rect> rectPre = new List<Rect>();
    List<Rect> rectAfter = new List<Rect>(); // NOTE(review): never populated
    try
    {
        List<MatOfPoint2f> kvadrater = new List<MatOfPoint2f>(); // Swedish: "squares"
        // Walk the top-level contours via the hierarchy's "next sibling" link.
        for (int idx = 0; idx >= 0; idx = (int)hierarchy.get(0, idx)[0])
        {
            MatOfPoint contour = contours[idx];
            Rect rect = Imgproc.boundingRect(contour);
            double contourArea = Imgproc.contourArea(contour); // unused
            matOfPoint2f.fromList(contour.toList());
            Imgproc.approxPolyDP(matOfPoint2f, approxCurve, Imgproc.arcLength(matOfPoint2f, true) * 0.02, true);
            long total = approxCurve.total();
            if (total > 0)
            {
                kvadrater.Add(approxCurve);
                // Cosines of the angle at each polygon vertex.
                ArrayList cos = new ArrayList();
                Point[] points = approxCurve.toArray();
                for (int j = 2; j < total + 1; j++)
                {
                    cos.Add(angle(points[(int)(j % total)], points[j - 2], points[j - 1]));
                }
                cos.Sort();
                Double minCos = (Double)cos[0];
                Double maxCos = (Double)cos[cos.Count - 1];
                // A quad whose corner cosines are all near 0 is ~rectangular.
                bool isRect = total == 4 && minCos >= -0.1 && maxCos <= 0.3;
                if (isRect)
                {
                    if (rect.width > 20)
                    {
                        rectPre.Add(rect);
                    }
                    List<Color> Colors = new List<Color>();
                    List<double[]> colorDoubles = new List<double[]>(); // NOTE(review): unused
                    // Once all 9 stickers have been found, label and sample them.
                    for (int op = 0; op < 9; op++)
                    {
                        if (rectPre.Count == 9)
                        {
                            // (A CoordinateVerifier.Verify(rectPre) call was
                            // disabled here — comment said "do not use LINQ".)
                            var punkt = imgTexture.GetPixel(rect.x + (rect.width / 2), rect.y + (rect.height / 2)); // Swedish: "point"
                            Imgproc.putText(imgMat, op.ToString(), new Point(rectPre[op].x + 20, rectPre[op].y + 30), Core.FONT_HERSHEY_DUPLEX, 3, new Scalar(200));
                            Rgb rgb = new Rgb(punkt.r, punkt.g, punkt.b);
                            var hsv = rgb.To<Hsv>();
                            // Rough hue classification (Swedish: "no color" /
                            // "yellow" / "orange"); the result is currently unused.
                            String farg = "Ingen farg";
                            if (hsv.H >= 45 && hsv.H <= 70)
                            {
                                farg = "Gul";
                            }
                            if (hsv.H >= 10 && hsv.H <= 45)
                            {
                                farg = "Orange";
                            }
                            Colors.Clear();
                            // Average a small pixel block at each sticker center
                            // and map it to a known cube color.
                            for (int q = 0; q < rectPre.Count; q++)
                            {
                                Color[] blockOfColour = imgTexture.GetPixels(rectPre[q].x + (rectPre[q].width / 2), rectPre[q].y + (rectPre[q].height / 2), rectPre[q].width / 3, rectPre[q].height / 3, 0);
                                float r = 0, g = 0, b = 0;
                                foreach (Color pixelBlock in blockOfColour)
                                {
                                    r += pixelBlock.r;
                                    g += pixelBlock.g;
                                    b += pixelBlock.b;
                                }
                                r = r / blockOfColour.Length;
                                g = g / blockOfColour.Length;
                                b = b / blockOfColour.Length;
                                var eColor = _colorDetection.ColorEnumFromScalarColor(new double[] { r * 255, g * 255, b * 255 });
                                var color = ColorDetection.UnityColorFromEnum(eColor);
                                Colors.Add(color);
                            }
                            if (Colors.Count == 9)
                            {
                                ColorTracker.Instance.addToTemp(Colors);
                                foreach (Color c in Colors)
                                {
                                    // print(c.ToString());
                                }
                            }
                        }
                    }
                    Imgproc.drawContours(imgMat, contours, idx, new Scalar(255, 100, 155), 4);
                }
            }
        }
    }
    catch (ArgumentOutOfRangeException e)
    {
        // NOTE(review): swallowed without logging — hierarchy indices can
        // exceed the contour list when they disagree; at minimum log this.
    }
    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(imgMat, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
// Update is called once per frame.
// Variant of the sticker scanner: finds the 9 cube-face rectangles, samples
// their average colors, and pushes them to ColorMap once all 9 are collected.
void Update()
{
    Resources.UnloadUnusedAssets(); //Fixes the memory leak

    // Grab the current webcam frame into a texture, then into a Mat.
    // NOTE(review): a new Texture2D is allocated every frame and never
    // destroyed — the UnloadUnusedAssets call above is papering over that.
    imgTexture = new Texture2D(webcamTexture.width, webcamTexture.height);
    imgTexture.SetPixels(webcamTexture.GetPixels());
    imgTexture.Apply();
    Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, imgMat);
    Mat processedMat = new Mat();

    // 1x1 rectangular kernel: effectively a no-op dilation left in from tuning.
    Imgproc.dilate(imgMat, imgMat, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(1, 1)));
    //Grayscale the picture
    Imgproc.cvtColor(imgMat, processedMat, Imgproc.COLOR_RGB2GRAY);
    //Blur the picture
    Imgproc.GaussianBlur(processedMat, processedMat, new Size(3, 3), 1);
    Imgproc.equalizeHist(processedMat, processedMat);

    //Find Edges
    Mat edgesOfPicture = new Mat();
    Imgproc.Canny(processedMat, edgesOfPicture, 75, 225);
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(edgesOfPicture, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    List<Rect> rects = new List<Rect>();
    try
    {
        // Walk the top-level contours via the hierarchy's "next sibling" link.
        for (int idx = 0; idx >= 0; idx = (int)hierarchy.get(0, idx)[0])
        {
            MatOfPoint contour = contours[idx];
            Rect rect = Imgproc.boundingRect(contour);
            double contourArea = Imgproc.contourArea(contour); // unused
            matOfPoint2f.fromList(contour.toList());
            Imgproc.approxPolyDP(matOfPoint2f, approxCurve, Imgproc.arcLength(matOfPoint2f, true) * 0.02, true);
            long total = approxCurve.total();
            // Only quadrilaterals can be stickers.
            if (total == 4)
            {
                // Cosines of the angle at each quad vertex.
                ArrayList cos = new ArrayList();
                Point[] points = approxCurve.toArray();
                for (int j = 2; j < total + 1; j++)
                {
                    cos.Add(angle(points[(int)(j % total)], points[j - 2], points[j - 1]));
                }
                cos.Sort();
                Double minCos = (Double)cos[0];
                Double maxCos = (Double)cos[cos.Count - 1];
                // Near-zero cosines at every corner => rectangle.
                bool isRect = total == 4 && minCos >= -0.1 && maxCos <= 0.3;
                if (isRect)
                {
                    if (rect.width > 20)
                    {
                        rects.Add(rect);
                    }
                    List<double[]> Colors = new List<double[]>();
                    // Once all 9 stickers have been found, sample their colors.
                    for (int op = 0; op < 10; op++)
                    {
                        if (rects.Count == 9)
                        {
                            allCubiesScaned = true;
                            // NOTE(review): samples at rect.y + rect.height
                            // (the rect's bottom edge); the sibling scanner
                            // uses rect.y + rect.height / 2 — confirm which
                            // coordinate is intended.
                            Color[] blockOfColour = imgTexture.GetPixels(rect.x + rect.width / 2, rect.y + rect.height, rect.width / 3, rect.height / 3, 0);
                            float r = 0, g = 0, b = 0;
                            foreach (Color pixelBlock in blockOfColour)
                            {
                                r += pixelBlock.r;
                                g += pixelBlock.g;
                                b += pixelBlock.b;
                            }
                            r = r / blockOfColour.Length;
                            g = g / blockOfColour.Length;
                            b = b / blockOfColour.Length;
                            Rgb rgb = new Rgb(r, g, b);
                            Colors.Add(new double[] { rgb.R * 255, rgb.G * 255, rgb.B * 255 });
                            print(Colors.Count);
                            if (Colors.Count == 9)
                            {
                                ColorMap.Colors = Colors;
                                ColorMap.Redraw();
                            }
                        }
                    }
                    Imgproc.drawContours(imgMat, contours, idx, new Scalar(255, 100, 155), 4);
                }
            }
        }
    }
    catch (ArgumentOutOfRangeException e)
    {
        // NOTE(review): swallowed without logging — hierarchy indices can
        // exceed the contour list when they disagree; at minimum log this.
    }
    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(imgMat, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}