/// <summary>
/// Detects ArUco markers in-place on the given frame, draws the detections, and
/// optionally estimates marker poses and draws rejected candidates.
/// </summary>
/// <param name="MatImg">RGBA camera frame; converted to RGB and drawn on in place. May be null (no-op).</param>
public void MarkerProcessing(Mat MatImg)
{
    // Nothing to do without a frame.
    if (MatImg is null)
    {
        return;
    }

    // Detection and drawing below operate on a 3-channel RGB image.
    Imgproc.cvtColor(MatImg, MatImg, Imgproc.COLOR_RGBA2RGB);

    Aruco.detectMarkers(MatImg, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

    // No markers found this frame: leave overlays untouched.
    if (ids.total() <= 0)
    {
        return;
    }

    // Cache the detected ids as a plain int list for the rest of the app.
    OpenCVForUnity.UtilsModule.Converters.Mat_to_vector_int(ids, list_ids);

    // Outline accepted markers in green.
    Aruco.drawDetectedMarkers(MatImg, corners, ids, new Scalar(0, 255, 0));

    if (applyEstimationPose)
    {
        EstimatePoseCanonicalMarker(MatImg);
    }

    // Optionally outline candidates that failed identification in red.
    if (showRejectedCorners && rejectedCorners.Count > 0)
    {
        Aruco.drawDetectedMarkers(MatImg, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
    }
}
/// <summary>
/// Main-thread hook run after a detection pass: refreshes the preview texture
/// (markers + per-marker axes) and applies the latest AR transform, then clears
/// the "detecting" flag.
/// </summary>
private void OnDetectionDone()
{
    if (displayCameraPreview)
    {
        Imgproc.cvtColor(downScaleRgbaMat, rgbMat4preview, Imgproc.COLOR_RGBA2RGB);

        if (ids.total() > 0)
        {
            Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(255, 0, 0));

            // BUGFIX: the whole rvecs/tvecs matrices were previously passed for every
            // iteration, so the FIRST marker's axes were drawn ids.total() times.
            // Slice out row i so each detected marker gets its own pose axes
            // (estimatePoseSingleMarkers yields one row per marker).
            for (int i = 0; i < ids.total(); i++)
            {
                using (Mat rvec = rvecs.row(i))
                using (Mat tvec = tvecs.row(i))
                {
                    Aruco.drawAxis(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                }
            }
        }

        OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat4preview, texture);
    }

    if (applyEstimationPose)
    {
        if (hasUpdatedARTransformMatrix)
        {
            hasUpdatedARTransformMatrix = false;

            // Apply camera transform matrix (camera local -> world).
            ARM = arCamera.transform.localToWorldMatrix * ARM;
            ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
        }
    }

    isDetecting = false;
}
/// <summary>
/// Detects 4x4 ArUco markers on the cached frame, maps each detected marker
/// corner into a fixed source-point slot, converts those screen points into
/// camera rays, and updates the per-face completeness flags.
/// </summary>
void ArucoDetection()
{
    // Detect ArUco markers
    Dictionary dict = Aruco.getPredefinedDictionary(Aruco.DICT_4X4_1000);
    Aruco.detectMarkers(cached_initMat, dict, corners, ids);
    Aruco.drawDetectedMarkers(cached_initMat, corners, ids);
    // Debug.Log("AD - 93: Markers Detected");
    // Debug.LogFormat("Corners: {0}", corners.Count);

    // Get desired corner of marker
    Point[] src_point_array = new Point[POINT_COUNT];
    for (int i = 0; i < corners.Count; i++)
    {
        // Marker id encodes both the target slot (via arucoTosrc) and which of
        // the marker's 4 corners to sample (id % 4).
        int aruco_id = (int)(ids.get(i, 0)[0]);
        int src_i = arucoTosrc(aruco_id);
        int corner_i = aruco_id % 4;
        // Debug.LogFormat("AD - 101: aruco_id: {0}; corner_i: {1}; src_i: {2}", aruco_id, corner_i, src_i);

        // Store corner[i] into spa[src_i]
        src_point_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);

        // Display the corner as circle on outMat.
        Imgproc.circle(cached_initMat, src_point_array[src_i], 10, new Scalar(255, 255, 0));
    }

    // Converting to Ray values for Raycast
    Camera _cam = Camera.main;
    if (_cam != null)
    {
        for (int i = 0; i < POINT_COUNT; i++)
        {
            if (src_point_array[i] != null)
            {
                src_ray_array[i] = _cam.ScreenPointToRay(
                    new Vector3((float)src_point_array[i].x, (float)src_point_array[i].y, 0)).direction;
            }
        }
    }
    // Debug.LogFormat("Detected Direction: {0}", src_ray_array[0]);
    // Debug.LogFormat("Camera Direction: {0}", _cam.transform.forward);

    // Count non-null source points
    // NOTE(review): this result is stored in a LOCAL that is never read — sibling
    // variants of this method assign a field named spa_full instead. Confirm
    // whether this should write the field. Also note the hardcoded 7 vs POINT_COUNT.
    bool spa_full = (count_src_nulls() == 7);

    // Check if have valid faces
    for (int i = 0; i < FACE_COUNT; i++)
    {
        // faceX_full[i] = check_faces(i);
        faceX_full[i] = check_faces(i);
    }

    // Flip vertically into the output mat (OpenCV image origin vs. display).
    Core.flip(cached_initMat, outMat, 0);
}
/// <summary>
/// Debug-instrumented variant of the marker-detection pass: detects 4x4 ArUco
/// markers, fills both the persistent (src_point_array) and per-frame
/// (src_recent_array) corner slots, reports the marker count in the UI, and
/// updates both sets of face-completeness flags.
/// </summary>
void ArucoDetection()
{
    Dictionary dict = Aruco.getPredefinedDictionary(Aruco.DICT_4X4_1000);
    Aruco.detectMarkers(cached_initMat, dict, corners, ids);
    Aruco.drawDetectedMarkers(cached_initMat, corners, ids);
    Debug.Log("AD: Markers Detected");

    // Reset the "seen this frame" array; src_point_array keeps older detections.
    src_recent_array = new Point[7];

    for (int i = 0; i < corners.Count; i++)
    {
        // Marker id encodes the target slot (arucoTosrc) and which of the
        // marker's 4 corners to sample (id % 4).
        int aruco_id = (int)(ids.get(i, 0)[0]);
        int src_i = arucoTosrc(aruco_id);
        int corner_i = aruco_id % 4;

        // Store corner[i] into spa[src_i]
        src_point_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);
        src_recent_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);
        Debug.LogFormat("aruco_id: {0}; corner: {1}; src_i: {2}", aruco_id, src_point_array[src_i], src_i);

        // Display the corner as circle on outMat.
        Imgproc.circle(cached_initMat, src_point_array[src_i], 10, new Scalar(255, 255, 0));
    }
    Debug.Log("AD: src_point_array and recent populated");

    // Count non-null source points
    int markerCount = count_src_nulls();
    Debug.LogFormat("AD: markerCount = {0}", markerCount);
    m_ImageInfo.text = string.Format("Number of markers detected: {0}", markerCount);
    // All 7 slots filled at least once -> full set available.
    spa_full = (markerCount == 7);

    // Check if have valid faces
    for (int i = 0; i < 3; i++)
    {
        // faceX_full[i] = check_faces(i);
        faceX_full[i] = check_faces(i);
    }
    Debug.LogFormat("AD: full faces: 1-{0}, 2-{1}, 3-{2}", faceX_full[0], faceX_full[1], faceX_full[2]);

    for (int i = 0; i < 3; i++)
    {
        // faceX_full[i] = check_faces(i);
        faceX_recent_full[i] = check_recent_faces(i);
    }
    Debug.LogFormat("AD: recent full faces: 1-{0}, 2-{1}, 3-{2}", faceX_recent_full[0], faceX_recent_full[1], faceX_recent_full[2]);

    // Flip vertically into the output mat (OpenCV image origin vs. display).
    Core.flip(cached_initMat, outMat, 0);
    Debug.Log("AD: done");
}
/// <summary>
/// Main-thread hook run after a detection pass: records a tracking tick,
/// refreshes the preview texture (markers plus per-marker frame axes), applies
/// the most recent AR transform, and clears the "detecting" flag.
/// </summary>
private void OnDetectionDone()
{
    DebugUtils.TrackTick();

    if (displayCameraPreview)
    {
        // The detection buffer is grayscale; expand to RGB for the overlays.
        Imgproc.cvtColor(downScaleMat, rgbMat4preview, Imgproc.COLOR_GRAY2RGB);

        long detected = ids.total();
        if (detected > 0)
        {
            Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

            if (applyEstimationPose)
            {
                for (int idx = 0; idx < detected; idx++)
                {
                    // One pose row per marker; slice out row idx.
                    using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, idx, 1, 1)))
                    using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, idx, 1, 1)))
                    {
                        // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                        Calib3d.drawFrameAxes(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                    }
                }
            }
        }

        Utils.fastMatToTexture2D(rgbMat4preview, texture);
    }

    if (applyEstimationPose && hasUpdatedARTransformMatrix)
    {
        hasUpdatedARTransformMatrix = false;

        // Apply the cameraToWorld matrix with the Z-axis inverted.
        ARM = arCamera.cameraToWorldMatrix * invertZM * ARM;

        if (enableLerpFilter)
        {
            arGameObject.SetMatrix4x4(ARM);
        }
        else
        {
            ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
        }
    }

    isDetecting = false;
}
/// <summary>
/// Detects ArUco markers on an RGB copy of the input frame, advances per-marker
/// hide timers (deactivating a marker's AR object after <c>waitTime</c>), draws
/// detections, and returns the annotated RGB frame.
/// </summary>
/// <param name="MatImg">RGBA source frame; not modified.</param>
/// <returns>A newly allocated RGB Mat with detection overlays (caller owns/disposes it).</returns>
public Mat MarkerProcessing(Mat MatImg)
{
    Mat resultMat = new Mat();
    Imgproc.cvtColor(MatImg, resultMat, Imgproc.COLOR_RGBA2RGB);

    Aruco.detectMarkers(resultMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

    // add the time to each marker
    // it is not premitted to change value in foreach, so iterate a snapshot of
    // the keys while mutating the Timer dictionary.
    List<int> temp_list = new List<int>();
    temp_list.AddRange(Timer.Keys);
    foreach (int key in temp_list)
    {
        Timer[key] = Timer[key] + Time.deltaTime;
        if (Timer[key] > waitTime)
        {
            // NOTE(review): assumes "ARGameObjects" and a child named idDict[key]
            // always exist in the scene; Find returning null would throw here.
            GameObject parentObject = GameObject.Find("ARGameObjects");
            GameObject HidearGameObject = parentObject.transform.Find(idDict[key]).gameObject;
            HidearGameObject.SetActive(false);
            Timer[key] = 0.0f;
        }
    }

    if (ids.total() > 0)
    {
        OpenCVForUnity.UtilsModule.Converters.Mat_to_vector_int(ids, list_ids);

        // draw markers.
        Aruco.drawDetectedMarkers(resultMat, corners, ids, new Scalar(0, 255, 0));

        // estimate pose.
        if (applyEstimationPose)
        {
            EstimatePoseCanonicalMarker(resultMat);
        }
        if (showRejectedCorners && rejectedCorners.Count > 0)
        {
            Aruco.drawDetectedMarkers(resultMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
        }
    }

    // BUGFIX: removed an unused "BlackImage" Mat that was allocated here on every
    // call and never disposed or returned — a native-memory leak. Recreate one
    // locally if a blank frame is ever needed:
    //   new Mat(resultMat.rows(), resultMat.cols(), CvType.CV_8UC1, new Scalar(0, 0, 0))
    return resultMat;
}
/// <summary>
/// Detects 4x4 ArUco markers on the cached frame, fills both the persistent
/// (src_point_array) and per-frame (src_recent_array) corner slots, and updates
/// the set-completeness flag and both groups of face-completeness flags.
/// </summary>
void ArucoDetection()
{
    Dictionary dict = Aruco.getPredefinedDictionary(Aruco.DICT_4X4_1000);
    Aruco.detectMarkers(cached_initMat, dict, corners, ids);
    Aruco.drawDetectedMarkers(cached_initMat, corners, ids);

    // Reset the "seen this frame" array; src_point_array keeps older detections.
    src_recent_array = new Point[7];

    for (int i = 0; i < corners.Count; i++)
    {
        // Marker id encodes the target slot (arucoTosrc) and which of the
        // marker's 4 corners to sample (id % 4).
        int aruco_id = (int)(ids.get(i, 0)[0]);
        int src_i = arucoTosrc(aruco_id);
        int corner_i = aruco_id % 4;

        // Store corner[i] into spa[src_i]
        src_point_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);
        src_recent_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);

        // Display the corner as circle on outMat.
        // Imgproc.circle(cached_initMat, src_point_array[src_i], 5, new Scalar(255, 0, 255));
    }

    // Count non-null source points.
    // BUGFIX: count AFTER the detection loop so spa_full reflects THIS frame's
    // detections. Previously count_src_nulls() ran before src_point_array was
    // updated, making the flag one frame stale (the sibling variant of this
    // method counts after the loop).
    int markerCount = count_src_nulls();
    spa_full = (markerCount == 7);

    // Check if have valid faces
    for (int i = 0; i < 3; i++)
    {
        faceX_full[i] = check_faces(i);
    }
    for (int i = 0; i < 3; i++)
    {
        faceX_recent_full[i] = check_recent_faces(i);
    }
}
/// <summary>
/// Frame callback executed on the video-capture thread: optionally downscales
/// the gray frame, picks/builds camera intrinsics, detects ArUco markers,
/// computes the AR transform from the first detected marker, renders an optional
/// preview, and enqueues the Unity-API work (texture upload, transform apply,
/// frame disposal) back to the app thread.
/// </summary>
public void OnFrameMatAcquired(Mat grayMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
{
    isDetectingInFrameArrivedThread = true;

    DebugUtils.VideoTick();

    Mat downScaleMat = null;
    float DOWNSCALE_RATIO;
    if (enableDownScale)
    {
        downScaleMat = imageOptimizationHelper.GetDownScaleMat(grayMat);
        DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
    }
    else
    {
        downScaleMat = grayMat;
        DOWNSCALE_RATIO = 1.0f;
    }

    // Either reuse stored calibration or build intrinsics from the frame metadata.
    // NOTE(review): only the principal point is divided by DOWNSCALE_RATIO here,
    // not the focal lengths — confirm CreateCameraMatrix accounts for this.
    Mat camMatrix = null;
    MatOfDouble distCoeffs = null;
    if (useStoredCameraParameters)
    {
        camMatrix = this.camMatrix;
        distCoeffs = this.distCoeffs;
    }
    else
    {
        camMatrix = CreateCameraMatrix(cameraIntrinsics.FocalLengthX, cameraIntrinsics.FocalLengthY, cameraIntrinsics.PrincipalPointX / DOWNSCALE_RATIO, cameraIntrinsics.PrincipalPointY / DOWNSCALE_RATIO);
        distCoeffs = new MatOfDouble(cameraIntrinsics.RadialDistK1, cameraIntrinsics.RadialDistK2, cameraIntrinsics.RadialDistK3, cameraIntrinsics.TangentialDistP1, cameraIntrinsics.TangentialDistP2);
    }

    if (enableDetection)
    {
        // Detect markers and estimate Pose
        Aruco.detectMarkers(downScaleMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

        if (applyEstimationPose && ids.total() > 0)
        {
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

            for (int i = 0; i < ids.total(); i++)
            {
                //This example can display ARObject on only first detected marker.
                if (i == 0)
                {
                    // Convert to unity pose data.
                    double[] rvecArr = new double[3];
                    rvecs.get(0, 0, rvecArr);
                    double[] tvecArr = new double[3];
                    tvecs.get(0, 0, tvecArr);
                    // Undo the downscale on the depth (Z) component only.
                    tvecArr[2] /= DOWNSCALE_RATIO;
                    PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                    // Create transform matrix.
                    transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                    // ARM is shared with the app thread; guard with the sync lock.
                    lock (sync)
                    {
                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z-axis inverted matrix.
                        ARM = ARM * invertZM;
                    }

                    hasUpdatedARTransformMatrix = true;

                    break;
                }
            }
        }
    }

    Mat rgbMat4preview = null;
    if (displayCameraPreview)
    {
        rgbMat4preview = new Mat();
        Imgproc.cvtColor(downScaleMat, rgbMat4preview, Imgproc.COLOR_GRAY2RGB);

        if (ids.total() > 0)
        {
            Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

            if (applyEstimationPose)
            {
                for (int i = 0; i < ids.total(); i++)
                {
                    // One pose row per marker; slice out row i.
                    using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                    using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                    {
                        // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                        Calib3d.drawFrameAxes(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                    }
                }
            }
        }
    }

    DebugUtils.TrackTick();

    // Hop back to the app thread for Unity API calls; the lambda also owns
    // disposal of grayMat and the preview mat.
    Enqueue(() =>
    {
        if (!webCamTextureToMatHelper.IsPlaying())
        {
            return;
        }

        if (displayCameraPreview && rgbMat4preview != null)
        {
            Utils.fastMatToTexture2D(rgbMat4preview, texture);
            rgbMat4preview.Dispose();
        }

        if (applyEstimationPose)
        {
            if (hasUpdatedARTransformMatrix)
            {
                hasUpdatedARTransformMatrix = false;

                lock (sync)
                {
                    // Apply camera transform matrix.
                    ARM = cameraToWorldMatrix * invertZM * ARM;

                    if (enableLerpFilter)
                    {
                        arGameObject.SetMatrix4x4(ARM);
                    }
                    else
                    {
                        ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                    }
                }
            }
        }

        grayMat.Dispose();
    });

    isDetectingInFrameArrivedThread = false;
}
/// <summary>
/// Still-image ArUco pipeline: builds synthetic pinhole intrinsics from the
/// image size, matches the Unity camera FOV to them, detects markers, and
/// optionally estimates poses — driving either the AR camera or the AR object
/// from the first detected marker — before writing the annotated image out.
/// </summary>
private void DetectMarkers()
{
    Utils.texture2DToMat(imgTexture, rgbMat);
    Debug.Log("imgMat dst ToString " + rgbMat.ToString());

    // Scale the display quad to the source image size (pixels as units).
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = rgbMat.width();
    float height = rgbMat.height();

    // Fit the orthographic preview camera to the screen aspect ratio.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    // set camera parameters.
    // Synthetic pinhole intrinsics: focal length = max image dimension,
    // principal point at the image center, zero distortion.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // calibration camera matrix values.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];

    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale)
    {
        arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    }
    else
    {
        arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    // Display objects near the camera.
    arCamera.nearClipPlane = 0.01f;

    Mat ids = new Mat();
    List <Mat> corners = new List <Mat> ();
    List <Mat> rejectedCorners = new List <Mat> ();
    Mat rvecs = new Mat();
    Mat tvecs = new Mat();
    Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
    DetectorParameters detectorParams = DetectorParameters.create();
    Dictionary dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

    // detect markers.
    Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

    // if at least one marker detected
    if (ids.total() > 0)
    {
        Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));

        // estimate pose.
        if (applyEstimationPose)
        {
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

            for (int i = 0; i < ids.total(); i++)
            {
                // Slice out marker i's pose (one row per marker) for axis drawing.
                using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                {
                    // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                    Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                }

                // This example can display the ARObject on only first detected marker.
                if (i == 0)
                {
                    // Get translation vector
                    double[] tvecArr = tvecs.get(i, 0);

                    // Get rotation vector
                    double[] rvecArr = rvecs.get(i, 0);
                    Mat rvec = new Mat(3, 1, CvType.CV_64FC1);
                    rvec.put(0, 0, rvecArr);

                    // Convert rotation vector to rotation matrix.
                    Calib3d.Rodrigues(rvec, rotMat);
                    double[] rotMatArr = new double[rotMat.total()];
                    rotMat.get(0, 0, rotMatArr);

                    // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                    // (row-major [R|t] with a homogeneous bottom row)
                    Matrix4x4 transformationM = new Matrix4x4(); // from OpenCV
                    transformationM.SetRow(0, new Vector4((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
                    transformationM.SetRow(1, new Vector4((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
                    transformationM.SetRow(2, new Vector4((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                    Debug.Log("transformationM " + transformationM.ToString());

                    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                    Debug.Log("invertYM " + invertYM.ToString());

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    Matrix4x4 ARM = invertYM * transformationM * invertYM;

                    if (shouldMoveARCamera)
                    {
                        // Keep the object fixed and move the camera by the inverse pose.
                        ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;
                        Debug.Log("ARM " + ARM.ToString());
                        ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
                    }
                    else
                    {
                        // Keep the camera fixed and place the object by the pose.
                        ARM = arCamera.transform.localToWorldMatrix * ARM;
                        Debug.Log("ARM " + ARM.ToString());
                        ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                    }
                }
            }
        }
    }

    if (showRejectedCorners && rejectedCorners.Count > 0)
    {
        Aruco.drawDetectedMarkers(rgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
    }

    Utils.matToTexture2D(rgbMat, texture);
}
// Use this for initialization
/// <summary>
/// One-shot ArUco demo on a static texture: builds synthetic pinhole intrinsics
/// from the image size, matches the Unity camera FOV, detects markers, optionally
/// estimates poses (driving the AR camera or AR object from the first marker),
/// and blits the annotated result onto this GameObject's material.
/// </summary>
void Start()
{
    Mat rgbMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

    Utils.texture2DToMat(imgTexture, rgbMat);
    Debug.Log("imgMat dst ToString " + rgbMat.ToString());

    // Scale the display quad to the source image size (pixels as units).
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = rgbMat.width();
    float height = rgbMat.height();

    // Fit the orthographic preview camera to the screen aspect ratio.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    // set cameraparam.
    // Synthetic pinhole intrinsics: focal length = max image dimension,
    // principal point at the image center, zero distortion.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // calibration camera.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];

    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale)
    {
        ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    }
    else
    {
        ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    Mat ids = new Mat();
    List <Mat> corners = new List <Mat> ();
    List <Mat> rejected = new List <Mat> ();
    Mat rvecs = new Mat();
    Mat tvecs = new Mat();
    Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
    DetectorParameters detectorParams = DetectorParameters.create();
    Dictionary dictionary = Aruco.getPredefinedDictionary(dictionaryId);

    // detect markers.
    Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

    // estimate pose.
    if (applyEstimationPose && ids.total() > 0)
    {
        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
    }

    if (ids.total() > 0)
    {
        Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

        if (applyEstimationPose)
        {
            for (int i = 0; i < ids.total(); i++)
            {
                // Debug.Log ("ids.dump() " + ids.dump ());

                // NOTE(review): the full rvecs/tvecs matrices are passed here rather
                // than row i — this appears to draw only the first marker's axes on
                // every iteration; other variants in this file slice per marker.
                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);

                // This example can display ARObject on only first detected marker.
                if (i == 0)
                {
                    // position
                    double[] tvec = tvecs.get(i, 0);

                    // rotation
                    double[] rv = rvecs.get(i, 0);
                    Mat rvec = new Mat(3, 1, CvType.CV_64FC1);
                    rvec.put(0, 0, rv[0]);
                    rvec.put(1, 0, rv[1]);
                    rvec.put(2, 0, rv[2]);
                    // Rotation vector -> 3x3 rotation matrix.
                    Calib3d.Rodrigues(rvec, rotMat);

                    // Row-major [R|t] with a homogeneous bottom row.
                    Matrix4x4 transformationM = new Matrix4x4(); // from OpenCV
                    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                    Debug.Log("transformationM " + transformationM.ToString());

                    Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                    Debug.Log("invertZM " + invertZM.ToString());

                    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                    Debug.Log("invertYM " + invertYM.ToString());

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    Matrix4x4 ARM = invertYM * transformationM;

                    // Apply Z axis inverted matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        // Keep the object fixed and move the camera by the inverse pose.
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        Debug.Log("ARM " + ARM.ToString());
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        // Keep the camera fixed and place the object by the pose.
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        Debug.Log("ARM " + ARM.ToString());
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }
            }
        }
    }

    if (showRejected && rejected.Count > 0)
    {
        Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
    }

    Texture2D texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);

    Utils.matToTexture2D(rgbMat, texture);

    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
}
// Update is called once per frame
/// <summary>
/// Per-frame webcam pipeline supporting several marker types (canonical marker,
/// GridBoard, ChArUco board, ChArUco diamond): detects, optionally refines,
/// draws, and estimates pose, then uploads the annotated frame to the texture.
/// </summary>
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

        // detect markers.
        Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

        // refine marker detection.
        // Board layouts allow recovering markers that were initially rejected.
        if (refineMarkerDetection && (markerType == MarkerType.GridBoard || markerType == MarkerType.ChArUcoBoard))
        {
            switch (markerType)
            {
                case MarkerType.GridBoard:
                    Aruco.refineDetectedMarkers(rgbMat, gridBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
                    break;
                case MarkerType.ChArUcoBoard:
                    Aruco.refineDetectedMarkers(rgbMat, charucoBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
                    break;
            }
        }

        // if at least one marker detected
        if (ids.total() > 0)
        {
            if (markerType != MarkerType.ChArUcoDiamondMarker)
            {
                if (markerType == MarkerType.ChArUcoBoard)
                {
                    // Interpolate chessboard corners from the detected markers.
                    Aruco.interpolateCornersCharuco(corners, ids, rgbMat, charucoBoard, charucoCorners, charucoIds, camMatrix, distCoeffs, charucoMinMarkers);

                    // draw markers.
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                    if (charucoIds.total() > 0)
                    {
                        Aruco.drawDetectedCornersCharuco(rgbMat, charucoCorners, charucoIds, new Scalar(0, 0, 255));
                    }
                }
                else
                {
                    // draw markers.
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                }

                // estimate pose.
                if (applyEstimationPose)
                {
                    switch (markerType)
                    {
                        default:
                        case MarkerType.CanonicalMarker:
                            EstimatePoseCanonicalMarker(rgbMat);
                            break;
                        case MarkerType.GridBoard:
                            EstimatePoseGridBoard(rgbMat);
                            break;
                        case MarkerType.ChArUcoBoard:
                            EstimatePoseChArUcoBoard(rgbMat);
                            break;
                    }
                }
            }
            else
            {
                // detect diamond markers.
                Aruco.detectCharucoDiamond(rgbMat, corners, ids, diamondSquareLength / diamondMarkerLength, diamondCorners, diamondIds, camMatrix, distCoeffs);

                // draw markers.
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                // draw diamond markers.
                Aruco.drawDetectedDiamonds(rgbMat, diamondCorners, diamondIds, new Scalar(0, 0, 255));

                // estimate pose.
                if (applyEstimationPose)
                {
                    EstimatePoseChArUcoDiamondMarker(rgbMat);
                }
            }
        }

        if (showRejectedCorners && rejectedCorners.Count > 0)
        {
            Aruco.drawDetectedMarkers(rgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
        }

        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.fastMatToTexture2D(rgbMat, texture);
    }
}
// Update is called once per frame
/// <summary>
/// Per-frame webcam pipeline (older OpenCVForUnity API): detects markers,
/// optionally estimates poses, draws results, and applies the first marker's
/// pose to either the AR camera or the AR object.
/// </summary>
void Update()
{
    if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

        // detect markers and estimate pose
        Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);
        // Aruco.detectMarkers (imgMat, dictionary, corners, ids);
        if (estimatePose && ids.total() > 0)
        {
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
        }

        // draw results
        if (ids.total() > 0)
        {
            Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

            if (estimatePose)
            {
                for (int i = 0; i < ids.total(); i++)
                {
                    // NOTE(review): the full rvecs/tvecs matrices are passed here rather
                    // than row i, which appears to draw only the first marker's axes on
                    // every iteration — confirm against variants that slice per marker.
                    Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);

                    //This sample can display ARObject on only first detected marker.
                    if (i == 0)
                    {
                        // NOTE(review): Rodrigues is given the whole rvecs matrix, not a
                        // 3x1 slice — presumably it reads only the first pose; verify.
                        Calib3d.Rodrigues(rvecs, rotMat);

                        // Row-major [R|t] of the first marker with a homogeneous bottom row.
                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvecs.get(0, 0) [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvecs.get(0, 0) [1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvecs.get(0, 0) [2]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        if (shouldMoveARCamera)
                        {
                            // Keep the object fixed; move the camera by the inverse pose
                            // (handedness conversion folded into the multiplication order).
                            ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                            // Debug.Log ("ARM " + ARM.ToString ());
                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                        }
                        else
                        {
                            // Keep the camera fixed; place the object by the pose.
                            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                            // Debug.Log ("ARM " + ARM.ToString ());
                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                        }
                    }
                }
            }
        }

        if (showRejected && rejected.Count > 0)
        {
            Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
        }

        Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
/// <summary>
/// Detects ArUco markers in the given RGB image using synthetic pinhole
/// intrinsics derived from the image size, estimates each marker's pose, and
/// returns one detection (id + OpenCV-convention row-major [R|t] matrix) per
/// marker.
/// </summary>
/// <param name="rgbMat">RGB input image; drawn on only when _debug is set.</param>
/// <returns>List of detections; empty when no markers are found.</returns>
public List <ZOArucoTrackerDetection> DetectMarkers(Mat rgbMat)
{
    List <ZOArucoTrackerDetection> results = new List <ZOArucoTrackerDetection>();

    // Debug.Log("imgMat dst ToString " + rgbMat.ToString());

    float width = rgbMat.width();
    float height = rgbMat.height();

    // NOTE(review): imageSizeScale depends on Screen dimensions even though this
    // method otherwise only processes the image — confirm that is intentional.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        // Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    }
    else
    {
        // Camera.main.orthographicSize = height / 2;
    }

    // set camera parameters.
    // Synthetic pinhole intrinsics: focal length = max image dimension,
    // principal point at the image center, zero distortion.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    // Debug.Log("camMatrix " + camMatrix.dump());

    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    // Debug.Log("distCoeffs " + distCoeffs.dump());

    // calibration camera matrix values.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];

    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

    // Debug.Log("imageSize " + imageSize.ToString());
    // Debug.Log("apertureWidth " + apertureWidth);
    // Debug.Log("apertureHeight " + apertureHeight);
    // Debug.Log("fovx " + fovx[0]);
    // Debug.Log("fovy " + fovy[0]);
    // Debug.Log("focalLength " + focalLength[0]);
    // Debug.Log("principalPoint " + principalPoint.ToString());
    // Debug.Log("aspectratio " + aspectratio[0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

    // Debug.Log("fovXScale " + fovXScale);
    // Debug.Log("fovYScale " + fovYScale);

    Mat ids = new Mat();
    List <Mat> corners = new List <Mat>();
    List <Mat> rejectedCorners = new List <Mat>();
    Mat rvecs = new Mat();
    Mat tvecs = new Mat();
    Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
    DetectorParameters detectorParams = DetectorParameters.create();
    Dictionary dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

    // detect markers.
    Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);
    // Debug.Log("INFO: Number of markers detected: " + ids.total());

    // if at least one marker detected
    if (ids.total() > 0)
    {
        if (_debug)
        {
            Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
        }

        // estimate pose.
        Aruco.estimatePoseSingleMarkers(corners, _markerLengthMeters, camMatrix, distCoeffs, rvecs, tvecs);

        for (int i = 0; i < ids.total(); i++)
        {
            // Get translation vector
            double[] tvecArr = tvecs.get(i, 0);

            // Get rotation vector
            double[] rvecArr = rvecs.get(i, 0);
            Mat rvec = new Mat(3, 1, CvType.CV_64FC1);
            rvec.put(0, 0, rvecArr);

            // Convert rotation vector to rotation matrix.
            Calib3d.Rodrigues(rvec, rotMat);
            double[] rotMatArr = new double[rotMat.total()];
            rotMat.get(0, 0, rotMatArr);

            // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
            // (row-major [R|t] with a homogeneous bottom row; still OpenCV-handed)
            Matrix4x4 transformationM = new Matrix4x4(); // from OpenCV
            transformationM.SetRow(0, new Vector4((float)rotMatArr[0], (float)rotMatArr[1], (float)rotMatArr[2], (float)tvecArr[0]));
            transformationM.SetRow(1, new Vector4((float)rotMatArr[3], (float)rotMatArr[4], (float)rotMatArr[5], (float)tvecArr[1]));
            transformationM.SetRow(2, new Vector4((float)rotMatArr[6], (float)rotMatArr[7], (float)rotMatArr[8], (float)tvecArr[2]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
            // Debug.Log("transformationM " + transformationM.ToString());

            ZOArucoTrackerDetection detection = new ZOArucoTrackerDetection();
            // Read marker i's id out of the Nx1 int Mat.
            int [] currentId = new int[1];
            // ids.get(0, i, currentId);
            ids.get(i, 0, currentId);
            detection.arucoId = currentId[0];
            detection.transform = transformationM;
            results.Add(detection);
        }
    }

    return results;
}
/// <summary>
/// Frame callback: detects ArUco markers on a downscaled gray copy of the incoming BGRA frame,
/// estimates the pose of the FIRST detected marker, optionally renders a debug preview, and then
/// marshals texture upload / AR transform application onto the Unity app thread.
/// NOTE(review): this runs off the app thread; `sync` guards ARM against the app-thread callback below.
/// </summary>
/// <param name="bgraMat">Camera frame in BGRA; disposed on the app thread at the end of this call.</param>
/// <param name="projectionMatrix">Camera projection matrix (unused in the visible body).</param>
/// <param name="cameraToWorldMatrix">HoloLens camera-to-world transform, applied to ARM on the app thread.</param>
public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
{
    downScaleFrameMat = imageOptimizationHelper.GetDownScaleMat(bgraMat);

    if (enableDetection) {
        Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);

        // Detect markers and estimate Pose
        Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

        if (applyEstimationPose && ids.total() > 0) {
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

            for (int i = 0; i < ids.total(); i++) {
                //This example can display ARObject on only first detected marker.
                if (i == 0) {
                    // Convert to unity pose data.
                    double[] rvecArr = new double[3];
                    rvecs.get(0, 0, rvecArr);
                    double[] tvecArr = new double[3];
                    tvecs.get(0, 0, tvecArr);
                    // Compensate the depth (z) for the downscaled detection image.
                    // NOTE(review): only z is rescaled here — presumably x/y are consumed in a
                    // space where the downscale cancels out; confirm against the projection setup.
                    tvecArr[2] /= imageOptimizationHelper.downscaleRatio;
                    PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                    // Changes in pos/rot below these thresholds are ignored.
                    if (enableLowPassFilter) {
                        ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                    }
                    oldPoseData = poseData;

                    // Create transform matrix.
                    transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                    lock (sync){
                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;
                        // Apply Z-axis inverted matrix.
                        ARM = ARM * invertZM;
                    }
                    hasUpdatedARTransformMatrix = true;
                    break;  // only the first marker drives the AR object
                }
            }
        }
    }

    // Optional debug preview: drawn off-thread, uploaded to the texture on the app thread below.
    Mat rgbMat4preview = null;
    if (displayCameraPreview) {
        rgbMat4preview = new Mat();
        Imgproc.cvtColor(downScaleFrameMat, rgbMat4preview, Imgproc.COLOR_BGRA2RGB);

        if (ids.total() > 0) {
            Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

            if (applyEstimationPose) {
                for (int i = 0; i < ids.total(); i++) {
                    // Slice the i-th 3x1 pose vectors out of the per-marker result matrices.
                    using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.Rect(0, i, 1, 1)))
                    using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.Rect(0, i, 1, 1))) {
                        // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                        Aruco.drawAxis(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                    }
                }
            }
        }
    }

    // Unity objects (texture, transforms) may only be touched on the app thread.
    UnityEngine.WSA.Application.InvokeOnAppThread(() => {
        if (!webCamTextureToMatHelper.IsPlaying()) {
            return;
        }

        if (displayCameraPreview && rgbMat4preview != null) {
            OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat4preview, texture);
        }

        if (applyEstimationPose) {
            if (hasUpdatedARTransformMatrix) {
                hasUpdatedARTransformMatrix = false;
                lock (sync){
                    // Apply camera transform matrix.
                    ARM = cameraToWorldMatrix * invertZM * ARM;
                    ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                }
            }
        }

        // The frame and preview mats are owned by this callback; release them here.
        bgraMat.Dispose();
        if (rgbMat4preview != null) {
            rgbMat4preview.Dispose();
        }
    }, false);
}
// Update is called once per frame
/// <summary>
/// Per-frame pipeline: grab the camera frame, detect ArUco markers, estimate per-marker pose,
/// draw debug overlays on rgbMat, and drive either the AR camera or the AR object from the
/// first detected marker's pose.
/// </summary>
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

        // detect markers.
        Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

        // estimate pose.
        if (applyEstimationPose && ids.total() > 0) {
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
        }

        if (ids.total() > 0) {
            Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

            if (applyEstimationPose) {
                for (int i = 0; i < ids.total(); i++) {
                    // rvecs/tvecs hold one row per detected marker; extract row i as 3x1 vectors.
                    double[] tvec = tvecs.get(i, 0);
                    double[] rv = rvecs.get(i, 0);

                    // BUGFIX: drawAxis expects a single rvec/tvec; previously the whole
                    // rvecs/tvecs matrices were passed for every i. `using` also fixes the
                    // per-frame Mat leak of the old code.
                    using (Mat rvec = new Mat(3, 1, CvType.CV_64FC1))
                    using (Mat tvecMat = new Mat(3, 1, CvType.CV_64FC1)) {
                        rvec.put(0, 0, rv[0]);
                        rvec.put(1, 0, rv[1]);
                        rvec.put(2, 0, rv[2]);
                        tvecMat.put(0, 0, tvec[0]);
                        tvecMat.put(1, 0, tvec[1]);
                        tvecMat.put(2, 0, tvec[2]);

                        Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvecMat, markerLength * 0.5f);

                        // This example can display ARObject on only first detected marker.
                        if (i == 0) {
                            // Rotation vector -> 3x3 rotation matrix.
                            Calib3d.Rodrigues(rvec, rotMat);

                            // Build the OpenCV camera-extrinsic matrix [R|t].
                            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec[0]));
                            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec[1]));
                            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec[2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            ARM = invertYM * transformationM;

                            // Apply Z axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera) {
                                // Move the camera around a fixed AR object.
                                ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                                ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                            } else {
                                // Move the AR object in front of a fixed camera.
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;
                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }
        }

        if (showRejected && rejected.Count > 0) {
            Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
        }

        // BUGFIX: draw the status text on rgbMat — the mat actually uploaded to the texture
        // below — instead of rgbaMat, where it was silently discarded.
        Imgproc.putText(rgbMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
/// <summary>
/// Renders the calibration UI for one frame: converts the gray input to BGR, detects the
/// configured calibration target (ChArUco board, chessboard, or circle grid), draws the detected
/// markers/corners, and overlays current calibration status text on bgrMat.
/// </summary>
/// <param name="grayMat">Grayscale input frame the detectors run on.</param>
/// <param name="bgrMat">Output frame; all drawing and text overlays go here.</param>
private void DrawFrame(Mat grayMat, Mat bgrMat)
{
    Imgproc.cvtColor(grayMat, bgrMat, Imgproc.COLOR_GRAY2BGR);

    switch (markerType) {
        default:
        case MarkerType.ChArUcoBoard:
            // detect markers.
            Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // refine marker detection.
            if (refineMarkerDetection) {
                Aruco.refineDetectedMarkers(grayMat, charucoBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
            }

            // if at least one marker detected
            if (ids.total() > 0) {
                // Interpolate the board's inner (chessboard) corners from the detected markers.
                Aruco.interpolateCornersCharuco(corners, ids, grayMat, charucoBoard, charucoCorners, charucoIds, camMatrix, distCoeffs, charucoMinMarkers);

                // draw markers.
                Aruco.drawDetectedMarkers(bgrMat, corners, ids, new Scalar(0, 255, 0, 255));

                // if at least one charuco corner detected
                if (charucoIds.total() > 0) {
                    Aruco.drawDetectedCornersCharuco(bgrMat, charucoCorners, charucoIds, new Scalar(0, 0, 255, 255));
                }
            }
            break;
        case MarkerType.ChessBoard:
        case MarkerType.CirclesGlid:
        case MarkerType.AsymmetricCirclesGlid:
            // detect markers.
            MatOfPoint2f points = new MatOfPoint2f();
            bool found = false;

            // Pick the detector matching the configured target type.
            switch (markerType) {
                default:
                case MarkerType.ChessBoard:
                    found = Calib3d.findChessboardCorners(grayMat, new Size((int)squaresX, (int)squaresY), points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);
                    break;
                case MarkerType.CirclesGlid:
                    found = Calib3d.findCirclesGrid(grayMat, new Size((int)squaresX, (int)squaresY), points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
                    break;
                case MarkerType.AsymmetricCirclesGlid:
                    found = Calib3d.findCirclesGrid(grayMat, new Size((int)squaresX, (int)squaresY), points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
                    break;
            }

            if (found) {
                // Sub-pixel refinement only applies to chessboard corners.
                if (markerType == MarkerType.ChessBoard) {
                    Imgproc.cornerSubPix(grayMat, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
                }

                // draw markers.
                Calib3d.drawChessboardCorners(bgrMat, new Size((int)squaresX, (int)squaresY), points, found);
            }
            break;
    }

    // Pull the current intrinsics into flat arrays for the text overlay below.
    double[] camMatrixArr = new double[(int)camMatrix.total()];
    camMatrix.get(0, 0, camMatrixArr);
    double[] distCoeffsArr = new double[(int)distCoeffs.total()];
    distCoeffs.get(0, 0, distCoeffsArr);

    // Shared putText style arguments (font, scale, color, thickness, line type, bottom-left origin).
    int ff = Imgproc.FONT_HERSHEY_SIMPLEX;
    double fs = 0.4;
    Scalar c = new Scalar(255, 255, 255, 255);
    int t = 0;
    int lt = Imgproc.LINE_AA;
    bool blo = false;

    // Captured-frame count comes from a different store depending on the target type.
    int frameCount = (markerType == MarkerType.ChArUcoBoard) ? allCorners.Count : imagePoints.Count;

    Imgproc.putText(bgrMat, frameCount + " FRAME CAPTURED", new Point(bgrMat.cols() - 300, 20), ff, fs, c, t, lt, blo);
    Imgproc.putText(bgrMat, "IMAGE_WIDTH: " + bgrMat.width(), new Point(bgrMat.cols() - 300, 40), ff, fs, c, t, lt, blo);
    Imgproc.putText(bgrMat, "IMAGE_HEIGHT: " + bgrMat.height(), new Point(bgrMat.cols() - 300, 60), ff, fs, c, t, lt, blo);
    Imgproc.putText(bgrMat, "CALIBRATION_FLAGS: " + calibrationFlags, new Point(bgrMat.cols() - 300, 80), ff, fs, c, t, lt, blo);
    Imgproc.putText(bgrMat, "CAMERA_MATRIX: ", new Point(bgrMat.cols() - 300, 100), ff, fs, c, t, lt, blo);
    // One text line per 3-element row of the camera matrix.
    for (int i = 0; i < camMatrixArr.Length; i = i + 3) {
        Imgproc.putText(bgrMat, " " + camMatrixArr[i] + ", " + camMatrixArr[i + 1] + ", " + camMatrixArr[i + 2] + ",", new Point(bgrMat.cols() - 300, 120 + 20 * i / 3), ff, fs, c, t, lt, blo);
    }
    Imgproc.putText(bgrMat, "DISTORTION_COEFFICIENTS: ", new Point(bgrMat.cols() - 300, 180), ff, fs, c, t, lt, blo);
    for (int i = 0; i < distCoeffsArr.Length; ++i) {
        Imgproc.putText(bgrMat, " " + distCoeffsArr[i] + ",", new Point(bgrMat.cols() - 300, 200 + 20 * i), ff, fs, c, t, lt, blo);
    }
    Imgproc.putText(bgrMat, "AVG_REPROJECTION_ERROR: " + repErr, new Point(bgrMat.cols() - 300, 300), ff, fs, c, t, lt, blo);

    // Prompt the user until the first frame is captured.
    if (frameCount == 0) {
        Imgproc.putText(bgrMat, "To calibration start, please press the calibration button or do air tap gesture!", new Point(5, bgrMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
    }
}
// Update is called once per frame
/// <summary>
/// Per-frame pipeline: grab the camera frame, detect ArUco markers, estimate per-marker pose,
/// draw debug overlays on rgbMat, and pose ARGameObject from the first detected marker using
/// direct position/LookRotation conversion (OpenCV right-handed -> Unity left-handed).
/// </summary>
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

        // detect markers and estimate pose
        Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

        if (estimatePose && ids.total() > 0) {
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
        }

        // draw results
        if (ids.total() > 0) {
            Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

            if (estimatePose) {
                for (int i = 0; i < ids.total(); i++) {
                    // rvecs/tvecs hold one row per detected marker; extract row i as 3x1 vectors.
                    double[] tvec = tvecs.get(i, 0);
                    double[] rv = rvecs.get(i, 0);

                    // BUGFIX: drawAxis expects a single rvec/tvec; previously the whole
                    // rvecs/tvecs matrices were passed for every i. `using` also fixes the
                    // per-frame Mat leak of the old code.
                    using (Mat rvec = new Mat(3, 1, CvType.CV_64FC1))
                    using (Mat tvecMat = new Mat(3, 1, CvType.CV_64FC1)) {
                        rvec.put(0, 0, rv[0]);
                        rvec.put(1, 0, rv[1]);
                        rvec.put(2, 0, rv[2]);
                        tvecMat.put(0, 0, tvec[0]);
                        tvecMat.put(1, 0, tvec[1]);
                        tvecMat.put(2, 0, tvec[2]);

                        Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvecMat, markerLength * 0.5f);

                        //This example can display ARObject on only first detected marker.
                        if (i == 0) {
                            // position: right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            Vector4 pos = new Vector4((float)tvec[0], (float)tvec[1], (float)tvec[2], 0); // from OpenCV
                            ARGameObject.transform.localPosition = new Vector3(pos.x, -pos.y, pos.z);

                            // rotation: rotation vector -> rotation matrix, then build a Unity
                            // quaternion from the matrix's forward/up columns (Y flipped for handedness).
                            Calib3d.Rodrigues(rvec, rotMat);
                            Vector3 forward = new Vector3((float)rotMat.get(0, 2)[0], (float)rotMat.get(1, 2)[0], (float)rotMat.get(2, 2)[0]); // from OpenCV
                            Vector3 up = new Vector3((float)rotMat.get(0, 1)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(2, 1)[0]); // from OpenCV
                            Quaternion rot = Quaternion.LookRotation(new Vector3(forward.x, -forward.y, forward.z), new Vector3(up.x, -up.y, up.z));
                            ARGameObject.transform.localRotation = rot;

                            // Apply Z axis inverted matrix, then the camera transform matrix.
                            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                            transformationM = ARGameObject.transform.localToWorldMatrix * invertZM;
                            transformationM = ARCamera.transform.localToWorldMatrix * transformationM;
                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref transformationM);
                        }
                    }
                }
            }
        }

        if (showRejected && rejected.Count > 0) {
            Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
        }

        // BUGFIX: draw the status text on rgbMat — the mat actually uploaded to the texture
        // below — instead of rgbaMat, where it was silently discarded.
        Imgproc.putText(rgbMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}