// Use this for initialization
/// <summary>
/// Initializes the head-pose-estimation resources: two sets of 3D face model
/// reference points (a 7-point set for 68-landmark input and a 5-point set for
/// 5-landmark input), an empty 2D image-point buffer, a pinhole camera matrix
/// for an assumed 640x480 frame, zeroed distortion coefficients, and the Y/Z
/// axis-inversion matrices.
/// </summary>
public override void Setup()
{
    //set 3d face object points (face-local model coordinates).
    objectPoints68 = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 120), //nose (Nose top)
        new Point3(-26, 15, 83),  //l mouth (Mouth breadth)
        new Point3(26, 15, 83),   //r mouth (Mouth breadth)
        new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
        new Point3(79, 90, 0.0)   //r ear (Bitragion breadth)
    );
    objectPoints5 = new MatOfPoint3f(
        new Point3(-23, 90, 83), //l eye (Inner corner of the eye)
        new Point3(23, 90, 83),  //r eye (Inner corner of the eye)
        new Point3(-50, 90, 80), //l eye (Tail of the eye)
        new Point3(50, 90, 80),  //r eye (Tail of the eye)
        new Point3(0.0, 50, 120) //nose (Nose top)
    );
    // 2D landmark positions are filled in per frame elsewhere.
    imagePoints = new MatOfPoint2f();

    // NOTE(review): intrinsics below assume a fixed 640x480 input frame —
    // confirm this matches the size of the frames actually processed.
    float width = 640;
    float height = 480;

    //set cameraparam: pinhole approximation with focal length = max(width,
    //height) and the principal point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    // Intrinsic matrix layout: | fx  0 cx |
    //                          |  0 fy cy |
    //                          |  0  0  1 |
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // No lens distortion is modeled (all coefficients zero).
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Axis-inversion matrices (scale Y by -1 / Z by -1); presumably applied to
    // convert poses between OpenCV and Unity conventions — confirm at use site.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    didUpdateHeadRotation = false;
}
/// <summary>
/// Stores the supplied intrinsics into _CamMatrix / _DistCoeffs and logs the
/// camera characteristics computed by Calib3d.calibrationMatrixValues for the
/// size of <paramref name="inputImageMat"/>.
/// </summary>
/// <param name="inputImageMat">Image whose width/height define the calibration image size.</param>
/// <param name="fx">Focal length x (pixels).</param>
/// <param name="fy">Focal length y (pixels).</param>
/// <param name="cx">Principal point x (pixels).</param>
/// <param name="cy">Principal point y (pixels).</param>
/// <param name="distCoeffs">Distortion coefficients; stored as-is.</param>
void InitializeCameraMatrix(Mat inputImageMat, double fx, double fy, double cx, double cy, MatOfDouble distCoeffs)
{
    Debug.Log("******************************");

    float frameWidth = inputImageMat.width();
    float frameHeight = inputImageMat.height();

    // Intrinsic matrix:  | fx  0 cx |
    //                    |  0 fy cy |
    //                    |  0  0  1 |
    _CamMatrix = new Mat(3, 3, CvType.CV_64FC1);
    _CamMatrix.put(0, 0, fx, 0, cx, 0, fy, cy, 0, 0, 1.0);
    Debug.Log("CamMatrix " + _CamMatrix.dump());

    _DistCoeffs = distCoeffs;
    Debug.Log("DistCoeffs " + _DistCoeffs.dump());

    // Calibration camera: derive FOV, focal length, principal point and aspect
    // ratio from the intrinsics, then log them.
    Size calibImageSize = new Size(frameWidth, frameHeight);
    double apertureW = 0;
    double apertureH = 0;
    double[] fieldOfViewX = new double[1];
    double[] fieldOfViewY = new double[1];
    double[] focal = new double[1];
    Point principal = new Point(0, 0);
    double[] aspect = new double[1];
    Calib3d.calibrationMatrixValues(_CamMatrix, calibImageSize, apertureW, apertureH, fieldOfViewX, fieldOfViewY, focal, principal, aspect);

    Debug.Log("ImageSize " + calibImageSize.ToString());
    Debug.Log("ApertureWidth " + apertureW);
    Debug.Log("ApertureHeight " + apertureH);
    Debug.Log("Fovx " + fieldOfViewX[0]);
    Debug.Log("Fovy " + fieldOfViewY[0]);
    Debug.Log("FocalLength " + focal[0]);
    Debug.Log("PrincipalPoint " + principal.ToString());
    Debug.Log("Aspectratio " + aspect[0]);

    Debug.Log("******************************");
}
/// <summary>
/// One-time setup for ChArUco camera calibration: creates the preview
/// texture/quad, installs a default camera matrix with zeroed distortion,
/// logs the calibration characteristics, and allocates the detector buffers,
/// ChArUco board, and per-frame accumulation lists. Sets isInitialized on
/// completion.
/// </summary>
/// <param name="frameMat">First camera frame; defines the working image size.</param>
private void InitializeCalibraton(Mat frameMat)
{
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = frameMat.width();
    float height = frameMat.height();

    // Preview texture sized to the frame, shown on a small quad.
    texture = new Texture2D(frameMat.cols(), frameMat.rows(), TextureFormat.RGBA32, false);
    texture.wrapMode = TextureWrapMode.Clamp;
    previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
    previewQuad.transform.localScale = new Vector3(0.2f * width / height, 0.2f, 1);

    float imageSizeScale = 1.0f;

    // set cameraparam.
    camMatrix = CreateCameraMatrix(width, height);
    Debug.Log("camMatrix " + camMatrix.dump());
    // Five zeroed distortion coefficients; real values are produced by the calibration.
    distCoeffs = new MatOfDouble(0, 0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // calibration camera: log FOV / focal length / principal point derived from the intrinsics.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // Working Mats for color conversion.
    bgrMat = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC3);
    rgbaMat = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC4);

    // ArUco detection buffers.
    ids = new Mat();
    corners = new List <Mat>();
    rejectedCorners = new List <Mat>();
    rvecs = new List <Mat>();
    tvecs = new List <Mat>();

    detectorParams = DetectorParameters.create();
    detectorParams.set_cornerRefinementMethod(1);// do cornerSubPix() of OpenCV.
    dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

    recoveredIdxs = new Mat();

    // ChArUco board definition and per-frame interpolation results.
    charucoCorners = new Mat();
    charucoIds = new Mat();
    charucoBoard = CharucoBoard.create((int)squaresX, (int)squaresY, chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);

    // Accumulators for the frames captured during calibration.
    allCorners = new List <List <Mat> >();
    allIds = new List <Mat>();
    allImgs = new List <Mat>();

    imagePoints = new List <Mat>();

    isInitialized = true;
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// Creates the display texture, sizes the orthographic camera to fit the
/// frame on screen, builds approximate pinhole intrinsics, matches the AR
/// camera FOV to them, and constructs the marker detector.
/// </summary>
public void OnWebCamTextureToMatHelperInited()
{
    Debug.Log("OnWebCamTextureToMatHelperInited");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    // Fit the frame to the screen: when the screen is relatively narrower than
    // the frame, letterbox and remember the scale for the calibration image size.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    //set cameraparam: pinhole approximation with focal length = max dimension
    //and the principal point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // No lens distortion modeled.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    //calibration camera
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    //To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale)
    {
        ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    }
    else
    {
        ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    // Copy each marker's design from the serialized settings into the detector input.
    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++)
    {
        markerDesigns [i] = markerSettings [i].markerDesign;
    }
    markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);

    // Axis-inversion matrices (scale Y / Z by -1) used with the estimated pose.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    //if WebCamera is frontFaceing,flip Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
    {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Prepares the (optionally downscaled) preview, builds the camera intrinsic
/// matrix and distortion coefficients — from a stored calibration XML, from
/// the device's CameraIntrinsics (UWP), or from hard-coded fallback values —
/// and allocates the Mats and buffers used by the ArUco detector.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat grayMat = webCamTextureToMatHelper.GetMat();

    // Raw (pre-downscale) frame size; stored calibration files are named after it.
    float rawFrameWidth = grayMat.width();
    float rawFrameHeight = grayMat.height();

    if (enableDownScale)
    {
        downScaleMat = imageOptimizationHelper.GetDownScaleMat(grayMat);
        DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
    }
    else
    {
        downScaleMat = grayMat;
        DOWNSCALE_RATIO = 1.0f;
    }

    float width = downScaleMat.width();
    float height = downScaleMat.height();

    texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
    previewQuad.GetComponent<MeshRenderer>().material.mainTexture = texture;
    previewQuad.transform.localScale = new Vector3(0.2f * width / height, 0.2f, 1);
    previewQuad.SetActive(displayCameraPreview);

    //Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    DebugUtils.AddDebugStr(webCamTextureToMatHelper.GetWidth() + " x " + webCamTextureToMatHelper.GetHeight() + " : " + webCamTextureToMatHelper.GetFPS());
    if (enableDownScale)
    {
        DebugUtils.AddDebugStr("enableDownScale = true: " + DOWNSCALE_RATIO + " / " + width + " x " + height);
    }

    // create camera matrix and dist coeffs.
    string loadDirectoryPath = Path.Combine(Application.persistentDataPath, "HoloLensArUcoCameraCalibrationExample");
    // FIX: the calibration file is stored under "camera_parameters<width>x<height>";
    // this previously concatenated the width twice, so a stored file was never found.
    string calibratonDirectoryName = "camera_parameters" + rawFrameWidth + "x" + rawFrameHeight;
    string loadCalibratonFileDirectoryPath = Path.Combine(loadDirectoryPath, calibratonDirectoryName);
    string loadPath = Path.Combine(loadCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");
    if (useStoredCameraParameters && File.Exists(loadPath))
    {
        // If there is a camera parameters file stored by HoloLensArUcoCameraCalibrationExample, use it.
        CameraParameters param;
        XmlSerializer serializer = new XmlSerializer(typeof(CameraParameters));
        using (var stream = new FileStream(loadPath, FileMode.Open))
        {
            param = (CameraParameters)serializer.Deserialize(stream);
        }

        double fx = param.camera_matrix[0];
        double fy = param.camera_matrix[4];
        double cx = param.camera_matrix[2];
        double cy = param.camera_matrix[5];
        // NOTE(review): only the principal point is rescaled for the downscaled
        // frame; fx/fy are used as stored — confirm this matches CreateCameraMatrix's expectations.
        camMatrix = CreateCameraMatrix(fx, fy, cx / DOWNSCALE_RATIO, cy / DOWNSCALE_RATIO);
        distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

        Debug.Log("Loaded CameraParameters from a stored XML file.");
        Debug.Log("loadPath: " + loadPath);
        DebugUtils.AddDebugStr("Loaded CameraParameters from a stored XML file.");
        DebugUtils.AddDebugStr("loadPath: " + loadPath);
    }
    else
    {
        if (useStoredCameraParameters && !File.Exists(loadPath))
        {
            DebugUtils.AddDebugStr("The CameraParameters XML file (" + loadPath + ") does not exist.");
        }

#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
        CameraIntrinsics cameraIntrinsics = webCamTextureToMatHelper.GetCameraIntrinsics();
        camMatrix = CreateCameraMatrix(cameraIntrinsics.FocalLengthX, cameraIntrinsics.FocalLengthY, cameraIntrinsics.PrincipalPointX / DOWNSCALE_RATIO, cameraIntrinsics.PrincipalPointY / DOWNSCALE_RATIO);
        // FIX: OpenCV expects distortion coefficients ordered (k1, k2, p1, p2, k3)
        // — matching the hard-coded fallback below; k3 was previously passed in
        // the p1 slot, which skews undistortion/pose estimation.
        distCoeffs = new MatOfDouble(cameraIntrinsics.RadialDistK1, cameraIntrinsics.RadialDistK2, cameraIntrinsics.TangentialDistP1, cameraIntrinsics.TangentialDistP2, cameraIntrinsics.RadialDistK3);
        Debug.Log("Created CameraParameters from VideoMediaFrame.CameraIntrinsics on device.");
        DebugUtils.AddDebugStr("Created CameraParameters from VideoMediaFrame.CameraIntrinsics on device.");
#else
        // The camera matrix value of Hololens camera 896x504 size.
        // For details on the camera matrix, please refer to this page. (http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html)
        // These values are unique to my device, obtained from the "Windows.Media.Devices.Core.CameraIntrinsics" class. (https://docs.microsoft.com/en-us/uwp/api/windows.media.devices.core.cameraintrinsics)
        // Can get these values by using this helper script. (https://github.com/EnoxSoftware/HoloLensWithOpenCVForUnityExample/tree/master/Assets/HololensCameraIntrinsicsChecker/CameraIntrinsicsCheckerHelper)
        double fx = 1035.149; //focal length x.
        double fy = 1034.633; //focal length y.
        double cx = 404.9134; //principal point x.
        double cy = 236.2834; //principal point y.
        double distCoeffs1 = 0.2036923; //radial distortion coefficient k1.
        double distCoeffs2 = -0.2035773; //radial distortion coefficient k2.
        double distCoeffs3 = 0.0; //tangential distortion coefficient p1.
        double distCoeffs4 = 0.0; //tangential distortion coefficient p2.
        double distCoeffs5 = -0.2388065; //radial distortion coefficient k3.

        camMatrix = CreateCameraMatrix(fx, fy, cx / DOWNSCALE_RATIO, cy / DOWNSCALE_RATIO);
        distCoeffs = new MatOfDouble(distCoeffs1, distCoeffs2, distCoeffs3, distCoeffs4, distCoeffs5);

        Debug.Log("Created a dummy CameraParameters (896x504).");
        DebugUtils.AddDebugStr("Created a dummy CameraParameters (896x504).");
#endif
    }

    Debug.Log("camMatrix " + camMatrix.dump());
    Debug.Log("distCoeffs " + distCoeffs.dump());
    //DebugUtils.AddDebugStr("camMatrix " + camMatrix.dump());
    //DebugUtils.AddDebugStr("distCoeffs " + distCoeffs.dump());

    //Calibration camera: log FOV / focal length / principal point derived from the intrinsics.
    Size imageSize = new Size(width, height);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // Display objects near the camera.
    arCamera.nearClipPlane = 0.01f;

    // ArUco detection buffers and pose-estimation helpers.
    ids = new Mat();
    corners = new List<Mat>();
    rejectedCorners = new List<Mat>();
    rvecs = new Mat();
    tvecs = new Mat();
    rotMat = new Mat(3, 3, CvType.CV_64FC1);

    transformationM = new Matrix4x4();
    // Axis-inversion matrices (scale Y / Z by -1) applied to the marker pose.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    detectorParams = DetectorParameters.create();
    dictionary = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250);

    //If WebCamera is frontFaceing, flip Mat.
    webCamTextureToMatHelper.flipHorizontal = webCamTextureToMatHelper.IsFrontFacing();

    rgbMat4preview = new Mat();
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Prepares the downscaled preview, builds the camera matrix either from a
/// calibration XML stored by HoloLensArUcoCameraCalibrationExample or from the
/// serialized fallback fields, logs the calibration characteristics, and
/// allocates the ArUco detection buffers.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat rawSizeMat = webCamTextureToMatHelper.GetMat();
    // Raw (pre-downscale) frame size; stored calibration files are named after it.
    float rawSizeWidth = rawSizeMat.width();
    float rawSizeHeight = rawSizeMat.height();
    Mat webCamTextureMat = imageOptimizationHelper.GetDownScaleMat(rawSizeMat);

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
    previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
    previewQuad.transform.localScale = new Vector3(1, height / width, 1);
    previewQuad.SetActive(displayCameraPreview);

    // set camera parameters.
    double fx;
    double fy;
    double cx;
    double cy;

    string loadDirectoryPath = Path.Combine(Application.persistentDataPath, "HoloLensArUcoCameraCalibrationExample");
    string calibratonDirectoryName = "camera_parameters" + rawSizeWidth + "x" + rawSizeHeight;
    string loadCalibratonFileDirectoryPath = Path.Combine(loadDirectoryPath, calibratonDirectoryName);
    string loadPath = Path.Combine(loadCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");
    if (useStoredCameraParameters && File.Exists(loadPath))
    {
        // Use the camera parameters stored by HoloLensArUcoCameraCalibrationExample.
        CameraParameters param;
        XmlSerializer serializer = new XmlSerializer(typeof(CameraParameters));
        using (var stream = new FileStream(loadPath, FileMode.Open))
        {
            param = (CameraParameters)serializer.Deserialize(stream);
        }

        // NOTE(review): only the principal point is rescaled for the downscaled
        // frame; fx/fy are used as stored — confirm this is intended.
        fx = param.camera_matrix[0];
        fy = param.camera_matrix[4];
        cx = param.camera_matrix[2] / imageOptimizationHelper.downscaleRatio;
        cy = param.camera_matrix[5] / imageOptimizationHelper.downscaleRatio;

        // Intrinsic matrix layout: [fx 0 cx; 0 fy cy; 0 0 1].
        camMatrix = new Mat(3, 3, CvType.CV_64FC1);
        camMatrix.put(0, 0, fx);
        camMatrix.put(0, 1, 0);
        camMatrix.put(0, 2, cx);
        camMatrix.put(1, 0, 0);
        camMatrix.put(1, 1, fy);
        camMatrix.put(1, 2, cy);
        camMatrix.put(2, 0, 0);
        camMatrix.put(2, 1, 0);
        camMatrix.put(2, 2, 1.0f);

        distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

        Debug.Log("Loaded CameraParameters from a stored XML file.");
        Debug.Log("loadPath: " + loadPath);
    }
    else
    {
        // Fall back to the serialized intrinsics fields on this component.
        fx = this.fx;
        fy = this.fy;
        cx = this.cx / imageOptimizationHelper.downscaleRatio;
        cy = this.cy / imageOptimizationHelper.downscaleRatio;

        camMatrix = new Mat(3, 3, CvType.CV_64FC1);
        camMatrix.put(0, 0, fx);
        camMatrix.put(0, 1, 0);
        camMatrix.put(0, 2, cx);
        camMatrix.put(1, 0, 0);
        camMatrix.put(1, 1, fy);
        camMatrix.put(1, 2, cy);
        camMatrix.put(2, 0, 0);
        camMatrix.put(2, 1, 0);
        camMatrix.put(2, 2, 1.0f);

        distCoeffs = new MatOfDouble(this.distCoeffs1, this.distCoeffs2, this.distCoeffs3, this.distCoeffs4, this.distCoeffs5);

        Debug.Log("Created a dummy CameraParameters.");
    }

    Debug.Log("camMatrix " + camMatrix.dump());
    Debug.Log("distCoeffs " + distCoeffs.dump());

    //Calibration camera: log FOV / focal length / principal point derived from the intrinsics.
    Size imageSize = new Size(width, height);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // Display objects near the camera.
    arCamera.nearClipPlane = 0.01f;

    grayMat = new Mat();
    // ArUco detection buffers and pose-estimation helpers.
    ids = new Mat();
    corners = new List <Mat> ();
    rejectedCorners = new List <Mat> ();
    rvecs = new Mat();
    tvecs = new Mat();
    rotMat = new Mat(3, 3, CvType.CV_64FC1);

    transformationM = new Matrix4x4();
    // Axis-inversion matrices (scale Y / Z by -1) applied to the marker pose.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    detectorParams = DetectorParameters.create();
    dictionary = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250);

    //If WebCamera is frontFaceing,flip Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
    {
        webCamTextureToMatHelper.flipHorizontal = true;
    }

    downScaleFrameMat = new Mat((int)height, (int)width, CvType.CV_8UC4);
    rgbMat4preview = new Mat();
}
/// <summary>
/// Builds a pinhole camera matrix from the input image size (focal length =
/// max(width, height), principal point at the image center), zeroes the
/// distortion coefficients, logs the calibration characteristics, and adjusts
/// the AR camera's field of view to match the OpenCV intrinsics.
/// </summary>
/// <param name="inputImageMat">Camera frame that defines the image size.</param>
void InitializeCameraMatrix(Mat inputImageMat)
{
    Debug.Log("******************************");

    float frameW = inputImageMat.width();
    float frameH = inputImageMat.height();

    // When the screen is relatively narrower than the frame, scale the
    // calibration image size accordingly (letterboxed fit).
    float widthScale = (float)Screen.width / frameW;
    float heightScale = (float)Screen.height / frameH;
    bool fitByWidth = widthScale < heightScale;
    float imageSizeScale = fitByWidth ? (float)Screen.height / (float)Screen.width : 1.0f;

    // Set camera param: approximate intrinsics for an uncalibrated camera.
    int max_d = (int)Mathf.Max(frameW, frameH);
    double fx = max_d;
    double fy = max_d;
    double cx = frameW / 2.0f;
    double cy = frameH / 2.0f;
    // Intrinsic matrix:  | fx  0 cx |
    //                    |  0 fy cy |
    //                    |  0  0  1 |
    _CamMatrix = new Mat(3, 3, CvType.CV_64FC1);
    _CamMatrix.put(0, 0, fx, 0, cx, 0, fy, cy, 0, 0, 1.0);
    Debug.Log("CamMatrix " + _CamMatrix.dump());

    // No lens distortion modeled.
    _DistCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("DistCoeffs " + _DistCoeffs.dump());

    // Calibration camera: derive and log the FOV characteristics.
    Size imageSize = new Size(frameW * imageSizeScale, frameH * imageSizeScale);
    double apertureW = 0;
    double apertureH = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focal = new double[1];
    Point principal = new Point(0, 0);
    double[] aspect = new double[1];
    Calib3d.calibrationMatrixValues(_CamMatrix, imageSize, apertureW, apertureH, fovx, fovy, focal, principal, aspect);

    Debug.Log("ImageSize " + imageSize.ToString());
    Debug.Log("ApertureWidth " + apertureW);
    Debug.Log("ApertureHeight " + apertureH);
    Debug.Log("Fovx " + fovx[0]);
    Debug.Log("Fovy " + fovy[0]);
    Debug.Log("FocalLength " + focal[0]);
    Debug.Log("PrincipalPoint " + principal.ToString());
    Debug.Log("Aspectratio " + aspect[0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("FovXScale " + fovXScale);
    Debug.Log("FovYScale " + fovYScale);

    // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    ARCamera.fieldOfView = fitByWidth
        ? (float)(fovx[0] * fovXScale)
        : (float)(fovy[0] * fovYScale);

    Debug.Log("******************************");
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// Allocates the display texture and pixel buffer, sizes the orthographic
/// camera to fit the frame, loads the face-detection cascade, builds a
/// pinhole camera matrix with zero distortion, sets the AR camera FOV, and
/// hides the AR overlay objects until a face is tracked.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
    Debug.Log ("OnWebCamTextureToMatHelperInited");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

    colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
    texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

    gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
    Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = 0;
    float height = 0;

    width = gameObject.transform.localScale.x;
    height = gameObject.transform.localScale.y;

    // Fit the frame to the screen; remember the scale for the calibration image size.
    float imageScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

    grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
    cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
    if (cascade.empty ()) {
        Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. 
");
    }

    // Pinhole approximation: focal length = max dimension, principal point at the image center.
    int max_d = Mathf.Max (webCamTextureMat.rows (), webCamTextureMat.cols ());
    camMatrix = new Mat (3, 3, CvType.CV_64FC1);
    camMatrix.put (0, 0, max_d);
    camMatrix.put (0, 1, 0);
    camMatrix.put (0, 2, webCamTextureMat.cols () / 2.0f);
    camMatrix.put (1, 0, 0);
    camMatrix.put (1, 1, max_d);
    camMatrix.put (1, 2, webCamTextureMat.rows () / 2.0f);
    camMatrix.put (2, 0, 0);
    camMatrix.put (2, 1, 0);
    camMatrix.put (2, 2, 1.0f);

    // Log the calibration characteristics derived from the intrinsics.
    Size imageSize = new Size (webCamTextureMat.cols () * imageScale, webCamTextureMat.rows () * imageScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point ();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log ("imageSize " + imageSize.ToString ());
    Debug.Log ("apertureWidth " + apertureWidth);
    Debug.Log ("apertureHeight " + apertureHeight);
    Debug.Log ("fovx " + fovx [0]);
    Debug.Log ("fovy " + fovy [0]);
    Debug.Log ("focalLength " + focalLength [0]);
    Debug.Log ("principalPoint " + principalPoint.ToString ());
    Debug.Log ("aspectratio " + aspectratio [0]);

    // NOTE(review): portrait uses fovx and landscape fovy here — looks like it
    // accounts for the rotated display; confirm against the device behavior.
    if (Screen.height > Screen.width) {
        ARCamera.fieldOfView = (float)fovx [0];
    } else {
        ARCamera.fieldOfView = (float)fovy [0];
    }

    Debug.Log ("camMatrix " + camMatrix.dump ());

    // No lens distortion modeled.
    distCoeffs = new MatOfDouble (0, 0, 0, 0);
    Debug.Log ("distCoeffs " + distCoeffs.dump ());

    lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
    Debug.Log ("lookAt " + lookAtM.ToString ());

    // Z axis-inversion matrix (scale Z by -1) applied to the estimated pose.
    invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

    // Hide AR overlay objects until a face is tracked.
    axes.SetActive (false);
    head.SetActive (false);
    rightEye.SetActive (false);
    leftEye.SetActive (false);
    mouth.SetActive (false);
}
// Starts (or restarts) the capture process: releases any previous session,
// picks a camera matching the requested facing, waits for the first real
// frame, then allocates the Mats, cascade, and camera intrinsics.
private IEnumerator init()
{
    rightEye.SetActive(false);
    leftEye.SetActive(false);

    Debug.Log("---------------------------------------------------------------Eye");
    Debug.Log(leftEye.transform.localPosition);
    Debug.Log(rightEye.transform.localPosition);
    Debug.Log("---------------------------------------------------------------Eye");

    // If a previous session exists, reset the tracker and release its resources.
    if (webCamTexture != null) {
        faceTracker.reset();

        webCamTexture.Stop();
        initDone = false;

        rgbaMat.Dispose();
        grayMat.Dispose();
        cascade.Dispose();
        camMatrix.Dispose();
        distCoeffs.Dispose();
    }

    // Check whether a camera with the requested facing is available on the device.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices[cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices[cameraIndex].name + " isFrontFacing " + WebCamTexture.devices[cameraIndex].isFrontFacing);

            webCamDevice = WebCamTexture.devices[cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    // Fall back to the first device when no camera matched the requested facing.
    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices[0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Start the camera.
    webCamTexture.Play();

    while (true) {
        // On iOS, webCamTexture.width/height report 16 until
        // webCamTexture.didUpdateThisFrame == 1, so wait before using them.
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32();
                yield return(new WaitForEndOfFrame());
            }
#endif
#endif

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Allocate per-frame buffers now that the real frame size is known.
            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            updateLayout();

            cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
            if (cascade.empty()) {
                Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. 
");
            }

            // Pinhole approximation: focal length = max dimension, principal
            // point at the image center.
            int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, max_d);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, max_d);
            camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);

            // Log the calibration characteristics derived from the intrinsics.
            Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point(); // principal point
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("--------------------------principalPoint");
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("--------------------------principalPoint");
            Debug.Log("aspectratio " + aspectratio[0]);

            // Match the AR camera's vertical FOV to the computed intrinsics.
            ARCamera.fieldOfView = (float)fovy[0];

            Debug.Log("camMatrix " + camMatrix.dump());

            // No lens distortion modeled.
            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
            Debug.Log("lookAt " + lookAtM.ToString());

            // Z axis-inversion matrix (scale Z by -1) applied to the estimated pose.
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

            screenOrientation = Screen.orientation;
            initDone = true;

            break;
        } else {
            yield return(0);
        }
    }
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Builds the preview texture, the 3x3 camera intrinsic matrix and the
/// cascade classifiers used by the detection pipeline.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    // Work on a downscaled frame to keep per-frame processing cheap.
    Mat webCamTextureMat = imageOptimizationHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

#if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
    // HololensCameraStream always returns image data in BGRA format.
    texture = new Texture2D((int)width, (int)height, TextureFormat.BGRA32, false);
#else
    texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);
#endif

    previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
    // Keep the preview quad's aspect ratio in sync with the camera image.
    previewQuad.transform.localScale = new Vector3(1, height / width, 1);
    previewQuad.SetActive(displayCameraPreview);

    // Principal point is scaled by the same downscale ratio as the image;
    // focal lengths fx/fy are taken from the configured fields as-is.
    double fx = this.fx;
    double fy = this.fy;
    double cx = this.cx / imageOptimizationHelper.downscaleRatio;
    double cy = this.cy / imageOptimizationHelper.downscaleRatio;

    // Intrinsic matrix [fx 0 cx; 0 fy cy; 0 0 1].
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    distCoeffs = new MatOfDouble(distCoeffs1, distCoeffs2, distCoeffs3, distCoeffs4, distCoeffs5);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    //Calibration camera — derive FOV/focal-length diagnostics from the intrinsics.
    Size imageSize = new Size(width, height);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    transformationM = new Matrix4x4();

    // Axis-flip matrices used to convert between OpenCV's and Unity's
    // coordinate conventions when composing pose matrices.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // Hide AR objects until a face pose is available.
    axes.SetActive(false);
    head.SetActive(false);
    rightEye.SetActive(false);
    leftEye.SetActive(false);
    mouth.SetActive(false);
    mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem> (true);

    //If WebCamera is frontFaceing,flip Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) {
        webCamTextureToMatHelper.flipHorizontal = true;
    }

    grayMat = new Mat();

    // Main-thread detector (LBP cascade).
    cascade = new CascadeClassifier();
    cascade.load(OpenCVForUnity.Utils.getFilePath("lbpcascade_frontalface.xml"));
    // "empty" method is not working on the UWP platform.
    // if (cascade.empty ()) {
    // Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    // }

    // Worker-thread detector (Haar cascade) with its own gray buffer.
    grayMat4Thread = new Mat();
    cascade4Thread = new CascadeClassifier();
    cascade4Thread.load(OpenCVForUnity.Utils.getFilePath("haarcascade_frontalface_alt.xml"));
    // "empty" method is not working on the UWP platform.
    // if (cascade4Thread.empty ()) {
    // Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    // }

    detectionResult = new MatOfRect();
}
// Coroutine that (re)initializes the webcam, the face tracker resources and
// the camera intrinsics, then waits for the first valid camera frame.
private IEnumerator init()
{
    // Hide all AR overlay objects until tracking produces a pose.
    axes.SetActive(false);
    head.SetActive(false);
    rightEye.SetActive(false);
    leftEye.SetActive(false);
    mouth.SetActive(false);

    // Re-entry: tear down any previous session before starting a new one.
    if (webCamTexture != null) {
        faceTracker.reset();
        webCamTexture.Stop();
        initDone = false;
        rgbaMat.Dispose();
        grayMat.Dispose();
        cascade.Dispose();
        camMatrix.Dispose();
        distCoeffs.Dispose();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    // Fallback: no camera matched the requested facing — use the first device.
    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play();

    while (true) {
        //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Allocate per-frame buffers sized to the real camera resolution.
            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
            if (cascade.empty()) {
                Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            // Size the display quad 1:1 with the camera image.
            gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);
            gameObject.transform.localEulerAngles = new Vector3(0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);
            // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            Camera.main.orthographicSize = webCamTexture.height / 2;

            // Approximate intrinsics: focal length = max image dimension,
            // principal point = image center.
            int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, max_d);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, max_d);
            camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);

            Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point(); // principal point
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);

            // Match the AR camera's FOV to the computed vertical FOV.
            ARCamera.fieldOfView = (float)fovy [0];

            Debug.Log("camMatrix " + camMatrix.dump());

            // Zero distortion assumed.
            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
            Debug.Log("lookAt " + lookAtM.ToString());
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

            initDone = true;
            break;
        } else {
            // Camera not delivering frames yet; try again next frame.
            yield return(0);
        }
    }
}

// Update is called once per frame
void Update()
{
    if (!initDone) {
        return;
    }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        //flip to correct direction.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip(rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        }

        //convert image to greyscale
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        // No tracked points yet: run cascade face detection to seed the tracker.
        if (faceTracker.getPoints().Count <= 0) {
            Debug.Log("detectFace");
            //convert image to greyscale
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);
                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
                if (faces.rows() > 0) {
                    Debug.Log("faces " + faces.dump());
                    //add initial face points from MatOfRect
                    faceTracker.addPoints(faces);
                    //draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }
            }
        }

        //track face points.if face points <= 0, always return false.
        if (faceTracker.track(grayMat, faceTrackerParams)) {
            if (isDrawPoints) {
                faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }
            Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

            Point[] points = faceTracker.getPoints() [0];
            if (points.Length > 0) {
                // for (int i = 0; i < points.Length; i++) {
                // Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                // }

                // Select 2D landmarks matching the 3D model points for solvePnP.
                imagePoints.fromArray(
                    points [31], //l eye
                    points [36], //r eye
                    points [67], //nose
                    points [48], //l mouth
                    points [54] //r mouth
                    // ,
                    // points [1],//l ear
                    // points [13]//r ear
                );

                // Estimate head pose (rotation rvec, translation tvec).
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;
                // Accept the pose only when its depth is plausible.
                if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat();
                        rvec.copyTo(oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat();
                        tvec.copyTo(oldTvec);
                    }

                    //filter Rvec Noise.
                    using (Mat absDiffRvec = new Mat()) {
                        Core.absdiff(rvec, oldRvec, absDiffRvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump());
                        using (Mat cmpRvec = new Mat()) {
                            Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpRvec) > 0) {
                                isRefresh = false;
                            }
                        }
                    }

                    //filter Tvec Noise.
                    using (Mat absDiffTvec = new Mat()) {
                        Core.absdiff(tvec, oldTvec, absDiffTvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump());
                        using (Mat cmpTvec = new Mat()) {
                            Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpTvec) > 0) {
                                isRefresh = false;
                            }
                        }
                    }
                }

                if (isRefresh) {
                    if (!rightEye.activeSelf) {
                        rightEye.SetActive(true);
                    }
                    if (!leftEye.activeSelf) {
                        leftEye.SetActive(true);
                    }

                    // Heuristic mouth-open test based on lip-gap vs eye-distance ratios.
                    if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2
                        && Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9)
                        || Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7) {
                        if (!mouth.activeSelf) {
                            mouth.SetActive(true);
                        }
                    } else {
                        if (mouth.activeSelf) {
                            mouth.SetActive(false);
                        }
                    }

                    rvec.copyTo(oldRvec);
                    tvec.copyTo(oldTvec);

                    // Convert rvec to a rotation matrix and compose the 4x4 pose.
                    Calib3d.Rodrigues(rvec, rotM);

                    transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                    transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                    transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    // Drive the AR camera with the estimated head pose.
                    modelViewMtrx = lookAtM * transformationM * invertZM;
                    ARCamera.worldToCameraMatrix = modelViewMtrx;

                    // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                }
            }
        }

        Utils.matToTexture2D(rgbaMat, texture, colors);
    }

    // Tap or space resets the tracker and the AR camera.
    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset();
        if (oldRvec != null) {
            oldRvec.Dispose();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose();
            oldTvec = null;
        }
        ARCamera.ResetWorldToCameraMatrix();

        rightEye.SetActive(false);
        leftEye.SetActive(false);
        mouth.SetActive(false);
    }
}
void OnDisable() { webCamTexture.Stop(); } private Matrix4x4 getLookAtMatrix(Vector3 pos, Vector3 target, Vector3 up) { Vector3 z = Vector3.Normalize(pos - target); Vector3 x = Vector3.Normalize(Vector3.Cross(up, z)); Vector3 y = Vector3.Normalize(Vector3.Cross(z, x)); Matrix4x4 result = new Matrix4x4(); result.SetRow(0, new Vector4(x.x, x.y, x.z, -(Vector3.Dot(pos, x)))); result.SetRow(1, new Vector4(y.x, y.y, y.z, -(Vector3.Dot(pos, y)))); result.SetRow(2, new Vector4(z.x, z.y, z.z, -(Vector3.Dot(pos, z)))); result.SetRow(3, new Vector4(0, 0, 0, 1)); return(result); } void OnGUI() { float screenScale = Screen.height / 240.0f; Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale)); GUI.matrix = scaledMatrix; GUILayout.BeginVertical(); if (GUILayout.Button("back")) { Application.LoadLevel("FaceTrackerSample"); } if (GUILayout.Button("change camera")) { isFrontFacing = !isFrontFacing; StartCoroutine(init()); } if (GUILayout.Button("drawPoints")) { if (isDrawPoints) { isDrawPoints = false; } else { isDrawPoints = true; } } if (GUILayout.Button("axes")) { if (axes.activeSelf) { axes.SetActive(false); } else { axes.SetActive(true); } } if (GUILayout.Button("head")) { if (head.activeSelf) { head.SetActive(false); } else { head.SetActive(true); } } GUILayout.EndVertical(); } }
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Sets up the display quad, camera intrinsics, AR camera FOV and the
/// color blob detector state.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    // Size the display quad 1:1 with the camera image.
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    if (fpsMonitor != null) {
        fpsMonitor.Add("width", webCamTextureMat.width().ToString());
        fpsMonitor.Add("height", webCamTextureMat.height().ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
        fpsMonitor.consoleText = "Please touch the area of the open hand.";
    }

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    // Fit the orthographic camera to the image; imageSizeScale compensates
    // when the image is scaled to the screen's narrower dimension.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    //set cameraparam: approximate intrinsics with focal length = max image
    //dimension and principal point = image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // Zero distortion assumed.
    distCoeff = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeff.dump());

    //calibration camera
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    //To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
    } else {
        ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
    }

    // Axis-flip matrices for OpenCV <-> Unity coordinate conversion.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // Color blob detector state (spectrum preview, drawing colors, contour buffer).
    detector = new ColorBlobDetector();
    spectrumMat = new Mat();
    blobColorRgba = new Scalar(255);
    blobColorHsv = new Scalar(255);
    SPECTRUM_SIZE = new Size(200, 64);
    CONTOUR_COLOR = new Scalar(255, 0, 0, 255);
    CONTOUR_COLOR_WHITE = new Scalar(255, 255, 255, 255);
    rectPoints = new MatOfPoint2f();
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// Prepares the display quad, an approximate camera intrinsic matrix, the
/// AR camera field of view, and hides the AR overlay objects.
/// </summary>
public void OnWebCamTextureToMatHelperInited()
{
    Debug.Log("OnWebCamTextureToMatHelperInited");

    Mat frameMat = webCamTextureToMatHelper.GetMat();
    int frameCols = frameMat.cols();
    int frameRows = frameMat.rows();

    colors = new Color32[frameCols * frameRows];
    texture = new Texture2D(frameCols, frameRows, TextureFormat.RGBA32, false);
    gameObject.transform.localScale = new Vector3(frameCols, frameRows, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the orthographic camera to the quad; remember the letterbox scale
    // applied when the screen is narrower than the image.
    Vector3 quadScale = gameObject.transform.localScale;
    float width = quadScale.x;
    float height = quadScale.y;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    float imageScale;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
        imageScale = 1.0f;
    }

    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    // Approximate intrinsics: focal length = max image dimension,
    // principal point = image center.
    int max_d = Mathf.Max(frameRows, frameCols);
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, max_d);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, frameMat.cols() / 2.0f);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, max_d);
    camMatrix.put(1, 2, frameMat.rows() / 2.0f);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);

    // Derive FOV / focal-length diagnostics from the intrinsic matrix.
    Size imageSize = new Size(frameMat.cols() * imageScale, frameMat.rows() * imageScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // Pick the FOV axis matching how the quad was fitted to the screen.
    ARCamera.fieldOfView = widthScale < heightScale ? (float)fovx[0] : (float)fovy[0];

    Debug.Log("camMatrix " + camMatrix.dump());

    // Zero lens distortion assumed.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Axis-flip matrices for OpenCV <-> Unity coordinate conversion.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // Hide every AR overlay object until tracking produces a pose.
    foreach (GameObject overlay in new GameObject[] { axes, head, rightEye, leftEye, mouth }) {
        overlay.SetActive(false);
    }

    mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem>(true);
}
// Use this for initialization.
// Detects fiducial markers in a static texture, then either moves the AR
// camera to the first marker (shouldMoveARCamera) or places one AR object
// per detected marker.
void Start()
{
    // Size the display quad 1:1 with the source texture.
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat(imgTexture, imgMat);
    Debug.Log("imgMat dst ToString " + imgMat.ToString());

    float width = imgMat.width();
    float height = imgMat.height();

    // Fit the orthographic camera; imageSizeScale compensates when the image
    // is scaled to the screen's narrower dimension.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    //set cameraparam: focal length = max image dimension, principal point = image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    //calibration camera
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    //To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    //Adjust Unity Camera FOV
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    } else {
        ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    // Collect the marker patterns to look for and run detection on the image.
    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++) {
        markerDesigns [i] = markerSettings [i].markerDesign;
    }
    MarkerDetector markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);
    markerDetector.processFrame(imgMat, 1);

    // Hide everything; only objects for detected markers are re-enabled below.
    foreach (MarkerSettings settings in markerSettings) {
        settings.setAllARGameObjectsDisable();
    }

    if (shouldMoveARCamera) {
        // Move the AR camera relative to the first detected marker's object.
        List <Marker> findMarkers = markerDetector.getFindMarkers();
        if (findMarkers.Count > 0) {
            Marker marker = findMarkers [0];
            if (markerSettings.Length > 0) {
                MarkerSettings settings = markerSettings [0];
                if (marker.id == settings.getMarkerId()) {
                    Matrix4x4 transformationM = marker.transformation;
                    Debug.Log("transformationM " + transformationM.ToString());
                    Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                    Debug.Log("invertZM " + invertZM.ToString());
                    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                    Debug.Log("invertYM " + invertYM.ToString());
                    GameObject ARGameObject = settings.getARGameObject();
                    if (ARGameObject != null) {
                        // Inverse marker pose: camera moves instead of the object.
                        Matrix4x4 ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                        Debug.Log("ARM " + ARM.ToString());
                        ARGameObject.SetActive(true);
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                }
            }
        }
    } else {
        // Place each configured AR object on its matching detected marker.
        List <Marker> findMarkers = markerDetector.getFindMarkers();
        for (int i = 0; i < findMarkers.Count; i++) {
            Marker marker = findMarkers [i];
            foreach (MarkerSettings settings in markerSettings) {
                if (marker.id == settings.getMarkerId()) {
                    Matrix4x4 transformationM = marker.transformation;
                    Debug.Log("transformationM " + transformationM.ToString());
                    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                    Debug.Log("invertYM " + invertYM.ToString());
                    Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                    Debug.Log("invertZM " + invertZM.ToString());
                    Matrix4x4 ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                    Debug.Log("ARM " + ARM.ToString());
                    GameObject ARGameObject = settings.getARGameObject();
                    if (ARGameObject != null) {
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                        ARGameObject.SetActive(true);
                    }
                }
            }
        }
    }

    // Show the processed image on the quad.
    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(imgMat, texture);
    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
}
// Starts the (re)initialization process: resets any previous session,
// selects and starts a camera, then waits for the first valid frame before
// allocating buffers and building the camera intrinsics.
private IEnumerator init()
{
    rightEye.SetActive (false);
    leftEye.SetActive (false);

    Debug.Log("---------------------------------------------------------------Eye");
    Debug.Log(leftEye.transform.localPosition);
    Debug.Log(rightEye.transform.localPosition);
    Debug.Log("---------------------------------------------------------------Eye");

    // Re-entry: tear down any previous session before starting a new one.
    if (webCamTexture != null) {
        faceTracker.reset();
        webCamTexture.Stop();
        initDone = false;
        rgbaMat.Dispose();
        grayMat.Dispose();
        cascade.Dispose();
        camMatrix.Dispose();
        distCoeffs.Dispose();
    }

    // Check which cameras are available on the device.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices[cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices[cameraIndex].name + " isFrontFacing " + WebCamTexture.devices[cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices[cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    // Fallback: no camera matched the requested facing — use the first device.
    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices[0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Start the camera.
    webCamTexture.Play();

    while (true) {
        // On iOS, to use webCamTexture.width/height you must wait until
        // webCamTexture.didUpdateThisFrame == 1; until then both report 16.
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32 ();
                yield return new WaitForEndOfFrame ();
            }
#endif
#endif
            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Allocate per-frame buffers sized to the real camera resolution.
            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

            updateLayout();

            cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
            if (cascade.empty()) {
                Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            // Approximate intrinsics: focal length = max image dimension,
            // principal point = image center.
            int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, max_d);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, max_d);
            camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);

            Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point(); // principal point
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("--------------------------principalPoint");
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("--------------------------principalPoint");
            Debug.Log("aspectratio " + aspectratio[0]);

            // Match the AR camera's FOV to the computed vertical FOV.
            ARCamera.fieldOfView = (float)fovy[0];

            Debug.Log("camMatrix " + camMatrix.dump());

            // Zero distortion assumed.
            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
            Debug.Log("lookAt " + lookAtM.ToString());
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

            screenOrientation = Screen.orientation;
            initDone = true;
            break;
        } else {
            // Camera not delivering frames yet; try again next frame.
            yield return 0;
        }
    }
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// Allocates the display texture and working Mats for the new camera frame,
/// fits the display quad to the screen, loads the face cascade, and derives an
/// approximate pinhole camera matrix used to set the AR camera's field of view.
/// </summary>
public void OnWebCamTextureToMatHelperInited()
{
    Debug.Log("OnWebCamTextureToMatHelperInited");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    // Pixel buffer and display texture sized to the camera frame.
    colors = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

    // One quad unit per camera pixel.
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = 0;
    float height = 0;

    width = gameObject.transform.localScale.x;
    height = gameObject.transform.localScale.y;

    // Fit the image to the screen; imageScale compensates for letterboxing
    // when the screen is relatively narrower than the image.
    float imageScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

    // Grayscale working buffer for detection.
    grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);

    // Haar cascade for frontal face detection, loaded from StreamingAssets.
    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
    if (cascade.empty()) {
        Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. 
");
    }

    // Approximate intrinsics: focal length = max image dimension, principal
    // point at the image center (no real calibration available).
    int max_d = Mathf.Max(webCamTextureMat.rows(), webCamTextureMat.cols());
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, max_d);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, webCamTextureMat.cols() / 2.0f);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, max_d);
    camMatrix.put(1, 2, webCamTextureMat.rows() / 2.0f);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);

    // Derive FOV and related quantities from the camera matrix for the
    // screen-scaled image size.
    Size imageSize = new Size(webCamTextureMat.cols() * imageScale, webCamTextureMat.rows() * imageScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // Pick the FOV axis that matches how the image was fitted to the screen.
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)fovx [0];
    } else {
        ARCamera.fieldOfView = (float)fovy [0];
    }

    Debug.Log("camMatrix " + camMatrix.dump());

    // Zero distortion assumed for the webcam.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Axis-flip matrices used to convert between OpenCV's right-handed and
    // Unity's left-handed coordinate systems (Y flip and Z flip).
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // Hide the AR overlay objects until a pose is available.
    axes.SetActive(false);
    head.SetActive(false);
    rightEye.SetActive(false);
    leftEye.SetActive(false);
    mouth.SetActive(false);
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Sets up the display texture on the screen quad, builds an approximate
/// pinhole camera matrix, sizes the screen quad from the resulting projection
/// matrix, and creates the marker detector.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    ScreenQuad.GetComponent <Renderer> ().material.mainTexture = texture;

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    // Scale applied to the image size passed to calibrationMatrixValues when
    // the screen is relatively wider than the image.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
    } else {
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    }

    //set cameraparam
    // Approximate intrinsics: focal length = max image dimension, principal
    // point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // Zero distortion assumed.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Calibration camera: report FOV and related values for the scaled image size.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    //To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    //resize screen Quad
    // Project a unit corner through the camera-derived projection matrix to
    // find how large the background quad must be in camera space.
    Matrix4x4 p = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues((float)fx, (float)fy, (float)cx, (float)cy, width, height, 0.3f, 1000f);
    Vector3 cameraSpacePos = UnProjectVector(p, new Vector3(1.0f, 1.0f, 1.0f));
    if (widthScale > heightScale) {
        ScreenQuad.transform.localScale = new Vector3(cameraSpacePos.x * 2f, cameraSpacePos.x * height / width * 2f, 1);
    } else {
        ScreenQuad.transform.localScale = new Vector3(cameraSpacePos.y * width / height * 2f, cameraSpacePos.y * 2f, 1);
    }

    //create markerDetector
    // Collect the marker designs from each configured marker setting.
    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++) {
        markerDesigns [i] = markerSettings [i].markerDesign;
    }
    markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);

    // Axis-flip matrices used to convert between OpenCV's right-handed and
    // Unity's left-handed coordinate systems (Y flip and Z flip).
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    //if WebCamera is frontFaceing,flip Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}
/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// Sets up the display texture/quad, loads stored camera calibration from XML
/// when available (falling back to approximate intrinsics), adjusts the AR
/// camera FOV, and allocates all Mats and boards used for ArUco detection.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGB24, false);
    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

    // One quad unit per camera pixel.
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    if (fpsMonitor != null) {
        fpsMonitor.Add("width", webCamTextureMat.width().ToString());
        fpsMonitor.Add("height", webCamTextureMat.height().ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    // Fit the image to the screen; imageSizeScale compensates for letterboxing.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    // set camera parameters.
    double fx;
    double fy;
    double cx;
    double cy;

    // Stored calibration path: persistentDataPath/ArUcoCameraCalibrationExample/
    // camera_parameters{W}x{H}/camera_parameters{W}x{H}.xml
    string loadDirectoryPath = Path.Combine(Application.persistentDataPath, "ArUcoCameraCalibrationExample");
    string calibratonDirectoryName = "camera_parameters" + width + "x" + height;
    string loadCalibratonFileDirectoryPath = Path.Combine(loadDirectoryPath, calibratonDirectoryName);
    string loadPath = Path.Combine(loadCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");
    if (useStoredCameraParameters && File.Exists(loadPath)) {
        // Deserialize a previously saved CameraParameters and take intrinsics
        // from its camera_matrix (row-major 3x3: [0]=fx, [4]=fy, [2]=cx, [5]=cy).
        CameraParameters param;
        XmlSerializer serializer = new XmlSerializer(typeof(CameraParameters));
        using (var stream = new FileStream(loadPath, FileMode.Open)) {
            param = (CameraParameters)serializer.Deserialize(stream);
        }

        camMatrix = param.GetCameraMatrix();
        distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

        fx = param.camera_matrix [0];
        fy = param.camera_matrix [4];
        cx = param.camera_matrix [2];
        cy = param.camera_matrix [5];

        Debug.Log("Loaded CameraParameters from a stored XML file.");
        Debug.Log("loadPath: " + loadPath);
    } else {
        // Fallback: approximate intrinsics with focal length = max image
        // dimension and principal point at the image center, zero distortion.
        int max_d = (int)Mathf.Max(width, height);
        fx = max_d;
        fy = max_d;
        cx = width / 2.0f;
        cy = height / 2.0f;

        camMatrix = new Mat(3, 3, CvType.CV_64FC1);
        camMatrix.put(0, 0, fx);
        camMatrix.put(0, 1, 0);
        camMatrix.put(0, 2, cx);
        camMatrix.put(1, 0, 0);
        camMatrix.put(1, 1, fy);
        camMatrix.put(1, 2, cy);
        camMatrix.put(2, 0, 0);
        camMatrix.put(2, 1, 0);
        camMatrix.put(2, 2, 1.0f);

        distCoeffs = new MatOfDouble(0, 0, 0, 0);

        Debug.Log("Created a dummy CameraParameters.");
    }

    Debug.Log("camMatrix " + camMatrix.dump());
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // calibration camera matrix values.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale) {
        arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    } else {
        arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    // Display objects near the camera.
    arCamera.nearClipPlane = 0.01f;

    // Working buffers and output containers for ArUco detection / pose estimation.
    rgbMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
    ids = new Mat();
    corners = new List <Mat> ();
    rejectedCorners = new List <Mat> ();
    rvecs = new Mat();
    tvecs = new Mat();
    rotMat = new Mat(3, 3, CvType.CV_64FC1);

    detectorParams = DetectorParameters.create();
    dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

    rvec = new Mat();
    tvec = new Mat();
    recoveredIdxs = new Mat();

    // Boards for the grid / ChArUco / diamond detection modes.
    gridBoard = GridBoard.create(gridBoradMarkersX, gridBoradMarkersY, gridBoradMarkerLength, gridBoradMarkerSeparation, dictionary, gridBoradMarkerFirstMarker);

    charucoCorners = new Mat();
    charucoIds = new Mat();
    charucoBoard = CharucoBoard.create(chArUcoBoradSquaresX, chArUcoBoradSquaresY, chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);

    diamondCorners = new List <Mat> ();
    diamondIds = new Mat(1, 1, CvType.CV_32SC4);
    diamondIds.put(0, 0, new int[] { diamondId1, diamondId2, diamondId3, diamondId4 });

    // if WebCamera is frontFaceing, flip Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}
// Use this for initialization.
// Loads a still image, detects ArUco markers in it, optionally estimates each
// marker's pose and places the AR object on the first detected marker, then
// shows the annotated image on this GameObject's quad.
void Start()
{
    Mat rgbMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

    Utils.texture2DToMat(imgTexture, rgbMat);
    Debug.Log("imgMat dst ToString " + rgbMat.ToString());

    // One quad unit per image pixel.
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = rgbMat.width();
    float height = rgbMat.height();

    // Fit the image to the screen; imageSizeScale compensates for letterboxing.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    // set cameraparam.
    // Approximate intrinsics: focal length = max image dimension, principal
    // point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // Zero distortion assumed.
    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // calibration camera.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    } else {
        ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    // Output containers for detection and pose estimation.
    Mat ids = new Mat();
    List <Mat> corners = new List <Mat> ();
    List <Mat> rejected = new List <Mat> ();
    Mat rvecs = new Mat();
    Mat tvecs = new Mat();
    Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);

    DetectorParameters detectorParams = DetectorParameters.create();
    Dictionary dictionary = Aruco.getPredefinedDictionary(dictionaryId);

    // detect markers.
    Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

    // estimate pose.
    if (applyEstimationPose && ids.total() > 0) {
        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
    }

    if (ids.total() > 0) {
        // Outline every detected marker in blue (Scalar is RGB here).
        Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

        if (applyEstimationPose) {
            for (int i = 0; i < ids.total(); i++) {
                // Debug.Log ("ids.dump() " + ids.dump ());

                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);

                // This example can display ARObject on only first detected marker.
                if (i == 0) {
                    // position
                    double[] tvec = tvecs.get(i, 0);

                    // rotation
                    // Convert the Rodrigues rotation vector to a 3x3 rotation matrix.
                    double[] rv = rvecs.get(i, 0);
                    Mat rvec = new Mat(3, 1, CvType.CV_64FC1);
                    rvec.put(0, 0, rv[0]);
                    rvec.put(1, 0, rv[1]);
                    rvec.put(2, 0, rv[2]);
                    Calib3d.Rodrigues(rvec, rotMat);

                    // Assemble the 4x4 [R|t] pose matrix in OpenCV's frame.
                    Matrix4x4 transformationM = new Matrix4x4(); // from OpenCV
                    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                    Debug.Log("transformationM " + transformationM.ToString());

                    Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                    Debug.Log("invertZM " + invertZM.ToString());

                    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                    Debug.Log("invertYM " + invertYM.ToString());

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    Matrix4x4 ARM = invertYM * transformationM;

                    // Apply Z axis inverted matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera) {
                        // Keep the AR object fixed and move the camera by the inverse pose.
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        Debug.Log("ARM " + ARM.ToString());
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    } else {
                        // Place the AR object relative to the camera.
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        Debug.Log("ARM " + ARM.ToString());
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }
            }
        }
    }

    if (showRejected && rejected.Count > 0) {
        // Outline rejected candidates in a different color for debugging.
        Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
    }

    Texture2D texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);

    Utils.matToTexture2D(rgbMat, texture);

    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Sets up the display texture/quad, builds an approximate camera matrix, a
/// combined projection*view matrix (VP) for the AR camera, adjusts the Unity
/// camera FOV, and hides the face-overlay objects until a pose is available.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(webCamTextureMat, texture);

    gameObject.GetComponent <Renderer>().material.mainTexture = texture;

    // One quad unit per camera pixel.
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    if (fpsMonitor != null) {
        fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
        fpsMonitor.Add("width", webCamTextureToMatHelper.GetWidth().ToString());
        fpsMonitor.Add("height", webCamTextureToMatHelper.GetHeight().ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    // Fit the image to the screen; imageSizeScale compensates for letterboxing.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    //set cameraparam
    // Approximate intrinsics: focal length = max image dimension, principal
    // point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // Zero distortion assumed.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // create AR camera P * V Matrix
    // V flips Z to bridge the camera-space handedness difference.
    Matrix4x4 P = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues((float)fx, (float)fy, (float)cx, (float)cy, width, height, 0.3f, 2000f);
    Matrix4x4 V = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    VP = P * V;

    //calibration camera
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    //To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
    } else {
        ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
    }

    // Axis-flip matrices used to convert between OpenCV's right-handed and
    // Unity's left-handed coordinate systems (Y flip and Z flip).
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // Hide the AR overlay objects until a pose is available.
    axes.SetActive(false);
    head.SetActive(false);
    rightEye.SetActive(false);
    leftEye.SetActive(false);
    mouth.SetActive(false);

    mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem>(true);
}
// Use this for initialization.
// Loads a test image, runs the OpenPose MPI Caffe model on it via OpenCV dnn,
// draws the detected body-part points and limb lines, and shows the result on
// this GameObject's quad.
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    Mat img = Imgcodecs.imread(image_filepath);
#if !UNITY_WSA_10_0
    if (img.empty()) {
        Debug.LogError("dnn/COCO_val2014_000000000589.jpg is not loaded.The image file can be downloaded here: \"https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/examples/media/COCO_val2014_000000000589.jpg\" folder. 
");
        // Fall back to a black placeholder so the rest of the pipeline still runs.
        img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
    }
#endif

    //Adust Quad.transform.localScale.
    gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the image to the screen.
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    } else {
        Camera.main.orthographicSize = imageHeight / 2;
    }

    Net net = null;

    if (string.IsNullOrEmpty(caffemodel_filepath) || string.IsNullOrEmpty(prototxt_filepath)) {
        Debug.LogError("model file is not loaded. The model and prototxt file can be downloaded here: \"http://posefs1.perception.cs.cmu.edu/OpenPose/models/pose/mpi/pose_iter_160000.caffemodel\",\"https://github.com/opencv/opencv_extra/blob/master/testdata/dnn/openpose_pose_mpi_faster_4_stages.prototxt\". Please copy to “Assets/StreamingAssets/dnn/” folder. 
");
    } else {
        net = Dnn.readNetFromCaffe(prototxt_filepath, caffemodel_filepath);
        //Intel's Deep Learning Inference Engine backend is supported on Windows 64bit platform only. Please refer to ReadMe.pdf for the setup procedure.
        //net.setPreferableBackend (Dnn.DNN_BACKEND_INFERENCE_ENGINE);
    }

    if (net == null) {
        // No model: annotate the image with instructions instead of running inference.
        Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    } else {
        float frameWidth = img.cols();
        float frameHeight = img.rows();

        // Normalize to [0,1] and resize to the network's input size.
        Mat input = Dnn.blobFromImage(img, 1.0 / 255, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);

        net.setInput(input);

        // TickMeter tm = new TickMeter ();
        // tm.start ();

        Mat output = net.forward();

        // tm.stop ();
        // Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());

        // One 46x46 heat map row per body part.
        output = output.reshape(1, 16);

        float[] data = new float[46 * 46];

        List <Point> points = new List <Point> ();

        for (int i = 0; i < BODY_PARTS.Count; i++) {
            output.get(i, 0, data);

            Mat heatMap = new Mat(1, data.Length, CvType.CV_32FC1);
            heatMap.put(0, 0, data);

            //Originally, we try to find all the local maximums. To simplify a sample
            //we just find a global one. However only a single pose at the same time
            //could be detected this way.
            Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);

            heatMap.Dispose();

            // Map the flattened heat-map index back to image coordinates.
            double x = (frameWidth * (result.maxLoc.x % 46)) / 46;
            double y = (frameHeight * (result.maxLoc.x / 46)) / 46;

            // Keep the point only if its confidence exceeds the threshold.
            if (result.maxVal > 0.1) {
                points.Add(new Point(x, y));
            } else {
                points.Add(null);
            }
        }

        // Draw a line for each body-part pair where both endpoints were found.
        for (int i = 0; i < POSE_PAIRS.GetLength(0); i++) {
            string partFrom = POSE_PAIRS [i, 0];
            string partTo = POSE_PAIRS [i, 1];

            int idFrom = BODY_PARTS [partFrom];
            int idTo = BODY_PARTS [partTo];

            if (points [idFrom] != null && points [idTo] != null) {
                Imgproc.line(img, points [idFrom], points [idTo], new Scalar(0, 255, 0), 3);
                Imgproc.ellipse(img, points [idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                Imgproc.ellipse(img, points [idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
            }
        }

        // Report inference time (ticks converted to milliseconds) on the image.
        MatOfDouble timings = new MatOfDouble();
        long t = net.getPerfProfile(timings);
        Debug.Log("t: " + t);
        Debug.Log("timings.dump(): " + timings.dump());

        double freq = Core.getTickFrequency() / 1000;
        Debug.Log("freq: " + freq);

        Imgproc.putText(img, (t / freq) + "ms", new Point(10, img.height() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.6, new Scalar(0, 0, 255), 2);
    }

    // Convert BGR (imread) to RGB for Unity display.
    Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

    Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

    Utils.matToTexture2D(img, texture);

    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

    Utils.setDebugMode(false);
}
/// <summary>
/// Init this instance.
/// Coroutine that (re)opens the webcam, waits until real frames arrive,
/// allocates buffers and the display texture, derives an approximate camera
/// matrix and FOV, and creates the marker detector.
/// </summary>
private IEnumerator init ()
{
    // Re-initialization: stop any previously running camera and free buffers.
    if (webCamTexture != null) {
        webCamTexture.Stop ();
        initDone = false;

        rgbaMat.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

            webCamDevice = WebCamTexture.devices [cameraIndex];

            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

            break;
        }
    }

    // Fall back to the first device when no camera matched the requested facing.
    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32 ();
                yield return new WaitForEndOfFrame ();
            }
#endif
#endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Pixel buffer, working Mat, and display texture sized to the real frame.
            colors = new Color32[webCamTexture.width * webCamTexture.height];

            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            updateLayout ();

            //set cameraparam
            // Approximate intrinsics: focal length = max image dimension,
            // principal point at the image center.
            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d);
            camMatrix.put (0, 1, 0);
            camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0);
            camMatrix.put (1, 1, max_d);
            camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0);
            camMatrix.put (2, 1, 0);
            camMatrix.put (2, 2, 1.0f);
            Debug.Log ("camMatrix " + camMatrix.dump ());

            // Zero distortion assumed.
            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            //calibration camera
            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            //Adjust Unity Camera FOV
            for (int i = 0; i < ARCamera.Length; i++) {
                ARCamera [i].fieldOfView = (float)fovy [0];
            }

            markerDetector = new MarkerDetector (camMatrix, distCoeffs, markerDesign);

            //Marker Coordinate Initial Matrix
            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            //OpenGL to Unity Coordinate System Convert Matrix
            //http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis.
            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
            Debug.Log ("invertZM " + invertZM.ToString ());

            screenOrientation = Screen.orientation;
            initDone = true;

            break;
        } else {
            yield return 0;
        }
    }
}
/// <summary>
/// Runs one calibration step: searches <paramref name="mat"/> for the configured
/// calibration board, accumulates the detected 2D pattern points into
/// <c>imagePoints</c>, and re-runs camera calibration over all frames collected so far.
/// </summary>
/// <param name="mat">Image (typically grayscale) to search for the calibration pattern.</param>
/// <returns>The reprojection error reported by Calib3d.calibrateCamera, or -1.0 when the frame was invalid.</returns>
public virtual double Calibrate(Mat mat)
{
    // -1.0 signals "no calibration performed this call" to subscribers/callers.
    var repError = -1.0;
    var points = new MatOfPoint2f();
    var patternSize = new Size((int)SizeX, (int)SizeY);
    var found = false;

    // Locate the pattern according to the configured board type.
    switch (boardType) {
    case BoardType.ChessBoard:
        found = Calib3d.findChessboardCorners(mat, patternSize, points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);
        break;
    case BoardType.CirclesGrid:
        found = Calib3d.findCirclesGrid(mat, patternSize, points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
        break;
    case BoardType.AsymmetricCirclesGrid:
        found = Calib3d.findCirclesGrid(mat, patternSize, points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
        break;
    }

    if (found) {
        // Chessboard corners benefit from sub-pixel refinement; circle centers are already sub-pixel accurate.
        if (boardType == BoardType.ChessBoard) {
            Imgproc.cornerSubPix(mat, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
        }
        imagePoints.Add(points);
    } else {
        Debug.Log("Invalid frame.");
        if (points != null) {
            points.Dispose();
        }
        ErrorEvent?.Invoke("Invalid frame.");
        return(repError);
    }

    if (imagePoints.Count < 1) {
        Debug.Log("Not enough points for calibration.");
        ErrorEvent?.Invoke("Not enough points for calibration.");
        return(repError);
    } else {
        // The 3D object-point grid of the board is identical for every captured frame,
        // so the same Mat instance is reused for each entry of objectPoints.
        var objectPoint = new MatOfPoint3f(new Mat(imagePoints[0].rows(), 1, CvType.CV_32FC3));
        CalcCorners(patternSize, squareSize, objectPoint);

        var objectPoints = new List <Mat>();
        for (int i = 0; i < imagePoints.Count; ++i) {
            objectPoints.Add(objectPoint);
        }

        // Calibrate over every frame captured so far; outputs go into the
        // cameraMatrix / distCoeffs / rvecs / tvecs fields.
        repError = Calib3d.calibrateCamera(objectPoints, imagePoints, mat.size(), cameraMatrix, distCoeffs, rvecs, tvecs);
        CalibrationEvent?.Invoke(repError);

        objectPoint.Dispose();
    }

    Debug.Log("-----calibrate-----");
    Debug.Log("repErr: " + repError);
    Debug.Log("camMatrix: " + cameraMatrix.dump());
    Debug.Log("distCoeffs: " + distCoeffs.dump());

    return(repError);
}
/// <summary>
/// Init this instance.
/// Stops any previous camera session, picks a device matching the requested facing,
/// starts the WebCamTexture, then — once real frames arrive — allocates the image
/// buffers, builds approximate pinhole camera intrinsics, derives the Unity camera
/// FOV from them, and creates the marker detector.
/// </summary>
private IEnumerator init ()
{
    if (webCamTexture != null) {
        // Tear down the previous session before re-initializing (e.g. after a camera switch).
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    // Fall back to the first device when no camera with the requested facing exists.
    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Allocate the pixel buffer, OpenCV Mat and display texture at the real camera resolution.
            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);
            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            Camera.main.orthographicSize = webCamTexture.height / 2;

            //set cameraparam
            // Approximate pinhole intrinsics: focal length = max image dimension, principal point = image center.
            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d);
            camMatrix.put (0, 1, 0);
            camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0);
            camMatrix.put (1, 1, max_d);
            camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0);
            camMatrix.put (2, 1, 0);
            camMatrix.put (2, 2, 1.0f);
            Debug.Log ("camMatrix " + camMatrix.dump ());

            // Assume no lens distortion.
            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            //calibration camera
            // Derive FOV / focal length / aspect ratio from the intrinsics above.
            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            //Adjust Unity Camera FOV
            for (int i = 0; i < ARCamera.Length; i++) {
                ARCamera [i].fieldOfView = (float)fovy [0];
            }

            markerDetector = new MarkerDetector (camMatrix, distCoeffs);

            //Marker Coordinate Initial Matrix
            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            //OpenGL to Unity Coordinate System Convert Matrix
            //http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis.
            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
            Debug.Log ("invertZM " + invertZM.ToString ());

            screenOrientation = Screen.orientation;
            initDone = true;

            break;
        } else {
            // No real frame yet — wait a frame and poll again.
            yield return 0;
        }
    }
}

// Update is called once per frame
// Grabs the latest camera frame, corrects its orientation, runs marker detection,
// and drives one AR camera per detected marker.
void Update ()
{
    if (!initDone)
        return;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        //flip to correct direction.
        // NOTE(review): this only handles videoRotationAngle of 0 or 180 — verify on devices reporting 90/270.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }

        markerDetector.processFrame (rgbaMat, 1);
        //Debug.Log ("markerDetector.getTransformations ().Count " + markerDetector.getTransformations ().Count);

        // Hide every AR camera, then re-enable one per detected marker below.
        for (int i = 0; i < ARCamera.Length; i++) {
            ARCamera [i].gameObject.SetActive (false);
        }

        int markerCount = markerDetector.getTransformations ().Count;
        for (int i = 0; i < markerCount; i++) {
            // No more AR cameras than were configured.
            if (i > ARCamera.Length - 1)
                break;

            ARCamera [i].gameObject.SetActive (true);

            //Marker to Camera Coordinate System Convert Matrix
            transformationM = markerDetector.getTransformations () [i];
            //Debug.Log ("transformationM " + transformationM.ToString ());

            worldToCameraM = lookAtM * transformationM * invertZM;
            //Debug.Log ("worldToCameraM " + worldToCameraM.ToString ());

            ARCamera [i].worldToCameraMatrix = worldToCameraM;
        }

        Utils.matToTexture2D (rgbaMat, texture, colors);

        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    }
}

void OnDisable ()
{
    // Release the hardware camera when this component is disabled.
    webCamTexture.Stop ();
}

/// <summary>
/// Gets the look at matrix.
/// </summary>
/// <returns>The look at matrix.</returns>
/// <param name="pos">Position.</param>
/// <param name="target">Target.</param>
/// <param name="up">Up.</param>
private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
{
    // Build an orthonormal basis: z points from target back to pos (OpenGL-style view).
    Vector3 z = Vector3.Normalize (pos - target);
    Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
    Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));

    // Rows hold the basis vectors; the fourth column translates pos into view space.
    Matrix4x4 result = new Matrix4x4 ();
    result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
    result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
    result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
    result.SetRow (3, new Vector4 (0, 0, 0, 1));
    return result;
}

void OnGUI ()
{
    // Scale the IMGUI to a 240pt-high reference resolution.
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("MarkerBasedARSample");
    }
    if (GUILayout.Button ("change camera")) {
        // Switching facing restarts the whole init coroutine.
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    GUILayout.EndVertical ();
}
}
/// <summary>
/// Init this instance.
/// Hides the AR overlay objects, (re)starts the requested camera, and once real
/// frames arrive sets up the face-detection cascade, the camera intrinsics and
/// the view matrices used for head-pose AR.
/// </summary>
private IEnumerator init()
{
    // Hide all AR overlay objects until a face pose has been estimated.
    axes.SetActive (false);
    head.SetActive (false);
    rightEye.SetActive (false);
    leftEye.SetActive (false);
    mouth.SetActive (false);

    if (webCamTexture != null) {
        // Tear down the previous session before re-initializing (e.g. after a camera switch).
        faceTracker.reset ();
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
        grayMat.Dispose ();
        cascade.Dispose ();
        camMatrix.Dispose ();
        distCoeffs.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    // Fall back to the first device when no camera with the requested facing exists.
    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Allocate buffers at the real camera resolution.
            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            // Haar cascade used only for the initial face detection; tracking takes over afterwards.
            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);
            gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            Camera.main.orthographicSize = webCamTexture.height / 2;

            // Approximate pinhole intrinsics: focal length = max image dimension, principal point = image center.
            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d);
            camMatrix.put (0, 1, 0);
            camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0);
            camMatrix.put (1, 1, max_d);
            camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0);
            camMatrix.put (2, 1, 0);
            camMatrix.put (2, 2, 1.0f);

            // Derive FOV / focal length / aspect ratio from the intrinsics above.
            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            // Match the Unity AR camera's FOV to the computed vertical FOV.
            ARCamera.fieldOfView = (float)fovy [0];

            Debug.Log ("camMatrix " + camMatrix.dump ());

            // Assume no lens distortion.
            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            // OpenGL-to-Unity handedness conversion (negate Z).
            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

            initDone = true;

            break;
        } else {
            // No real frame yet — wait a frame and poll again.
            yield return 0;
        }
    }
}

// Update is called once per frame
// Grabs the latest frame, (re)detects or tracks the face, solves the head pose with
// solvePnP, filters pose noise, and drives the AR camera and overlay objects.
void Update ()
{
    if (!initDone)
        return;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        //flip to correct direction.
        // NOTE(review): this only handles videoRotationAngle of 0 or 180 — verify on devices reporting 90/270.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }

        //convert image to greyscale
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        // No tracked points yet — bootstrap the tracker with a Haar-cascade face detection.
        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            //convert image to greyscale
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                // NOTE(review): the min-size uses cols() for both dimensions — presumably a square minimum based on image width; confirm intent.
                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());
                    //add initial face points from MatOfRect
                    faceTracker.addPoints (faces);

                    //draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                    }
                }
            }
        }

        //track face points.if face points <= 0, always return false.
        if (faceTracker.track (grayMat, faceTrackerParams)) {
            if (isDrawPoints)
                faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

            Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

            Point[] points = faceTracker.getPoints () [0];

            if (points.Length > 0) {
                // for (int i = 0; i < points.Length; i++) {
                //     Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                // }

                // 2D landmarks matched against the 3D model points used by solvePnP.
                imagePoints.fromArray (
                    points [31],//l eye
                    points [36],//r eye
                    points [67],//nose
                    points [48],//l mouth
                    points [54] //r mouth
                    // ,
                    // points [1],//l ear
                    // points [13]//r ear
                );

                Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                // Accept the pose only when the head is in front of the camera and within a plausible distance.
                if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat ();
                        rvec.copyTo (oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat ();
                        tvec.copyTo (oldTvec);
                    }

                    //filter Rvec Noise.
                    // Reject this pose if any rotation component jumped more than rvecNoiseFilterRange since the last frame.
                    using (Mat absDiffRvec = new Mat ()) {
                        Core.absdiff (rvec, oldRvec, absDiffRvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump());
                        using (Mat cmpRvec = new Mat ()) {
                            Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpRvec) > 0)
                                isRefresh = false;
                        }
                    }

                    //filter Tvec Noise.
                    // Same rejection for translation jumps beyond tvecNoiseFilterRange.
                    using (Mat absDiffTvec = new Mat ()) {
                        Core.absdiff (tvec, oldTvec, absDiffTvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump());
                        using (Mat cmpTvec = new Mat ()) {
                            Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpTvec) > 0)
                                isRefresh = false;
                        }
                    }
                }

                if (isRefresh) {
                    if (!rightEye.activeSelf)
                        rightEye.SetActive (true);
                    if (!leftEye.activeSelf)
                        leftEye.SetActive (true);

                    // Heuristic mouth-open test from landmark distances, scaled by interpupillary distance.
                    if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2
                        && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
                        || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {
                        if (!mouth.activeSelf)
                            mouth.SetActive (true);
                    } else {
                        if (mouth.activeSelf)
                            mouth.SetActive (false);
                    }

                    rvec.copyTo (oldRvec);
                    tvec.copyTo (oldTvec);

                    // Rotation vector -> 3x3 rotation matrix.
                    Calib3d.Rodrigues (rvec, rotM);

                    // Assemble the 4x4 extrinsic matrix [R|t] for Unity.
                    transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                    transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                    transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                    transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                    modelViewMtrx = lookAtM * transformationM * invertZM;

                    ARCamera.worldToCameraMatrix = modelViewMtrx;

                    // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                }
            }
        }

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    // Manual reset: clear the tracker and stored poses, restore the default camera.
    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
        if (oldRvec != null) {
            oldRvec.Dispose ();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose ();
            oldTvec = null;
        }

        ARCamera.ResetWorldToCameraMatrix ();

        rightEye.SetActive (false);
        leftEye.SetActive (false);
        mouth.SetActive (false);
    }
}

void OnDisable ()
{
    // Release the hardware camera when this component is disabled.
    webCamTexture.Stop ();
}

/// <summary>
/// Gets the look at matrix.
/// </summary>
/// <returns>The look at matrix.</returns>
/// <param name="pos">Position.</param>
/// <param name="target">Target.</param>
/// <param name="up">Up.</param>
private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
{
    // Build an orthonormal basis: z points from target back to pos (OpenGL-style view).
    Vector3 z = Vector3.Normalize (pos - target);
    Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
    Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));

    // Rows hold the basis vectors; the fourth column translates pos into view space.
    Matrix4x4 result = new Matrix4x4 ();
    result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
    result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
    result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
    result.SetRow (3, new Vector4 (0, 0, 0, 1));
    return result;
}

void OnGUI ()
{
    // Scale the IMGUI to a 240pt-high reference resolution.
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("FaceTrackerSample");
    }
    if (GUILayout.Button ("change camera")) {
        // Switching facing restarts the whole init coroutine.
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    if (GUILayout.Button ("drawPoints")) {
        if (isDrawPoints) {
            isDrawPoints = false;
        } else {
            isDrawPoints = true;
        }
    }
    if (GUILayout.Button ("axes")) {
        if (axes.activeSelf) {
            axes.SetActive (false);
        } else {
            axes.SetActive (true);
        }
    }
    if (GUILayout.Button ("head")) {
        if (head.activeSelf) {
            head.SetActive (false);
        } else {
            head.SetActive (true);
        }
    }
    GUILayout.EndVertical ();
}
}
/// <summary>
/// Detects ArUco markers in imgTexture, optionally estimates their 6-DoF pose,
/// and aligns the AR camera / AR object so Unity content registers with the
/// first detected marker. Draws detection results back onto the texture.
/// </summary>
private void DetectMarkers()
{
    Utils.texture2DToMat(imgTexture, rgbMat);
    Debug.Log("imgMat dst ToString " + rgbMat.ToString());

    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the image quad to the screen, remembering the scale applied to the displayed image.
    float width = rgbMat.width();
    float height = rgbMat.height();
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    // set camera parameters.
    // Approximate pinhole intrinsics: focal length = max image dimension, principal point = image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // Assume no lens distortion.
    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // calibration camera matrix values.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale) {
        arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    } else {
        arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    // Display objects near the camera.
    arCamera.nearClipPlane = 0.01f;

    Mat ids = new Mat();
    List <Mat> corners = new List <Mat> ();
    List <Mat> rejectedCorners = new List <Mat> ();
    Mat rvecs = new Mat();
    Mat tvecs = new Mat();
    Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
    DetectorParameters detectorParams = DetectorParameters.create();
    Dictionary dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

    // detect markers.
    Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

    // if at least one marker detected
    if (ids.total() > 0) {
        Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));

        // estimate pose.
        if (applyEstimationPose) {
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

            for (int i = 0; i < ids.total(); i++) {
                // Each marker's pose occupies one 1x1 (3-channel) cell of rvecs/tvecs.
                using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1))) {
                    // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                    Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                }

                // This example can display the ARObject on only first detected marker.
                if (i == 0) {
                    // Get translation vector
                    double[] tvecArr = tvecs.get(i, 0);

                    // Get rotation vector
                    double[] rvecArr = rvecs.get(i, 0);
                    Mat rvec = new Mat(3, 1, CvType.CV_64FC1);
                    rvec.put(0, 0, rvecArr);

                    // Convert rotation vector to rotation matrix.
                    Calib3d.Rodrigues(rvec, rotMat);
                    double[] rotMatArr = new double[rotMat.total()];
                    rotMat.get(0, 0, rotMatArr);

                    // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                    Matrix4x4 transformationM = new Matrix4x4(); // from OpenCV
                    transformationM.SetRow(0, new Vector4((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
                    transformationM.SetRow(1, new Vector4((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
                    transformationM.SetRow(2, new Vector4((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                    Debug.Log("transformationM " + transformationM.ToString());

                    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                    Debug.Log("invertYM " + invertYM.ToString());

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    Matrix4x4 ARM = invertYM * transformationM * invertYM;

                    if (shouldMoveARCamera) {
                        // Keep the AR object fixed and move the camera to match the marker pose.
                        ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;
                        Debug.Log("ARM " + ARM.ToString());
                        ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
                    } else {
                        // Keep the camera fixed and move the AR object onto the marker.
                        ARM = arCamera.transform.localToWorldMatrix * ARM;
                        Debug.Log("ARM " + ARM.ToString());
                        ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                    }
                }
            }
        }
    }

    if (showRejectedCorners && rejectedCorners.Count > 0) {
        Aruco.drawDetectedMarkers(rgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
    }

    Utils.matToTexture2D(rgbMat, texture);
}
// Use this for initialization
/// <summary>
/// Runs single-image OpenPose inference: loads the image and the Caffe model, runs
/// the network, extracts one keypoint per body part from the output heatmaps, draws
/// the detected skeleton, and displays the result on this GameObject's texture.
/// </summary>
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    Mat img = Imgcodecs.imread(image_filepath);
    if (img.empty()) {
        Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
        // Fall back to a black canvas so the rest of the pipeline still runs.
        img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
    }

    //Adust Quad.transform.localScale.
    gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the image to the screen.
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    } else {
        Camera.main.orthographicSize = imageHeight / 2;
    }

    Net net = null;
    if (string.IsNullOrEmpty(caffemodel_filepath) || string.IsNullOrEmpty(prototxt_filepath)) {
        Debug.LogError(caffemodel_filepath + " or " + prototxt_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    } else {
        net = Dnn.readNet(prototxt_filepath, caffemodel_filepath);
    }

    if (net == null) {
        Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    } else {
        float frameWidth = img.cols();
        float frameHeight = img.rows();

        Mat input = Dnn.blobFromImage(img, inScale, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);
        net.setInput(input);

        Mat output = net.forward();

        // FIX: the heatmap grid size was hard-coded as 46 (valid only for a 368x368
        // input with stride 8); derive it from the NCHW output so other input sizes work.
        // Must be read before reshape() below changes the Mat's dimensionality.
        int heatMapHeight = output.size(2);
        int heatMapWidth = output.size(3);

        float[] data = new float[heatMapHeight * heatMapWidth];
        // One row per output channel (one heatmap per body part).
        output = output.reshape(1, output.size(1));

        // Find one global maximum per heatmap. Originally, we try to find all the local
        // maximums; finding only the global one means only a single pose can be detected.
        List<Point> points = new List<Point>();
        for (int i = 0; i < BODY_PARTS.Count; i++) {
            output.get(i, 0, data);

            Mat heatMap = new Mat(1, data.Length, CvType.CV_32FC1);
            heatMap.put(0, 0, data);
            Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);
            heatMap.Dispose();

            // FIX: the heatmap was flattened to a single row, so the peak's row/column
            // must be recovered with integer division/modulo; the previous code divided
            // the raw double index (no floor), skewing the y coordinate.
            int peakIndex = (int)result.maxLoc.x;
            double x = (frameWidth * (peakIndex % heatMapWidth)) / heatMapWidth;
            double y = (frameHeight * (peakIndex / heatMapWidth)) / heatMapHeight;

            // Keep only confident detections; null marks a missing body part.
            points.Add(result.maxVal > 0.1 ? new Point(x, y) : null);
        }

        // Draw the skeleton: one line per pose pair whose endpoints were both detected.
        for (int i = 0; i < POSE_PAIRS.GetLength(0); i++) {
            string partFrom = POSE_PAIRS[i, 0];
            string partTo = POSE_PAIRS[i, 1];
            int idFrom = BODY_PARTS[partFrom];
            int idTo = BODY_PARTS[partTo];

            if (points[idFrom] != null && points[idTo] != null) {
                Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
                Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
            }
        }

        // Report inference time on the image.
        MatOfDouble timings = new MatOfDouble();
        long t = net.getPerfProfile(timings);
        Debug.Log("t: " + t);
        Debug.Log("timings.dump(): " + timings.dump());

        double freq = Core.getTickFrequency() / 1000;
        Debug.Log("freq: " + freq);

        Imgproc.putText(img, (t / freq) + "ms", new Point(10, img.height() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.6, new Scalar(0, 0, 255), 2);
    }

    // OpenCV loads images as BGR; Unity textures expect RGB(A).
    Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

    Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    Utils.setDebugMode(false);
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// Sets up the preview texture, fits the orthographic camera to the screen,
/// builds a synthetic pinhole camera matrix from the frame size, and creates
/// the marker detector.
/// </summary>
public void OnWebCamTextureToMatHelperInited() {
    Debug.Log("OnWebCamTextureToMatHelperInited");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    // Hoist the frame dimensions; the original queried cols()/rows() (native
    // calls) repeatedly for the same values.
    int cols = webCamTextureMat.cols();
    int rows = webCamTextureMat.rows();

    colors = new Color32[cols * rows];
    texture = new Texture2D(cols, rows, TextureFormat.RGBA32, false);

    // One image pixel maps to one world unit on the preview quad.
    gameObject.transform.localScale = new Vector3(cols, rows, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the quad to the screen while preserving the image aspect ratio.
    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;
    float imageScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

    // Set camera param: synthetic pinhole model with focal length = max image
    // dimension and principal point at the image center (no real calibration
    // data is available for the webcam).
    int max_d = Mathf.Max(rows, cols);
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, max_d);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cols / 2.0f);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, max_d);
    camMatrix.put(1, 2, rows / 2.0f);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // Assume no lens distortion.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Calibration camera: derive FOV and related values from the intrinsics.
    Size imageSize = new Size(cols * imageScale, rows * imageScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // Adjust Unity Camera FOV along the axis chosen by the screen-fit above.
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)fovx [0];
    } else {
        ARCamera.fieldOfView = (float)fovy [0];
    }

    // Collect the marker designs configured in the scene and build the detector.
    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++) {
        markerDesigns [i] = markerSettings [i].markerDesign;
    }
    markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);

    // Conversion matrices between the OpenCV (right-handed) and Unity
    // (left-handed) coordinate systems.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // If WebCamera is frontFacing, flip Mat horizontally so the preview mirrors naturally.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}
// Use this for initialization: detect a marker in a still image and align the
// AR camera with it.
void Start () {
    // One image pixel per world unit; size the orthographic camera to show the
    // image at full height.
    gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
    Camera.main.orthographicSize = imgTexture.height / 2;

    // Copy the source texture into an RGBA OpenCV Mat.
    Mat sourceMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat (imgTexture, sourceMat);
    Debug.Log ("imgMat dst ToString " + sourceMat.ToString ());

    // Synthetic pinhole camera matrix: focal length = max image dimension,
    // principal point at the image center.
    int maxDim = Mathf.Max (sourceMat.rows (), sourceMat.cols ());
    Mat camIntrinsics = new Mat (3, 3, CvType.CV_64FC1);
    camIntrinsics.put (0, 0, maxDim);
    camIntrinsics.put (0, 1, 0);
    camIntrinsics.put (0, 2, sourceMat.cols () / 2.0f);
    camIntrinsics.put (1, 0, 0);
    camIntrinsics.put (1, 1, maxDim);
    camIntrinsics.put (1, 2, sourceMat.rows () / 2.0f);
    camIntrinsics.put (2, 0, 0);
    camIntrinsics.put (2, 1, 0);
    camIntrinsics.put (2, 2, 1.0f);
    Debug.Log ("camMatrix " + camIntrinsics.dump ());

    // No lens distortion is assumed.
    MatOfDouble distortion = new MatOfDouble (0, 0, 0, 0);
    Debug.Log ("distCoeffs " + distortion.dump ());

    // Recover field of view and related characteristics from the intrinsics.
    Size calibSize = new Size (sourceMat.cols (), sourceMat.rows ());
    double apertureW = 0;
    double apertureH = 0;
    double[] fovX = new double[1];
    double[] fovY = new double[1];
    double[] focal = new double[1];
    Point principal = new Point ();
    double[] aspect = new double[1];
    Calib3d.calibrationMatrixValues (camIntrinsics, calibSize, apertureW, apertureH, fovX, fovY, focal, principal, aspect);
    Debug.Log ("imageSize " + calibSize.ToString ());
    Debug.Log ("apertureWidth " + apertureW);
    Debug.Log ("apertureHeight " + apertureH);
    Debug.Log ("fovx " + fovX [0]);
    Debug.Log ("fovy " + fovY [0]);
    Debug.Log ("focalLength " + focal [0]);
    Debug.Log ("principalPoint " + principal.ToString ());
    Debug.Log ("aspectratio " + aspect [0]);

    // Adjust the Unity camera's vertical FOV to match the computed one.
    ARCamera.fieldOfView = (float)fovY [0];

    // Run marker detection over the still image.
    MarkerDetector detector = new MarkerDetector (camIntrinsics, distortion, markerDesign);
    detector.processFrame (sourceMat, 1);

    // Marker coordinate initial matrix.
    Matrix4x4 lookAt = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
    Debug.Log ("lookAt " + lookAt.ToString ());

    // Marker-to-camera coordinate system convert matrix, if a marker was found.
    if (detector.getTransformations ().Count > 0) {
        Matrix4x4 markerToCamera = detector.getTransformations () [0];
        Debug.Log ("transformationM " + markerToCamera.ToString ());

        // Unity camera space matches the OpenGL convention (forward = -Z),
        // unlike Unity's usual +Z forward, so flip the Z axis.
        // http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html
        Matrix4x4 flipZ = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
        Debug.Log ("invertZM " + flipZ.ToString ());

        Matrix4x4 worldToCam = lookAt * markerToCamera * flipZ;
        Debug.Log ("worldToCameraM " + worldToCam.ToString ());
        ARCamera.worldToCameraMatrix = worldToCam;
    } else {
        Debug.LogWarning ("Marker is not detected");
    }

    // Display the processed image on the quad.
    Texture2D outputTexture = new Texture2D (sourceMat.cols (), sourceMat.rows (), TextureFormat.RGBA32, false);
    Utils.matToTexture2D (sourceMat, outputTexture);
    gameObject.GetComponent<Renderer> ().material.mainTexture = outputTexture;
}
// Use this for initialization.
// Learns a pattern image, searches for it in a still image, and poses either
// the AR camera or the AR object so virtual content lines up with the pattern.
void Start() {
    // Load the pattern image into an RGBA Mat.
    Mat patternMat = new Mat(patternTexture.height, patternTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat(patternTexture, patternMat);
    Debug.Log("patternMat dst ToString " + patternMat.ToString());

    // Show the pattern in the UI, scaled to preserve its aspect ratio.
    patternRawImage.texture = patternTexture;
    patternRawImage.rectTransform.localScale = new Vector3(1.0f, (float)patternMat.height() / (float)patternMat.width(), 1.0f);

    // Load the scene image to search.
    Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat(imgTexture, imgMat);
    Debug.Log("imgMat dst ToString " + imgMat.ToString());

    // One image pixel per world unit on the display quad.
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = imgMat.width();
    float height = imgMat.height();

    // Fit the image to the screen while preserving aspect ratio.
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    //set cameraparam: synthetic pinhole intrinsics — focal length = max image
    //dimension, principal point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // No lens distortion is assumed.
    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    //calibration camera: derive FOV and related values from the intrinsics.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    //To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    } else {
        ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    //Learning the feature points of the pattern image.
    Pattern pattern = new Pattern();
    PatternTrackingInfo patternTrackingInfo = new PatternTrackingInfo();
    PatternDetector patternDetector = new PatternDetector(null, null, null, true);
    patternDetector.buildPatternFromImage(patternMat, pattern);
    patternDetector.train(pattern);

    // Search the scene image for the learned pattern.
    bool patternFound = patternDetector.findPattern(imgMat, patternTrackingInfo);
    Debug.Log("patternFound " + patternFound);
    if (patternFound) {
        // Estimate the pattern pose relative to the camera.
        patternTrackingInfo.computePose(pattern, camMatrix, distCoeffs);
        Matrix4x4 transformationM = patternTrackingInfo.pose3d;
        Debug.Log("transformationM " + transformationM.ToString());

        // Conversion matrices between the OpenCV (right-handed) and Unity
        // (left-handed) coordinate systems.
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
        Debug.Log("invertZM " + invertZM.ToString());
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
        Debug.Log("invertYM " + invertYM.ToString());

        if (shouldMoveARCamera) {
            // Keep the AR object fixed; move the camera by the inverse pose.
            Matrix4x4 ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
            Debug.Log("ARM " + ARM.ToString());
            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
        } else {
            // Keep the camera fixed; move the AR object onto the pattern.
            Matrix4x4 ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
            Debug.Log("ARM " + ARM.ToString());
            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
        }
    }

    // Display the (possibly annotated) scene image on the quad.
    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(imgMat, texture);
    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
}
// Opens the dance video, sizes the preview texture from the first frame, and
// prepares the camera intrinsics, coordinate-conversion matrices and AR
// objects used by the per-frame face-pose estimation.
private void Run() {
    //set 3d face object points.
    // Millimeter-scale 3D reference points for the 68-landmark model
    // (used with solvePnP elsewhere — ordering must match the 2D points).
    objectPoints68 = new MatOfPoint3f(
        new Point3(-34, 90, 83), //l eye (Interpupillary breadth)
        new Point3(34, 90, 83), //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 120), //nose (Nose top)
        new Point3(-26, 15, 83), //l mouse (Mouth breadth)
        new Point3(26, 15, 83), //r mouse (Mouth breadth)
        new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
        new Point3(79, 90, 0.0) //r ear (Bitragion breadth)
    );
    // Reference points for the 5-landmark model.
    objectPoints5 = new MatOfPoint3f(
        new Point3(-23, 90, 83), //l eye (Inner corner of the eye)
        new Point3(23, 90, 83), //r eye (Inner corner of the eye)
        new Point3(-50, 90, 80), //l eye (Tail of the eye)
        new Point3(50, 90, 80), //r eye (Tail of the eye)
        new Point3(0.0, 50, 120) //nose (Nose top)
    );
    imagePoints = new MatOfPoint2f();

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    rgbMat = new Mat();

    // Open the video file and log its capture properties.
    capture = new VideoCapture();
    capture.open(dance_avi_filepath);
    if (capture.isOpened()) {
        Debug.Log("capture.isOpened() true");
    } else {
        Debug.Log("capture.isOpened() false");
    }
    Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
    Debug.Log("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get(Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
    Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
    Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
    Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
    Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
    Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
    Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
    Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

    // Read one frame to learn the actual frame size, then rewind to frame 0.
    capture.grab();
    capture.retrieve(rgbMat, 0);
    int frameWidth = rgbMat.cols();
    int frameHeight = rgbMat.rows();
    texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
    gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
    capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    if (fpsMonitor != null) {
        fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
        fpsMonitor.Add("width", frameWidth.ToString());
        fpsMonitor.Add("height", frameHeight.ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    // Fit the video to the screen while preserving aspect ratio.
    float width = (float)frameWidth;
    float height = (float)frameHeight;
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    //set cameraparam: synthetic pinhole intrinsics — focal length = max frame
    //dimension, principal point at the frame center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // No lens distortion is assumed.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    //calibration camera: derive FOV and related values from the intrinsics.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    //To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
    } else {
        ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
    }

    // Conversion matrices between the OpenCV (right-handed) and Unity
    // (left-handed) coordinate systems.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // Hide the AR overlay objects until a face pose has been estimated.
    axes.SetActive(false);
    head.SetActive(false);
    rightEye.SetActive(false);
    leftEye.SetActive(false);
    mouth.SetActive(false);
    mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem> (true);
}
// Accumulates one calibration frame and re-runs camera calibration over all
// frames captured so far.
// Returns the reprojection error of the calibration, or -1 when the frame
// contained no usable pattern (in which case frameMat is disposed).
// Note: on success, frameMat ownership transfers to allImgs.
private double CaptureFrame(Mat frameMat) {
    double repErr = -1;
    switch (markerType) {
    default:
    case MarkerType.ChArUcoBoard:
        // Detect ArUco markers (optionally refining with the board layout).
        List <Mat> corners = new List <Mat>();
        Mat ids = new Mat();
        Aruco.detectMarkers(frameMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);
        if (refineMarkerDetection) {
            Aruco.refineDetectedMarkers(frameMat, charucoBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
        }
        if (ids.total() > 0) {
            Debug.Log("Frame captured.");
            allCorners.Add(corners);
            allIds.Add(ids);
            allImgs.Add(frameMat);
        } else {
            // No markers found: release everything allocated for this frame.
            Debug.Log("Invalid frame.");
            frameMat.Dispose();
            if (ids != null) {
                ids.Dispose();
            }
            foreach (var item in corners) {
                item.Dispose();
            }
            corners.Clear();
            return(-1);
        }
        // calibrate camera using aruco markers
        //double arucoRepErr = CalibrateCameraAruco (allCorners, allIds, charucoBoard, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags);
        //Debug.Log ("arucoRepErr: " + arucoRepErr);
        // calibrate camera using charuco
        // NOTE(review): calibrationFlags is passed twice here — confirm the
        // second occurrence isn't meant to be a different argument of
        // CalibrateCameraCharuco (signature not visible in this file).
        repErr = CalibrateCameraCharuco(allCorners, allIds, charucoBoard, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags, calibrationFlags);
        break;
    case MarkerType.ChessBoard:
    case MarkerType.CirclesGlid:
    case MarkerType.AsymmetricCirclesGlid:
        // Find the planar calibration pattern appropriate for the marker type.
        MatOfPoint2f points = new MatOfPoint2f();
        Size patternSize = new Size((int)squaresX, (int)squaresY);
        bool found = false;
        switch (markerType) {
        default:
        case MarkerType.ChessBoard:
            found = Calib3d.findChessboardCorners(frameMat, patternSize, points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);
            break;
        case MarkerType.CirclesGlid:
            found = Calib3d.findCirclesGrid(frameMat, patternSize, points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
            break;
        case MarkerType.AsymmetricCirclesGlid:
            found = Calib3d.findCirclesGrid(frameMat, patternSize, points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
            break;
        }
        if (found) {
            Debug.Log("Frame captured.");
            // Chessboard corners benefit from sub-pixel refinement.
            if (markerType == MarkerType.ChessBoard) {
                Imgproc.cornerSubPix(frameMat, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
            }
            imagePoints.Add(points);
            allImgs.Add(frameMat);
        } else {
            // Pattern not found: release this frame's allocations.
            Debug.Log("Invalid frame.");
            frameMat.Dispose();
            if (points != null) {
                points.Dispose();
            }
            return(-1);
        }
        if (imagePoints.Count < 1) {
            Debug.Log("Not enough points for calibration.");
            repErr = -1;
        } else {
            // The same board geometry is reused as the object points for
            // every captured view.
            MatOfPoint3f objectPoint = new MatOfPoint3f(new Mat(imagePoints[0].rows(), 1, CvType.CV_32FC3));
            CalcChessboardCorners(patternSize, squareSize, objectPoint, markerType);
            List <Mat> objectPoints = new List <Mat>();
            for (int i = 0; i < imagePoints.Count; ++i) {
                objectPoints.Add(objectPoint);
            }
            repErr = Calib3d.calibrateCamera(objectPoints, imagePoints, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags);
            objectPoint.Dispose();
        }
        break;
    }
    Debug.Log("repErr: " + repErr);
    Debug.Log("camMatrix: " + camMatrix.dump());
    Debug.Log("distCoeffs: " + distCoeffs.dump());
    return(repErr);
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Builds the (downscaled) preview texture, the camera matrix from externally
/// supplied intrinsics, and the ArUco detection state used per frame.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized() {
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Processing runs on a frame downscaled by downscaleRatio.
    float width = Mathf.Round(webCamTextureMat.width() / downscaleRatio);
    float height = Mathf.Round(webCamTextureMat.height() / downscaleRatio);

    texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);

    previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
    previewQuad.transform.localScale = new Vector3(1, height / width, 1);
    previewQuad.SetActive(displayCameraPreview);

    // Intrinsics come from serialized fields; the principal point must be
    // scaled to the downscaled frame (focal lengths fx/fy are used as-is —
    // NOTE(review): fx/fy are not divided by downscaleRatio here, confirm
    // that is intentional).
    double fx = this.fx;
    double fy = this.fy;
    double cx = this.cx / downscaleRatio;
    double cy = this.cy / downscaleRatio;

    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    // Distortion coefficients also come from serialized fields.
    distCoeffs = new MatOfDouble(distCoeffs1, distCoeffs2, distCoeffs3, distCoeffs4, distCoeffs5);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    //Calibration camera: log FOV and related values derived from the intrinsics.
    Size imageSize = new Size(width, height);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // Working buffers for per-frame marker detection.
    // NOTE(review): grayMat is allocated as 3-channel (CV_8UC3) despite the
    // name — confirm the intended format against its usage elsewhere.
    grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
    ids = new Mat();
    corners = new List <Mat> ();
    rejected = new List <Mat> ();
    rvecs = new Mat();
    tvecs = new Mat();
    rotMat = new Mat(3, 3, CvType.CV_64FC1);
    transformationM = new Matrix4x4();

    // Conversion matrices between the OpenCV (right-handed) and Unity
    // (left-handed) coordinate systems.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    detectorParams = DetectorParameters.create();
    dictionary = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250);

    //If WebCamera is frontFaceing,flip Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) {
        webCamTextureToMatHelper.flipHorizontal = true;
    }

    // Buffers used by the worker-thread pipeline.
    rgbaMat4Thread = new Mat();
    downScaleRgbaMat = new Mat();
    rgbMat4preview = new Mat();
}
// Use this for initialization.
// Sets up the dlib face-landmark detector, opens the dance video, and prepares
// camera intrinsics plus AR overlay objects for head-pose estimation.
void Start() {
    //set 3d face object points.
    // Millimeter-scale 3D reference points matched against detected 2D
    // landmarks (ordering must match the 2D points used with solvePnP).
    objectPoints = new MatOfPoint3f(
        new Point3(-31, 72, 86), //l eye
        new Point3(31, 72, 86), //r eye
        new Point3(0, 40, 114), //nose
        new Point3(-20, 15, 90), //l mouse
        new Point3(20, 15, 90), //r mouse
        new Point3(-69, 76, -2), //l ear
        new Point3(69, 76, -2) //r ear
    );
    imagePoints = new MatOfPoint2f();
    rvec = new Mat();
    tvec = new Mat();
    rotM = new Mat(3, 3, CvType.CV_64FC1);

    faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("shape_predictor_68_face_landmarks.dat"));

    rgbMat = new Mat();

    // Open the video file and log its capture properties.
    capture = new VideoCapture();
    capture.open(OpenCVForUnity.Utils.getFilePath("dance.avi"));
    if (capture.isOpened()) {
        Debug.Log("capture.isOpened() true");
    } else {
        Debug.Log("capture.isOpened() false");
    }
    Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
    Debug.Log("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get(Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
    Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
    Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
    Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
    Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
    Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
    Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
    Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

    // frameWidth/frameHeight are not assigned in this method —
    // NOTE(review): presumably fields set elsewhere (e.g. serialized or
    // constants); confirm they hold the video's dimensions before use here.
    colors = new Color32[(int)(frameWidth) * (int)(frameHeight)];
    texture = new Texture2D((int)(frameWidth), (int)(frameHeight), TextureFormat.RGBA32, false);
    gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the video to the screen while preserving aspect ratio.
    float width = 0;
    float height = 0;
    width = gameObject.transform.localScale.x;
    height = gameObject.transform.localScale.y;
    float imageScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

    // Synthetic pinhole intrinsics: focal length = max frame dimension,
    // principal point at the frame center.
    int max_d = (int)Mathf.Max((float)frameHeight, (float)frameWidth);
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, max_d);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, (float)frameWidth / 2.0f);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, max_d);
    camMatrix.put(1, 2, (float)frameHeight / 2.0f);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);

    // Derive FOV and related values from the intrinsics.
    Size imageSize = new Size((float)frameWidth * imageScale, (float)frameHeight * imageScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx [0]);
    Debug.Log("fovy " + fovy [0]);
    Debug.Log("focalLength " + focalLength [0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio [0]);

    // Adjust the Unity AR camera FOV along the axis chosen by the screen fit.
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)fovx [0];
    } else {
        ARCamera.fieldOfView = (float)fovy [0];
    }

    Debug.Log("camMatrix " + camMatrix.dump());

    // No lens distortion is assumed.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Conversion matrices between the OpenCV (right-handed) and Unity
    // (left-handed) coordinate systems.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // Hide the AR overlay objects until a head pose has been estimated.
    axes.SetActive(false);
    head.SetActive(false);
    rightEye.SetActive(false);
    leftEye.SetActive(false);
    mouth.SetActive(false);
    mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem> (true);
}
// Coroutine that (re)initializes the webcam-based face tracker: tears down any
// previous session, selects a camera device, waits for the first real frame
// (with iOS-specific workarounds), then allocates Mats, the cascade classifier
// and synthetic camera intrinsics.
private IEnumerator init () {
    // Hide AR overlay objects until tracking produces a pose.
    axes.SetActive (false);
    head.SetActive (false);
    rightEye.SetActive (false);
    leftEye.SetActive (false);
    mouth.SetActive (false);

    // Tear down a previous session before re-initializing.
    if (webCamTexture != null) {
        faceTracker.reset ();
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
        grayMat.Dispose ();
        cascade.Dispose ();
        camMatrix.Dispose ();
        distCoeffs.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }
    // Fall back to the first device when no camera matched the facing preference.
    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }
    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            // Unity 5.2 iOS workaround: poll pixels until the reported size is real.
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32 ();
                yield return new WaitForEndOfFrame ();
            }
#endif
#endif
            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Allocate per-frame buffers now that the real frame size is known.
            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            updateLayout ();

            // Haar cascade used for the initial face detection.
            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            // Synthetic pinhole intrinsics: focal length = max frame dimension,
            // principal point at the frame center.
            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d);
            camMatrix.put (0, 1, 0);
            camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0);
            camMatrix.put (1, 1, max_d);
            camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0);
            camMatrix.put (2, 1, 0);
            camMatrix.put (2, 2, 1.0f);

            // Derive FOV and related values from the intrinsics.
            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            // Match the Unity AR camera's vertical FOV to the computed one.
            ARCamera.fieldOfView = (float)fovy [0];

            Debug.Log ("camMatrix " + camMatrix.dump ());

            // No lens distortion is assumed.
            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            // Marker/AR coordinate setup: initial look-at and OpenGL->Unity Z flip.
            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());
            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

            screenOrientation = Screen.orientation;
            initDone = true;

            break;
        } else {
            // Frame not ready yet — wait one frame and poll again.
            yield return 0;
        }
    }
}