void Awake()
{
    // Conversion matrices between OpenCV's and Unity's coordinate conventions
    // (Y-flip and Z-flip scale matrices).
    _invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    _invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

    // Canonical 3D face landmark positions used as the solvePnP model
    // (model-space coordinates; presumably millimeters — TODO confirm).
    _objectPoints = new MatOfPoint3f(
        new Point3(-31, 72, 86),  // left eye
        new Point3(31, 72, 86),   // right eye
        new Point3(0, 40, 114),   // nose
        new Point3(-20, 15, 90),  // left mouth corner
        new Point3(20, 15, 90),   // right mouth corner
        new Point3(-69, 76, -2),  // left ear
        new Point3(69, 76, -2)    // right ear
    );
    _imagePoints = new MatOfPoint2f();
    _rotM = new Mat(3, 3, CvType.CV_64FC1);

    // Camera intrinsics: pinhole model with focal length equal to the larger
    // image dimension and the principal point at the image center.
    float maxD = Mathf.Max(normHeight, normWidth);
    float fx = maxD;
    float fy = maxD;
    float cx = normWidth / 2.0f;
    float cy = normHeight / 2.0f;
    _camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    // Row-major fill of the 3x3 matrix in a single call (same layout as the
    // nine individual put() calls; matches the style of SetCameraMatrix).
    _camMatrix.put(0, 0, new double[] { fx, 0, cx, 0, fy, cy, 0, 0, 1.0 });
    _distCoeffs = new MatOfDouble(0, 0, 0, 0); // assume no lens distortion

    // Combined projection * view matrix for mapping estimated poses into
    // Unity camera space (V flips Z to bridge handedness conventions).
    Matrix4x4 P = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues(fx, fy, cx, cy, normWidth, normHeight, 0.3f, 2000f);
    Matrix4x4 V = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    _VP = P * V;

    // Preallocate the 68 normalized landmark slots.
    _normPoints = new List<Vector2>(68);
    for (int i = 0; i < 68; i++)
    {
        _normPoints.Add(new Vector2(0, 0));
    }
}
/// <summary>
/// Fills <paramref name="camMatrix"/> with pinhole intrinsics derived from the
/// image size and rebuilds the combined projection-view matrix (VP).
/// </summary>
/// <param name="camMatrix">3x3 CV_64FC1 matrix that receives the intrinsics.</param>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
protected virtual void SetCameraMatrix(Mat camMatrix, float width, float height)
{
    // Focal length: the larger image dimension; principal point: image center.
    double focal = (double)Mathf.Max(width, height);
    double centerX = width / 2.0;
    double centerY = height / 2.0;
    double[] intrinsics = { focal, 0, centerX, 0, focal, centerY, 0, 0, 1.0 };
    camMatrix.put(0, 0, intrinsics);

    // Create the AR camera P * V matrix (V flips the Z axis).
    Matrix4x4 projection = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues(
        (float)focal, (float)focal, (float)centerX, (float)centerY, width, height, 1f, 3000f);
    Matrix4x4 view = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    VP = projection * view;
}
/// <summary>
/// Writes pinhole intrinsics for the given image size into
/// <paramref name="camMatrix"/> and refreshes the cached projection-view matrix.
/// </summary>
/// <param name="camMatrix">3x3 CV_64FC1 matrix that receives the intrinsics.</param>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
private void SetCameraMatrix(Mat camMatrix, float width, float height)
{
    float focal = Mathf.Max(width, height);
    float centerX = width / 2.0f;
    float centerY = height / 2.0f;
    camMatrix.put(0, 0, new double[] { focal, 0, centerX, 0, focal, centerY, 0, 0, 1.0 });

    // NOTE (from original author): near/far here are presumably in [mm],
    // roughly matching the physical camera; they are unrelated to the Unity
    // camera's near/far planes.
    var projection = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues(
        focal, focal, centerX, centerY, width, height, 1, 3000
    );
    _vp = projection * _invertZM;
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Sets up the display texture, camera intrinsics, screen-quad scale, and
/// the marker detector once the webcam feed is available.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    ScreenQuad.GetComponent<Renderer>().material.mainTexture = texture;

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    // Shrink the calibration image only when the screen is width-limited.
    // (The original had an empty branch for widthScale < heightScale;
    // the condition is inverted here to remove the dead branch.)
    float imageSizeScale = 1.0f;
    if (widthScale >= heightScale)
    {
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    }

    // Camera intrinsics: focal length = larger dimension, principal point at center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    // Row-major fill in a single call; same layout as nine individual put()s.
    camMatrix.put(0, 0, new double[] { fx, 0, cx, 0, fy, cy, 0, 0, 1.0 });
    Debug.Log("camMatrix " + camMatrix.dump());

    distCoeffs = new MatOfDouble(0, 0, 0, 0); // assume no lens distortion
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Camera calibration: derive FOV/focal/aspect diagnostics from the intrinsics.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];

    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Resize the screen quad so the video fills the view at the projection's
    // near-plane mapping of (1,1,1).
    Matrix4x4 p = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues((float)fx, (float)fy, (float)cx, (float)cy, width, height, 0.3f, 1000f);
    Vector3 cameraSpacePos = UnProjectVector(p, new Vector3(1.0f, 1.0f, 1.0f));
    if (widthScale > heightScale)
    {
        ScreenQuad.transform.localScale = new Vector3(cameraSpacePos.x * 2f, cameraSpacePos.x * height / width * 2f, 1);
    }
    else
    {
        ScreenQuad.transform.localScale = new Vector3(cameraSpacePos.y * width / height * 2f, cameraSpacePos.y * 2f, 1);
    }

    // Create the marker detector from the configured marker designs.
    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++)
    {
        markerDesigns[i] = markerSettings[i].markerDesign;
    }
    markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);

    // Axis-flip matrices bridging OpenCV and Unity coordinate conventions.
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // If the webcam is front-facing, flip the Mat horizontally (mirror view).
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
    {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}