/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    InitializeCalibraton(webCamTextureMat);

    // If the webcam is front-facing, flip the Mat horizontally.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
    {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}
/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat rawSizeMat = webCamTextureToMatHelper.GetMat();
    float rawSizeWidth = rawSizeMat.width();
    float rawSizeHeight = rawSizeMat.height();
    Mat webCamTextureMat = imageOptimizationHelper.GetDownScaleMat(rawSizeMat);

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
    previewQuad.GetComponent<MeshRenderer>().material.mainTexture = texture;
    previewQuad.transform.localScale = new Vector3(1, height / width, 1);
    previewQuad.SetActive(displayCameraPreview);

    // Set the camera parameters: load them from a previously stored calibration
    // file if one exists; otherwise fall back to the default field values.
    double fx;
    double fy;
    double cx;
    double cy;

    string loadDirectoryPath = Path.Combine(Application.persistentDataPath, "HoloLensArUcoCameraCalibrationExample");
    string calibratonDirectoryName = "camera_parameters" + rawSizeWidth + "x" + rawSizeHeight;
    string loadCalibratonFileDirectoryPath = Path.Combine(loadDirectoryPath, calibratonDirectoryName);
    string loadPath = Path.Combine(loadCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");

    if (useStoredCameraParameters && File.Exists(loadPath))
    {
        CameraParameters param;
        XmlSerializer serializer = new XmlSerializer(typeof(CameraParameters));
        using (var stream = new FileStream(loadPath, FileMode.Open))
        {
            param = (CameraParameters)serializer.Deserialize(stream);
        }

        fx = param.camera_matrix[0];
        fy = param.camera_matrix[4];
        // Scale the principal point to the downscaled image size.
        cx = param.camera_matrix[2] / imageOptimizationHelper.downscaleRatio;
        cy = param.camera_matrix[5] / imageOptimizationHelper.downscaleRatio;

        camMatrix = new Mat(3, 3, CvType.CV_64FC1);
        camMatrix.put(0, 0, fx);
        camMatrix.put(0, 1, 0);
        camMatrix.put(0, 2, cx);
        camMatrix.put(1, 0, 0);
        camMatrix.put(1, 1, fy);
        camMatrix.put(1, 2, cy);
        camMatrix.put(2, 0, 0);
        camMatrix.put(2, 1, 0);
        camMatrix.put(2, 2, 1.0f);

        distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

        Debug.Log("Loaded CameraParameters from a stored XML file.");
        Debug.Log("loadPath: " + loadPath);
    }
    else
    {
        fx = this.fx;
        fy = this.fy;
        cx = this.cx / imageOptimizationHelper.downscaleRatio;
        cy = this.cy / imageOptimizationHelper.downscaleRatio;

        camMatrix = new Mat(3, 3, CvType.CV_64FC1);
        camMatrix.put(0, 0, fx);
        camMatrix.put(0, 1, 0);
        camMatrix.put(0, 2, cx);
        camMatrix.put(1, 0, 0);
        camMatrix.put(1, 1, fy);
        camMatrix.put(1, 2, cy);
        camMatrix.put(2, 0, 0);
        camMatrix.put(2, 1, 0);
        camMatrix.put(2, 2, 1.0f);

        distCoeffs = new MatOfDouble(this.distCoeffs1, this.distCoeffs2, this.distCoeffs3, this.distCoeffs4, this.distCoeffs5);

        Debug.Log("Created a dummy CameraParameters.");
    }

    Debug.Log("camMatrix " + camMatrix.dump());
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Compute useful camera characteristics (FOV, focal length, principal point,
    // aspect ratio) from the camera matrix.
    Size imageSize = new Size(width, height);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];

    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
Debug.Log("fovy " + fovy [0]); Debug.Log("focalLength " + focalLength [0]); Debug.Log("principalPoint " + principalPoint.ToString()); Debug.Log("aspectratio " + aspectratio [0]); // Display objects near the camera. arCamera.nearClipPlane = 0.01f; grayMat = new Mat(); ids = new Mat(); corners = new List <Mat> (); rejectedCorners = new List <Mat> (); rvecs = new Mat(); tvecs = new Mat(); rotMat = new Mat(3, 3, CvType.CV_64FC1); transformationM = new Matrix4x4(); invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1)); Debug.Log("invertYM " + invertYM.ToString()); invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1)); Debug.Log("invertZM " + invertZM.ToString()); detectorParams = DetectorParameters.create(); dictionary = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250); //If WebCamera is frontFaceing,flip Mat. if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) { webCamTextureToMatHelper.flipHorizontal = true; } downScaleFrameMat = new Mat((int)height, (int)width, CvType.CV_8UC4); rgbMat4preview = new Mat(); }