/// <summary>
/// Camera-calibration callback: converts the pipeline's pixel-space intrinsic
/// matrix into a Unity projection matrix and applies it to <c>camera</c>, then
/// repositions the video plane to match the new projection.
/// </summary>
/// <param name="resolution">Resolution (pixels) of the calibrated video stream.</param>
/// <param name="intrinsic">3x3 intrinsic matrix; fx = coeff(0,0), fy = coeff(1,1).</param>
/// <param name="distorsion">Lens distortion coefficients — not used by this implementation.</param>
private void OnCalibrate(Sizei resolution, Matrix3x3f intrinsic, Vector5f distorsion)
{
    // Normalize the pixel focal lengths to clip-space units: a focal of f pixels
    // over a sensor of s pixels maps to 2*f/s in the [-1, 1] projection cube.
    var fX = intrinsic.coeff(0, 0) * 2 / resolution.width;
    var fY = intrinsic.coeff(1, 1) * 2 / resolution.height;

    // Build a projection matching the physical camera so rendered content
    // lines up with the video background.
    var projectionMatrix = Perspective(fX, fY, camera.nearClipPlane, camera.farClipPlane);
    camera.projectionMatrix = projectionMatrix;

    MoveVideoPlane();
}
/// <summary>
/// Camera-calibration callback: derives the vertical field of view and a
/// pixel-corrected aspect ratio from the intrinsic matrix, then applies the
/// resulting perspective projection to <c>camera</c>.
/// </summary>
/// <param name="resolution">Resolution (pixels) of the calibrated video stream.</param>
/// <param name="intrinsic">3x3 intrinsic matrix; fx = coeff(0,0), fy = coeff(1,1).</param>
/// <param name="distorsion">Lens distortion coefficients — not used by this implementation.</param>
private void OnCalibrate(Sizei resolution, Matrix3x3f intrinsic, Vector5f distorsion)
{
    var focalY = intrinsic.coeff(1, 1);
    var verticalFov = CameraUtility.Focal2Fov(focalY, resolution.height);
    var focalX = intrinsic.coeff(0, 0);

    // (fy/h) / (fx/w) reduces to w/h when pixels are square, and additionally
    // compensates for any fx/fy mismatch (non-square pixels) otherwise.
    var aspect = (focalY / resolution.height) / (focalX / resolution.width);

    var projection = Matrix4x4.Perspective(verticalFov, aspect, camera.nearClipPlane, camera.farClipPlane);
    CameraUtility.ApplyProjectionMatrix(camera, projection);
}
/// <summary>
/// Initializes the SolAR pipeline manager with the platform-appropriate
/// configuration path, sets up the image source (Unity webcam texture or the
/// pipeline's own camera), and starts the pipeline writing frames into a
/// shared byte buffer.
/// </summary>
public void Init() {
    m_pipelineManager = new SolARPluginPipelineManager();
#if UNITY_EDITOR
    // If in editor mode, the pipeline configuration file are stored in the unity assets folder but not in the streaminAssets folder
    if (!m_pipelineManager.init(Application.dataPath + m_configurationPath, m_uuid))
    {
        Debug.Log("Cannot init pipeline manager " + Application.dataPath + m_configurationPath + " with uuid " + m_uuid);
        return;
    }
#elif UNITY_ANDROID
    // Keep the screen awake while tracking runs.
    Screen.sleepTimeout = SleepTimeout.NeverSleep;
    // Rewrite paths inside the configuration so they point at the copy
    // extracted under the app's persistent-data StreamingAssets folder.
    Android.ReplacePathToApp(Application.persistentDataPath + "/StreamingAssets" + m_configurationPath);
    // When the application is built, only the pipeline configuration files used by the application are moved to the an external folder on terminal
    Debug.Log("[ANDROID] Load pipeline : " + Application.persistentDataPath + "/StreamingAssets" + m_configurationPath);
    if (!m_pipelineManager.init(Application.persistentDataPath + "/StreamingAssets" + m_configurationPath, m_uuid))
    {
        Debug.Log("Cannot init pipeline manager " + Application.persistentDataPath + "/StreamingAssets" + m_configurationPath + " with uuid " + m_uuid);
        return;
    }
    Debug.Log("[ANDROID] Pipeline initialization successful");
    //m_Unity_Webcam = true;
#else
    // When the application is built, only the pipeline configuration files used by the application are moved to the streamingAssets folder
    if (!m_pipelineManager.init(Application.streamingAssetsPath + m_configurationPath, m_uuid))
    {
        Debug.Log("Cannot init pipeline manager " + Application.streamingAssetsPath + m_configurationPath + " with uuid " + m_uuid);
        return;
    }
    //m_Unity_Webcam = true;
#endif
    if (m_Unity_Webcam)
    {
        // Unity supplies the frames: play the device camera texture and
        // allocate the pixel/byte buffers used to forward frames.
        //m_webCamTexture = new WebCamTexture(WebCamTexture.devices[m_webCamNum].name, width, height);
        m_webCamTexture = deviceCameraScript.activeCameraTexture;
        m_webCamTexture.Play();
        data = new Color32[width * height];
        m_vidframe_byte = new byte[width * height * 3];
        GetPhysicalCameraFrame();
    }
    else
    {
        // The pipeline supplies the frames: read the calibrated intrinsics
        // from the pipeline and size the view to the screen.
        Matrix3x3f camParams = m_pipelineManager.getCameraParameters().intrinsic;
        width = Screen.width;
        height = Screen.height;
        focalX = camParams.coeff(0, 0); // focalX;
        focalY = camParams.coeff(1, 1); // focalY;
        centerX = camParams.coeff(0, 2); // centerX;
        centerY = camParams.coeff(1, 2); // centerY;
    }
    SendParametersToCameraProjectionMatrix();
    // Buffer the native pipeline writes RGB frames into (3 bytes per pixel).
    array_imageData = new byte[width * height * 3];
    // NOTE(review): UnsafeAddrOfPinnedArrayElement assumes the array is already
    // pinned, but array_imageData is never pinned with a GCHandle here — if the
    // GC relocates it while the native pipeline holds ptr, memory is corrupted.
    // Verify whether pinning happens elsewhere; otherwise pin for the
    // pipeline's lifetime.
    IntPtr ptr = Marshal.UnsafeAddrOfPinnedArrayElement(array_imageData, 0);
    m_pipelineManager.start(ptr);
    //IntPtr
    //deviceCameraScript.UpdateScreenParams();
    //StartCoroutine(deviceCameraScript.UpdateScreenParamsCoroutine());
    UpdateReady = true;
}