/// <summary>
/// Unity lifecycle hook: stops the native SolAR pipeline and releases its
/// resources when this component is destroyed.
/// </summary>
void OnDestroy()
{
    // Guard against NullReferenceException: Unity invokes OnDestroy even when
    // Init() never ran (disabled component, scene torn down early), and
    // m_pipelineManager is only assigned inside Init().
    if (m_pipelineManager != null)
    {
        m_pipelineManager.stop();
        m_pipelineManager.Dispose();
        m_pipelineManager = null;
    }
}
/// <summary>
/// SWIG interop helper: extracts the native pointer wrapped by
/// <paramref name="obj"/>, yielding a zero handle when the wrapper is null.
/// </summary>
/// <param name="obj">Managed wrapper around the native pipeline manager; may be null.</param>
/// <returns>The wrapped native handle, or a HandleRef around IntPtr.Zero.</returns>
internal static global::System.Runtime.InteropServices.HandleRef getCPtr(SolARPluginPipelineManager obj)
{
    if (obj == null)
    {
        return new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero);
    }
    return obj.swigCPtr;
}
/// <summary>
/// Creates the SolAR pipeline manager, initialises it with the configuration
/// file appropriate for the current build target (editor / Android / standalone),
/// then sets up the camera source and starts the pipeline writing frames into
/// <c>array_imageData</c>. Sets <c>UpdateReady</c> on success; returns early
/// (logging the failure) if the native init fails.
/// </summary>
public void Init()
{
    m_pipelineManager = new SolARPluginPipelineManager();
#if UNITY_EDITOR
    // In the editor the pipeline configuration files live under the Unity
    // Assets folder, not under StreamingAssets.
    if (!m_pipelineManager.init(Application.dataPath + m_configurationPath, m_uuid))
    {
        Debug.Log("Cannot init pipeline manager " + Application.dataPath + m_configurationPath + " with uuid " + m_uuid);
        return;
    }
#elif UNITY_ANDROID
    Screen.sleepTimeout = SleepTimeout.NeverSleep;
    // On Android the build copies only the configuration files actually used by
    // the application to an external folder on the device; rewrite the paths
    // inside them to point at that location before loading.
    Android.ReplacePathToApp(Application.persistentDataPath + "/StreamingAssets" + m_configurationPath);
    Debug.Log("[ANDROID] Load pipeline : " + Application.persistentDataPath + "/StreamingAssets" + m_configurationPath);
    if (!m_pipelineManager.init(Application.persistentDataPath + "/StreamingAssets" + m_configurationPath, m_uuid))
    {
        Debug.Log("Cannot init pipeline manager " + Application.persistentDataPath + "/StreamingAssets" + m_configurationPath + " with uuid " + m_uuid);
        return;
    }
    Debug.Log("[ANDROID] Pipeline initialization successful");
    //m_Unity_Webcam = true;
#else
    // In a standalone build only the configuration files used by the
    // application are copied to the StreamingAssets folder.
    if (!m_pipelineManager.init(Application.streamingAssetsPath + m_configurationPath, m_uuid))
    {
        Debug.Log("Cannot init pipeline manager " + Application.streamingAssetsPath + m_configurationPath + " with uuid " + m_uuid);
        return;
    }
    //m_Unity_Webcam = true;
#endif
    if (m_Unity_Webcam)
    {
        // Frames come from the Unity WebCamTexture instead of the native camera.
        // NOTE(review): width/height are not assigned on this branch — presumably
        // set elsewhere (inspector or deviceCameraScript); verify before use.
        //m_webCamTexture = new WebCamTexture(WebCamTexture.devices[m_webCamNum].name, width, height);
        m_webCamTexture = deviceCameraScript.activeCameraTexture;
        m_webCamTexture.Play();
        data = new Color32[width * height];
        m_vidframe_byte = new byte[width * height * 3];
        GetPhysicalCameraFrame();
    }
    else
    {
        // Pull the intrinsic parameters from the pipeline's camera calibration.
        Matrix3x3f camParams =
            m_pipelineManager.getCameraParameters().intrinsic;
        width = Screen.width;
        height = Screen.height;
        focalX = camParams.coeff(0, 0); // focalX;
        focalY = camParams.coeff(1, 1); // focalY;
        centerX = camParams.coeff(0, 2); // centerX;
        centerY = camParams.coeff(1, 2); // centerY;
    }
    SendParametersToCameraProjectionMatrix();
    // Buffer the native pipeline writes RGB frames into (3 bytes per pixel).
    // NOTE(review): array_imageData is never pinned with GCHandle;
    // Marshal.UnsafeAddrOfPinnedArrayElement assumes a pinned array, so the GC
    // may relocate the buffer while native code writes through ptr. Consider
    // GCHandle.Alloc(array_imageData, GCHandleType.Pinned) plus a matching
    // Free() on teardown — TODO confirm.
    array_imageData = new byte[width * height * 3];
    IntPtr ptr = Marshal.UnsafeAddrOfPinnedArrayElement(array_imageData, 0);
    m_pipelineManager.start(ptr); //IntPtr
    //deviceCameraScript.UpdateScreenParams();
    //StartCoroutine(deviceCameraScript.UpdateScreenParamsCoroutine());
    UpdateReady = true;
}