public GetVideoOverlayTextureYUV ( ) : |
||
return |
/// <summary>
/// Set up cameras and bind the Tango YUV video overlay textures to the
/// screen material.
/// </summary>
private void Start()
{
    Application.targetFrameRate = 60;
    EnableCamera();

    m_tangoApplication = FindObjectOfType<TangoApplication>();
    if (m_tangoApplication == null)
    {
        // FindObjectOfType returns null when no TangoApplication exists in the
        // scene; bail out instead of dereferencing null below.
        Debug.Log("No Tango Manager found in scene.");
        return;
    }

    m_textures = m_tangoApplication.GetVideoOverlayTextureYUV();

    // Pass YUV textures to shader for process.
    m_screenMaterial.SetTexture("_YTex", m_textures.m_videoOverlayTextureY);
    m_screenMaterial.SetTexture("_UTex", m_textures.m_videoOverlayTextureCb);
    m_screenMaterial.SetTexture("_VTex", m_textures.m_videoOverlayTextureCr);

    m_tangoApplication.Register(this);
}
/// <summary>
/// Initialize the AR Screen: set up the constant frame-conversion matrices,
/// connect to the Tango service, and bind the YUV overlay textures.
/// </summary>
private void Start()
{
    // Constant matrix converting start of service frame to Unity world frame.
    m_uwTss = new Matrix4x4();
    m_uwTss.SetColumn(0, new Vector4(1.0f, 0.0f, 0.0f, 0.0f));
    m_uwTss.SetColumn(1, new Vector4(0.0f, 0.0f, 1.0f, 0.0f));
    m_uwTss.SetColumn(2, new Vector4(0.0f, 1.0f, 0.0f, 0.0f));
    m_uwTss.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));

    // Constant matrix converting Unity world frame to device frame.
    m_cTuc.SetColumn(0, new Vector4(1.0f, 0.0f, 0.0f, 0.0f));
    m_cTuc.SetColumn(1, new Vector4(0.0f, -1.0f, 0.0f, 0.0f));
    m_cTuc.SetColumn(2, new Vector4(0.0f, 0.0f, 1.0f, 0.0f));
    m_cTuc.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));

    // Register for data callbacks.
    m_tangoApplication = FindObjectOfType<TangoApplication>();
    if (m_tangoApplication != null)
    {
        if (AndroidHelper.IsTangoCorePresent())
        {
            // Request Tango permissions. Register(this) exactly once — the
            // original code also called Register unconditionally at the end,
            // double-registering this behaviour object (and throwing a
            // NullReferenceException when no TangoApplication was found).
            m_tangoApplication.RegisterPermissionsCallback(_OnTangoApplicationPermissionsEvent);
            m_tangoApplication.RequestNecessaryPermissionsAndConnect();
            m_tangoApplication.Register(this);
        }
        else
        {
            // If no Tango Core is present let's tell the user to install it.
            Debug.Log("Tango Core is outdated.");
        }

        // Bind the YUV overlay textures to the preview images.
        m_textures = m_tangoApplication.GetVideoOverlayTextureYUV();
        lumaTexture.texture = m_textures.m_videoOverlayTextureY;
        chromaBlueTexture.texture = m_textures.m_videoOverlayTextureCb;
        chromaRedTexture.texture = m_textures.m_videoOverlayTextureCr;
    }
    else
    {
        Debug.Log("No Tango Manager found in scene.");
    }
}
/// <summary>
/// Initialize the AR Screen.
/// </summary>
public void Start()
{
    m_tangoApplication = FindObjectOfType<TangoApplication>();
    m_arCameraPostProcess = gameObject.GetComponent<ARCameraPostProcess>();

    if (m_tangoApplication != null)
    {
        m_tangoApplication.Register(this);

        // Pass YUV textures to shader for process.
        m_textures = m_tangoApplication.GetVideoOverlayTextureYUV();
        m_screenMaterial.SetTexture("_YTex", m_textures.m_videoOverlayTextureY);
        m_screenMaterial.SetTexture("_UTex", m_textures.m_videoOverlayTextureCb);
        m_screenMaterial.SetTexture("_VTex", m_textures.m_videoOverlayTextureCr);
    }

    if (!m_enableOcclusion)
    {
        return;
    }

    TangoPointCloud cloud = FindObjectOfType<TangoPointCloud>();
    if (cloud == null)
    {
        Debug.Log("Point Cloud data is not available, occlusion is not possible.");
        return;
    }

    Renderer cloudRenderer = cloud.GetComponent<Renderer>();
    cloudRenderer.enabled = true;

    // The YUV2RGB shader executes in the Background queue (1000). Render the
    // point cloud one pass earlier (999) so its depth is written to the Z
    // buffer first and the YUV2RGB pass ignores the Ztest against that depth.
    cloudRenderer.material.renderQueue = BACKGROUND_RENDER_QUEUE - 1;
    cloudRenderer.material.SetFloat("point_size", POINTCLOUD_SPLATTER_UPSAMPLE_SIZE);
    cloud.m_updatePointsMesh = true;
}
/// <summary>
/// Initialize the AR Screen.
/// </summary>
private void Start()
{
    m_tangoApplication = FindObjectOfType<TangoApplication>();
    if (m_tangoApplication == null)
    {
        // No Tango Manager in the scene; nothing to hook up.
        return;
    }

    m_tangoApplication.RegisterOnTangoConnect(_SetCameraIntrinsics);

    // Pass YUV textures to shader for process.
    m_textures = m_tangoApplication.GetVideoOverlayTextureYUV();
    m_screenMaterial.SetTexture("_YTex", m_textures.m_videoOverlayTextureY);
    m_screenMaterial.SetTexture("_UTex", m_textures.m_videoOverlayTextureCb);
    m_screenMaterial.SetTexture("_VTex", m_textures.m_videoOverlayTextureCr);
}
/// <summary>
/// Use this for initialization: connect to the Tango service, request the
/// poses needed for scanning, and hook up the colour camera texture.
/// </summary>
void Start()
{
    m_tangoApplication = FindObjectOfType<TangoApplication>();
    if (m_tangoApplication == null)
    {
        // Nothing below can work without a TangoApplication in the scene;
        // previously this dereferenced null and threw.
        Debug.Log("No Tango Manager found in scene.");
        return;
    }

    m_scanBounds = GetComponent<BoxCollider>();
    m_exportLock = new Mutex();
    m_progressLock = new Mutex();

    m_tangoApplication.RegisterPermissionsCallback(_OnTangoApplicationPermissionsEvent);
    m_tangoApplication.RequestNecessaryPermissionsAndConnect();

    m_request = TangoPoseRequest.IMU_TO_DEVICE
        | TangoPoseRequest.IMU_TO_CAMERA_DEPTH
        | TangoPoseRequest.IMU_TO_CAMERA_COLOR;

    m_yuvTexture = m_tangoApplication.GetVideoOverlayTextureYUV();

    m_meshes = new Stack<MeshFilter>();
    m_isExporting = false;

    if (m_tangoApplication.m_useExperimentalVideoOverlay)
    {
        VideoOverlayProvider.ExperimentalConnectTexture(
            TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR,
            m_yuvTexture,
            OnExperimentalFrameAvailable);
    }
    else
    {
        VideoOverlayProvider.ConnectTexture(
            TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR,
            m_colourBuffer.GetNativeTextureID());
    }

    TangoUtility.Init();
}