YUVTexture: wraps separate textures for the Y, U, and V planes.
        /// <summary>
        /// Register to get Tango image events.
        /// 
        /// NOTE: Tango image events happen on a different thread than the main
        /// Unity thread.
        /// </summary>
        /// <param name="cameraId">Camera identifier to get events for.</param>
        /// <param name="useExperimentalOverlay">If true, use the experimental video overlay.</param>
        /// <param name="videoOverlayTexture">The video overlay texture to use.  Only used in experimental mode.</param> 
        internal virtual void SetCallback(Tango.TangoEnums.TangoCameraId cameraId, bool useExperimentalOverlay, YUVTexture videoOverlayTexture)
        {
            m_usingExperimentalOverlay = useExperimentalOverlay;
            if (!useExperimentalOverlay)
            {
                m_previousImageBuffer = new TangoUnityImageData();
                m_onImageAvailable = new Tango.VideoOverlayProvider.TangoService_onImageAvailable(_OnImageAvailable);
                Tango.VideoOverlayProvider.SetCallback(cameraId, m_onImageAvailable);
            }
            else
            {
                if (videoOverlayTexture != null)
                {
                    m_onUnityFrameAvailable = new Tango.VideoOverlayProvider.TangoService_onUnityFrameAvailable(_OnExperimentalUnityFrameAvailable);
                    VideoOverlayProvider.ExperimentalConnectTexture(cameraId,
                                                                    videoOverlayTexture,
                                                                    m_onUnityFrameAvailable);

                    Debug.Log("VideoOverlayListener.SetCallback() : Experimental Overlay listener hooked up");
                }
                else
                {
                    Debug.Log("VideoOverlayListener.SetCallback() : No Texture2D found!");
                }
            }
        }
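Because the image callback arrives on the Tango service thread, a listener normally just copies the data under a lock and sets a flag, and the Unity main thread consumes it later (for example from Update()). The sketch below shows that pattern in isolation; it is not the SDK's implementation, and every name in it is illustrative.

using System;

// Minimal, SDK-agnostic sketch of the producer/consumer pattern implied by the note
// above: the service-thread callback stores the frame under a lock and marks it dirty,
// and the Unity main thread picks it up later. All names here are illustrative.
public class ThreadedFrameBuffer<T>
{
    private readonly object m_lock = new object();
    private T m_latest;
    private bool m_isDirty;

    // Called from the Tango service thread.
    public void OnFrameFromServiceThread(T frame)
    {
        lock (m_lock)
        {
            m_latest = frame;   // stash only; never touch Unity objects on this thread
            m_isDirty = true;
        }
    }

    // Called from the Unity main thread, e.g. in Update().
    public bool TryConsume(Action<T> onFrame)
    {
        lock (m_lock)
        {
            if (!m_isDirty)
            {
                return false;
            }

            m_isDirty = false;
            onFrame(m_latest);
            return true;
        }
    }
}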
Example #2
        /// <summary>
        /// Awake this instance.
        /// </summary>
        private void Awake()
        {
            AndroidHelper.RegisterPauseEvent(_androidOnPause);
            AndroidHelper.RegisterResumeEvent(_androidOnResume);
            AndroidHelper.RegisterOnActivityResultEvent(_androidOnActivityResult);

            if (m_enableMotionTracking)
            {
                m_poseListener = new PoseListener();
            }

            if (m_enableDepth)
            {
                m_depthListener = new DepthListener();
            }

            if (m_enableUXLibrary)
            {
                m_tangoEventListener = new TangoEventListener();
            }

            if (m_enableVideoOverlay)
            {
                int yTextureWidth   = 0;
                int yTextureHeight  = 0;
                int uvTextureWidth  = 0;
                int uvTextureHeight = 0;

                m_yuvTexture           = new YUVTexture(yTextureWidth, yTextureHeight, uvTextureWidth, uvTextureHeight, TextureFormat.RGBA32, false);
                m_videoOverlayListener = new VideoOverlayListener();
            }
        }
        /// <summary>
        /// Experimental API only, subject to change.  Connect texture IDs to a camera.
        ///
        /// The camera is selected via TangoCameraId.  Currently only TANGO_CAMERA_COLOR is supported.  The texture
        /// handles will be regenerated by the API on startup after which the application can use them, and will be
        /// packed RGBA8888 data containing bytes of the image (so a single RGBA8888 will pack 4 neighbouring pixels).
        /// If the config flag experimental_image_pixel_format is set to HAL_PIXEL_FORMAT_YCrCb_420_SP, texture_y will
        /// pack 1280x720 pixels into a 320x720 RGBA8888 texture.  texture_Cb and texture_Cr will contain copies of
        /// the 2x2 downsampled interleaved UV planes packed similarly.  If experimental_image_pixel_format is set to
        /// HAL_PIXEL_FORMAT_YV12 then texture_y will have a stride of 1536 containing 1280 columns of data, packed
        /// similarly in a RGBA8888 texture. texture_Cb and texture_Cr will be 2x2 downsampled versions of the same.
        /// See YV12 and NV21 formats for details.
        ///
        /// Note: The first scan-line of the color image is reserved for metadata instead of image pixels.
        /// </summary>
        /// <param name="cameraId">
        /// The ID of the camera to connect this texture to.  Only TANGO_CAMERA_COLOR and TANGO_CAMERA_FISHEYE are
        /// supported.
        /// </param>
        /// <param name="textures">The texture IDs to use for the Y, Cb, and Cr planes.</param>
        /// <param name="onUnityFrameAvailable">Callback method.</param>
        internal static void ExperimentalConnectTexture(TangoEnums.TangoCameraId cameraId, YUVTexture textures, TangoService_onUnityFrameAvailable onUnityFrameAvailable)
        {
#if UNITY_EDITOR
            if (cameraId == TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR)
            {
                // Resize textures to the simulated size.
                textures.ResizeAll(EMULATED_CAMERA_PACKED_WIDTH, EMULATED_CAMERA_PACKED_Y_HEIGHT,
                                   EMULATED_CAMERA_PACKED_WIDTH, EMULATED_CAMERA_PACKED_UV_HEIGHT);

                if (!m_emulationIsInitialized)
                {
                    _InitializeResourcesForEmulation();
                    m_emulationIsInitialized = true;
                }

#if !UNITY_EDITOR_WIN
                // Rebind the Texture2Ds to the underlying OpenGL texture IDs of our render textures,
                // which is more or less the inverse of what the actual Tango service does but has the same effect.
                textures.m_videoOverlayTextureY.UpdateExternalTexture(m_emulatedExpId_Y.GetNativeTexturePtr());
                textures.m_videoOverlayTextureCb.UpdateExternalTexture(m_emulatedExpId_CbCr.GetNativeTexturePtr());
                textures.m_videoOverlayTextureCr.UpdateExternalTexture(m_emulatedExpId_CbCr.GetNativeTexturePtr());
#else   // !UNITY_EDITOR_WIN
                // A crash occurs when assigning the pointer of a Unity RenderTexture to a Texture2D (as above)
                // in a DirectX environment. Instead, size the Texture2Ds correctly and copy the render targets
                // with ReadPixels() when updating the experimental textures.
                // The paths are kept separate because ReadPixels() is a significant performance hit.

                textures.m_videoOverlayTextureY.Resize(m_emulatedExpId_Y.width, m_emulatedExpId_Y.height);
                textures.m_videoOverlayTextureCb.Resize(m_emulatedExpId_CbCr.width, m_emulatedExpId_CbCr.height);

                m_emulationTexIdCaptureTextures = textures;
#endif  // !UNITY_EDITOR_WIN
            }
#else
            int returnValue = VideoOverlayAPI.TangoService_Experimental_connectTextureIdUnity(
                cameraId,
                (uint)textures.m_videoOverlayTextureY.GetNativeTexturePtr().ToInt64(),
                (uint)textures.m_videoOverlayTextureCb.GetNativeTexturePtr().ToInt64(),
                (uint)textures.m_videoOverlayTextureCr.GetNativeTexturePtr().ToInt64(),
                callbackContext,
                onUnityFrameAvailable);

            if (returnValue != Common.ErrorType.TANGO_SUCCESS)
            {
                Debug.Log("VideoOverlayProvider.ConnectTexture() Texture was not connected to camera!");
            }
#endif
        }
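The packing described in the summary above (four neighbouring Y bytes per RGBA8888 texel, so 1280x720 luma fits into a 320x720 texture) can be decoded on the CPU as shown below. This helper is purely illustrative and not part of the SDK; it assumes the packed Y plane has already been read into a Color32 array, for example with GetPixels32() on a readable copy of the texture.

using UnityEngine;

// Illustrative helper (not part of the Tango SDK): recovers the luma value of pixel
// (x, y) from a packed Y plane where each RGBA8888 texel stores four horizontally
// adjacent Y samples in its R, G, B and A channels.
public static class PackedYPlane
{
    public static byte GetLuma(Color32[] packedPixels, int packedWidth, int x, int y)
    {
        Color32 texel = packedPixels[(y * packedWidth) + (x / 4)];
        switch (x % 4)
        {
            case 0: return texel.r;
            case 1: return texel.g;
            case 2: return texel.b;
            default: return texel.a;
        }
    }
}

For the 1280x720 case described above, packedWidth would be 320.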
Example #4
        /// <summary>
        /// DEPRECATED: Register to get Tango texture events for when the texture ID is updated.
        ///
        /// NOTE: Tango texture events happen on a different thread than the main
        /// Unity thread.
        /// </summary>
        /// <param name="videoOverlayTexture">The video overlay texture to use.</param>
        internal void SetCallbackYUVTextureIdMethod(YUVTexture videoOverlayTexture)
        {
            if (videoOverlayTexture != null)
            {
                m_onYUVTextureAvailable =
                    new VideoOverlayProvider.TangoService_onTextureAvailable(_OnTangoYUVTextureAvailable);
                VideoOverlayProvider.ExperimentalConnectTexture(
                    COLOR_CAMERA_ID, videoOverlayTexture, m_onYUVTextureAvailable);

                Debug.Log("VideoOverlayListener.SetCallback() : YUVTexture listener hooked up");
            }
            else
            {
                Debug.Log("VideoOverlayListener.SetCallback() : No Texture2D found!");
            }
        }
Example #5
    /// <summary>
    /// Set up cameras.
    /// </summary>
    private void Start()
    {
        Application.targetFrameRate = 60;
        EnableCamera();

        m_tangoApplication = FindObjectOfType<TangoApplication>();

        m_textures = m_tangoApplication.GetVideoOverlayTextureYUV();

        // Pass YUV textures to the shader for processing.
        m_screenMaterial.SetTexture("_YTex", m_textures.m_videoOverlayTextureY);
        m_screenMaterial.SetTexture("_UTex", m_textures.m_videoOverlayTextureCb);
        m_screenMaterial.SetTexture("_VTex", m_textures.m_videoOverlayTextureCr);

        m_tangoApplication.Register(this);
    }
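The three textures bound to _YTex, _UTex, and _VTex are combined into RGB inside the screen shader. The shader itself is not part of this snippet, but the underlying math is the standard BT.601 YUV-to-RGB conversion, sketched here on the CPU purely for reference.

using UnityEngine;

// Reference-only CPU version of the BT.601 YUV -> RGB conversion the screen shader
// is expected to perform; the actual conversion in this sample runs on the GPU.
public static class YuvToRgb
{
    public static Color Convert(byte yByte, byte cbByte, byte crByte)
    {
        float y  = yByte / 255.0f;
        float cb = (cbByte / 255.0f) - 0.5f;
        float cr = (crByte / 255.0f) - 0.5f;

        float r = y + 1.402f * cr;
        float g = y - 0.344136f * cb - 0.714136f * cr;
        float b = y + 1.772f * cb;

        return new Color(Mathf.Clamp01(r), Mathf.Clamp01(g), Mathf.Clamp01(b), 1.0f);
    }
}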
Example #6
    /// <summary>
    /// Initialize the AR Screen.
    /// </summary>
    private void Start()
    {
        // Constant matrix converting start of service frame to Unity world frame.
        m_uwTss = new Matrix4x4();
        m_uwTss.SetColumn( 0, new Vector4( 1.0f, 0.0f, 0.0f, 0.0f ) );
        m_uwTss.SetColumn( 1, new Vector4( 0.0f, 0.0f, 1.0f, 0.0f ) );
        m_uwTss.SetColumn( 2, new Vector4( 0.0f, 1.0f, 0.0f, 0.0f ) );
        m_uwTss.SetColumn( 3, new Vector4( 0.0f, 0.0f, 0.0f, 1.0f ) );

        // Constant matrix converting Unity world frame to device frame.
        m_cTuc.SetColumn( 0, new Vector4( 1.0f, 0.0f, 0.0f, 0.0f ) );
        m_cTuc.SetColumn( 1, new Vector4( 0.0f, -1.0f, 0.0f, 0.0f ) );
        m_cTuc.SetColumn( 2, new Vector4( 0.0f, 0.0f, 1.0f, 0.0f ) );
        m_cTuc.SetColumn( 3, new Vector4( 0.0f, 0.0f, 0.0f, 1.0f ) );

        // Register for data callbacks.
        m_tangoApplication = FindObjectOfType<TangoApplication>();

        if( m_tangoApplication != null ) {
            if( AndroidHelper.IsTangoCorePresent() ) {
                // Request Tango permissions
                m_tangoApplication.RegisterPermissionsCallback( _OnTangoApplicationPermissionsEvent );
                m_tangoApplication.RequestNecessaryPermissionsAndConnect();
                m_tangoApplication.Register( this );
            } else {
                // If no Tango Core is present, tell the user to install it.
                Debug.Log( "Tango Core is outdated." );
            }
        } else {
            Debug.Log( "No Tango Manager found in scene." );
        }
        if( m_tangoApplication != null ) {
            m_textures = m_tangoApplication.GetVideoOverlayTextureYUV();

            lumaTexture.texture = m_textures.m_videoOverlayTextureY;
            chromaBlueTexture.texture = m_textures.m_videoOverlayTextureCb;
            chromaRedTexture.texture = m_textures.m_videoOverlayTextureCr;

            // Pass YUV textures to shader for process.
            //m_screenMaterial.SetTexture( "_YTex", m_textures.m_videoOverlayTextureY );
            //m_screenMaterial.SetTexture( "_UTex", m_textures.m_videoOverlayTextureCb );
            //m_screenMaterial.SetTexture( "_VTex", m_textures.m_videoOverlayTextureCr );
        }

        if( m_tangoApplication != null ) {
            m_tangoApplication.Register( this );
        }
    }
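The two constant matrices set up above are the outer terms of the usual Tango frame chain: a device pose reported in the start-of-service frame gets sandwiched between them to produce a Unity-world camera transform. The method below is only a sketch of that composition, assumed to live in the same MonoBehaviour; ssTd (start of service to device) and dTc (device to color camera) are hypothetical inputs taken from the pose callback and the camera extrinsics, and are not defined in this snippet.

    // Illustrative composition only; ssTd and dTc are assumed inputs, and the result
    // places the Unity camera using the m_uwTss / m_cTuc constants from Start() above.
    private void _UpdateCameraTransform( Matrix4x4 ssTd, Matrix4x4 dTc )
    {
        Matrix4x4 uwTuc = m_uwTss * ssTd * dTc * m_cTuc;

        transform.position = uwTuc.GetColumn( 3 );
        transform.rotation = Quaternion.LookRotation( uwTuc.GetColumn( 2 ), uwTuc.GetColumn( 1 ) );
    }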
        /// <summary>
        /// Register to get Tango image events for when the texture ID is updated.
        ///
        /// NOTE: Tango image events happen on a different thread than the main
        /// Unity thread.
        /// </summary>
        /// <param name="cameraId">Camera identifier to get events for.</param>
        /// <param name="videoOverlayTexture">The video overlay texture to use.</param>
        internal virtual void SetCallbackTextureIdMethod(Tango.TangoEnums.TangoCameraId cameraId,
                                                         YUVTexture videoOverlayTexture)
        {
            if (videoOverlayTexture != null)
            {
                m_onUnityFrameAvailable =
                    new Tango.VideoOverlayProvider.TangoService_onUnityFrameAvailable(_OnExperimentalUnityFrameAvailable);
                VideoOverlayProvider.ExperimentalConnectTexture(cameraId,
                                                                videoOverlayTexture,
                                                                m_onUnityFrameAvailable);

                Debug.Log("VideoOverlayListener.SetCallback() : Experimental Overlay listener hooked up");
            }
            else
            {
                Debug.Log("VideoOverlayListener.SetCallback() : No Texture2D found!");
            }
        }
Example #8
        /// <summary>
        /// Register to get Tango image events for when the texture ID is updated.
        /// 
        /// NOTE: Tango image events happen on a different thread than the main
        /// Unity thread.
        /// </summary>
        /// <param name="cameraId">Camera identifier to get events for.</param>
        /// <param name="videoOverlayTexture">The video overlay texture to use.</param> 
        internal virtual void SetCallbackTextureIdMethod(Tango.TangoEnums.TangoCameraId cameraId, 
                                                         YUVTexture videoOverlayTexture)
        {
            if (videoOverlayTexture != null)
            {
                m_onUnityFrameAvailable = 
                    new Tango.VideoOverlayProvider.TangoService_onUnityFrameAvailable(_OnExperimentalUnityFrameAvailable);
                VideoOverlayProvider.ExperimentalConnectTexture(cameraId,
                                                                videoOverlayTexture,
                                                                m_onUnityFrameAvailable);

                Debug.Log("VideoOverlayListener.SetCallback() : Experimental Overlay listener hooked up");
            }
            else
            {
                Debug.Log("VideoOverlayListener.SetCallback() : No Texture2D found!");
            }
        }
        /// <summary>
        /// DEPRECATED: Register to get Tango texture events for when the texture ID is updated.
        ///
        /// NOTE: Tango texture events happen on a different thread than the main
        /// Unity thread.
        /// </summary>
        /// <param name="videoOverlayTexture">The video overlay texture to use.</param>
        internal static void SetCallbackYUVTextureIdMethod(YUVTexture videoOverlayTexture)
        {
            if (videoOverlayTexture != null)
            {
                if (m_onYUVTextureAvailable != null)
                {
                    Debug.Log("VideoOverlayProvider.SetCallbackYUVTextureIdMethod() called when a callback is already set.");
                    return;
                }

                Debug.Log("VideoOverlayProvider.SetCallbackYUVTextureIdMethod()");
                m_onYUVTextureAvailable =
                    new VideoOverlayProvider.APIOnTextureAvailable(_OnTangoYUVTextureAvailable);
                VideoOverlayProvider.ExperimentalConnectTexture(
                    COLOR_CAMERA_ID, videoOverlayTexture, m_onYUVTextureAvailable);
            }
            else
            {
                Debug.Log("VideoOverlayListener.SetCallbackYUVTextureIdMethod() : No Texture2D found!");
            }
        }
Example #10
    /// <summary>
    /// Initialize the AR Screen.
    /// </summary>
    public void Start()
    {
        m_tangoApplication = FindObjectOfType<TangoApplication>();
        m_arCameraPostProcess = gameObject.GetComponent<ARCameraPostProcess>();
        if (m_tangoApplication != null)
        {
            m_tangoApplication.Register(this);

            // Pass YUV textures to the shader for processing.
            m_textures = m_tangoApplication.GetVideoOverlayTextureYUV();
            m_screenMaterial.SetTexture("_YTex", m_textures.m_videoOverlayTextureY);
            m_screenMaterial.SetTexture("_UTex", m_textures.m_videoOverlayTextureCb);
            m_screenMaterial.SetTexture("_VTex", m_textures.m_videoOverlayTextureCr);
        }

        if (m_enableOcclusion) 
        {
            TangoPointCloud pointCloud = FindObjectOfType<TangoPointCloud>();
            if (pointCloud != null)
            {
                Renderer renderer = pointCloud.GetComponent<Renderer>();
                renderer.enabled = true;

                // Set the render queue to one less than the background queue. The YUV2RGB shader
                // executes in the Background queue (1000), but we want the point cloud's depth
                // written to the Z buffer before the YUV2RGB shader runs (the camera image ignores
                // the Z test against that depth), so the point cloud's render queue is set to 999.
                renderer.material.renderQueue = BACKGROUND_RENDER_QUEUE - 1;
                renderer.material.SetFloat("point_size", POINTCLOUD_SPLATTER_UPSAMPLE_SIZE);
                pointCloud.m_updatePointsMesh = true;
            }
            else
            {
                Debug.Log("Point Cloud data is not available, occlusion is not possible.");
            }
        }
    }
Example #11
        /// <summary>
        /// DEPRECATED: Connect texture IDs to a camera.
        ///
        /// The camera is selected via TangoCameraId.  Currently only TANGO_CAMERA_COLOR is supported.  The texture
        /// handles will be regenerated by the API on startup after which the application can use them, and will be
        /// packed RGBA8888 data containing bytes of the image (so a single RGBA8888 will pack 4 neighboring pixels).
        /// If the config flag experimental_image_pixel_format is set to HAL_PIXEL_FORMAT_YCrCb_420_SP, texture_y will
        /// pack 1280x720 pixels into a 320x720 RGBA8888 texture.  texture_Cb and texture_Cr will contain copies of
        /// the 2x2 downsampled interleaved UV planes packed similarly.  If experimental_image_pixel_format is set to
        /// HAL_PIXEL_FORMAT_YV12 then texture_y will have a stride of 1536 containing 1280 columns of data, packed
        /// similarly in a RGBA8888 texture. texture_Cb and texture_Cr will be 2x2 downsampled versions of the same.
        /// See YV12 and NV21 formats for details.
        ///
        /// Note: The first scan-line of the color image is reserved for metadata instead of image pixels.
        /// </summary>
        /// <param name="cameraId">
        /// The ID of the camera to connect this texture to.  Only TANGO_CAMERA_COLOR and TANGO_CAMERA_FISHEYE are
        /// supported.
        /// </param>
        /// <param name="textures">The texture IDs to use for the Y, Cb, and Cr planes.</param>
        /// <param name="callback">Callback method.</param>
        internal static void ExperimentalConnectTexture(
            TangoEnums.TangoCameraId cameraId, YUVTexture textures, APIOnTextureAvailable callback)
        {
#if UNITY_EDITOR
            if (cameraId == TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR)
            {
                m_emulatedExpId_Y    = (RenderTexture)textures.m_videoOverlayTextureY;
                m_emulatedExpId_CbCr = (RenderTexture)textures.m_videoOverlayTextureCb;
            }
#else
            int returnValue = API.TangoService_Experimental_connectTextureIdUnity(
                cameraId,
                (uint)textures.m_videoOverlayTextureY.GetNativeTexturePtr().ToInt64(),
                (uint)textures.m_videoOverlayTextureCb.GetNativeTexturePtr().ToInt64(),
                (uint)textures.m_videoOverlayTextureCr.GetNativeTexturePtr().ToInt64(),
                IntPtr.Zero,
                callback);

            if (returnValue != Common.ErrorType.TANGO_SUCCESS)
            {
                Debug.Log("VideoOverlayProvider.ConnectTexture() Texture was not connected to camera!");
            }
#endif
        }
 /// <summary>
 /// DEPRECATED: Register to get Tango texture events for when the texture ID is updated.
 /// 
 /// NOTE: Tango texture events happen on a different thread than the main
 /// Unity thread.
 /// </summary>
 /// <param name="videoOverlayTexture">The video overlay texture to use.</param> 
 internal static void SetCallbackYUVTextureIdMethod(YUVTexture videoOverlayTexture)
 {
     if (videoOverlayTexture != null)
     {
         if (m_onYUVTextureAvailable != null)
         {
             Debug.Log("VideoOverlayProvider.SetCallbackYUVTextureIdMethod() called when a callback is already set.");
             return;
         }
         
         Debug.Log("VideoOverlayProvider.SetCallbackYUVTextureIdMethod()");
         m_onYUVTextureAvailable = 
             new VideoOverlayProvider.APIOnTextureAvailable(_OnTangoYUVTextureAvailable);
         VideoOverlayProvider.ExperimentalConnectTexture(
             COLOR_CAMERA_ID, videoOverlayTexture, m_onYUVTextureAvailable);
     }
     else
     {
         Debug.Log("VideoOverlayListener.SetCallbackYUVTextureIdMethod() : No Texture2D found!");
     }
 }
Example #13
        /// <summary>
        /// Awake this instance.
        /// </summary>
        private void Awake()
        {
            AndroidHelper.RegisterPauseEvent(_androidOnPause);
            AndroidHelper.RegisterResumeEvent(_androidOnResume);
            AndroidHelper.RegisterOnActivityResultEvent(_androidOnActivityResult);
            AndroidHelper.RegisterOnScreenOrientationChangedEvent(_androidOnScreenOrientationChanged);

            // Setup listeners.
            m_tangoEventListener = new TangoEventListener();
            m_areaDescriptionEventListener = new AreaDescriptionEventListener();

            if (m_enableCloudADF)
            {
                m_tangoCloudEventListener = new TangoCloudEventListener();
            }

            if (m_enableMotionTracking)
            {
                m_poseListener = new PoseListener();
            }

            if (m_enableDepth)
            {
                m_depthListener = new DepthListener();
            }

            if (m_enableVideoOverlay)
            {
                int yTextureWidth = 0;
                int yTextureHeight = 0;
                int uvTextureWidth = 0;
                int uvTextureHeight = 0;

                m_yuvTexture = new YUVTexture(yTextureWidth, yTextureHeight, uvTextureWidth, uvTextureHeight, TextureFormat.RGBA32, false);
                m_videoOverlayListener = new VideoOverlayListener();
            }

            if (m_enable3DReconstruction)
            {
                m_tango3DReconstruction = new Tango3DReconstruction(m_3drResolutionMeters, m_3drGenerateColor, m_3drSpaceClearing);
                m_tango3DReconstruction.m_useAreaDescriptionPose = m_3drUseAreaDescriptionPose;
                m_tango3DReconstruction.m_sendColorToUpdate = m_3drGenerateColor;
            }

            // Setup configs.
            m_tangoConfig = new TangoConfig(TangoEnums.TangoConfigType.TANGO_CONFIG_DEFAULT);
            m_tangoRuntimeConfig = new TangoConfig(TangoEnums.TangoConfigType.TANGO_CONFIG_RUNTIME);

            TangoSupport.UpdateCurrentRotationIndex();
        }
        /// <summary>
        /// Awake this instance.
        /// </summary>
        private void Awake()
        {
            AndroidHelper.RegisterPauseEvent(_androidOnPause);
            AndroidHelper.RegisterResumeEvent(_androidOnResume);
            AndroidHelper.RegisterOnActivityResultEvent(_androidOnActivityResult);

            if (m_enableMotionTracking)
            {
                m_poseListener = new PoseListener();
            }

            if (m_enableDepth)
            {
                m_depthListener = new DepthListener();
            }

            if (m_enableUXLibrary)
            {
                m_tangoEventListener = new TangoEventListener();
            }

            if (m_enableVideoOverlay)
            {
                int yTextureWidth = 0;
                int yTextureHeight = 0;
                int uvTextureWidth = 0;
                int uvTextureHeight = 0;

                m_yuvTexture = new YUVTexture(yTextureWidth, yTextureHeight, uvTextureWidth, uvTextureHeight, TextureFormat.RGBA32, false);
                m_videoOverlayListener = new VideoOverlayListener();
            }
        }
Example #15
        /// <summary>
        /// Awake this instance.
        /// </summary>
        private void Awake()
        {
            AndroidHelper.RegisterPauseEvent(_androidOnPause);
            AndroidHelper.RegisterResumeEvent(_androidOnResume);
            AndroidHelper.RegisterOnActivityResultEvent(_androidOnActivityResult);

            // Setup listeners.
            m_tangoEventListener = new TangoEventListener();
            m_areaDescriptionEventListener = new AreaDescriptionEventListener();

            if (m_enableCloudADF)
            {
                m_tangoCloudEventListener = new TangoCloudEventListener();
            }

            if (m_enableMotionTracking)
            {
                m_poseListener = new PoseListener();
            }

            if (m_enableDepth)
            {
                m_depthListener = new DepthListener();
            }

            if (m_enableVideoOverlay)
            {
                int yTextureWidth = 0;
                int yTextureHeight = 0;
                int uvTextureWidth = 0;
                int uvTextureHeight = 0;

                m_yuvTexture = new YUVTexture(yTextureWidth, yTextureHeight, uvTextureWidth, uvTextureHeight, TextureFormat.RGBA32, false);
                m_videoOverlayListener = new VideoOverlayListener();
            }

            // Setup configs.
            m_tangoConfig = new TangoConfig(TangoEnums.TangoConfigType.TANGO_CONFIG_DEFAULT);
            m_tangoRuntimeConfig = new TangoConfig(TangoEnums.TangoConfigType.TANGO_CONFIG_RUNTIME);
        }
Example #16
        /// <summary>
        /// Register to get Tango image events.
        ///
        /// NOTE: Tango image events happen on a different thread than the main
        /// Unity thread.
        /// </summary>
        /// <param name="cameraId">Camera identifier to get events for.</param>
        /// <param name="useExperimentalOverlay">If true, use the experimental video overlay.</param>
        /// <param name="videoOverlayTexture">The video overlay texture to use.  Only used in experimental mode.</param>
        internal virtual void SetCallback(Tango.TangoEnums.TangoCameraId cameraId, bool useExperimentalOverlay, YUVTexture videoOverlayTexture)
        {
            m_usingExperimentalOverlay = useExperimentalOverlay;
            if (!useExperimentalOverlay)
            {
                m_previousImageBuffer = new TangoUnityImageData();
                m_onImageAvailable    = new Tango.VideoOverlayProvider.TangoService_onImageAvailable(_OnImageAvailable);
                Tango.VideoOverlayProvider.SetCallback(cameraId, m_onImageAvailable);
            }
            else
            {
                if (videoOverlayTexture != null)
                {
                    m_onUnityFrameAvailable = new Tango.VideoOverlayProvider.TangoService_onUnityFrameAvailable(_OnExperimentalUnityFrameAvailable);
                    VideoOverlayProvider.ExperimentalConnectTexture(cameraId,
                                                                    videoOverlayTexture,
                                                                    m_onUnityFrameAvailable);

                    Debug.Log("VideoOverlayListener.SetCallback() : Experimental Overlay listener hooked up");
                }
                else
                {
                    Debug.Log("VideoOverlayListener.SetCallback() : No Texture2D found!");
                }
            }
        }
Example #17
        // Use this for initialization
        void Start()
        {
            m_tangoApplication = FindObjectOfType<TangoApplication>();

            m_scanBounds = GetComponent<BoxCollider>();

            m_exportLock = new Mutex();
            m_progressLock = new Mutex();

            m_tangoApplication.RegisterPermissionsCallback( _OnTangoApplicationPermissionsEvent );
            m_tangoApplication.RequestNecessaryPermissionsAndConnect();

            //m_tangoApplication.Register( this );

            m_request = TangoPoseRequest.IMU_TO_DEVICE | TangoPoseRequest.IMU_TO_CAMERA_DEPTH | TangoPoseRequest.IMU_TO_CAMERA_COLOR;

            m_yuvTexture = m_tangoApplication.GetVideoOverlayTextureYUV();

            //// Pass YUV textures to shader for process.
            //m_screenMaterial.SetTexture( "_YTex", m_yuvTexture.m_videoOverlayTextureY );
            //m_screenMaterial.SetTexture( "_UTex", m_yuvTexture.m_videoOverlayTextureCb );
            //m_screenMaterial.SetTexture( "_VTex", m_yuvTexture.m_videoOverlayTextureCr );

            m_meshes = new Stack<MeshFilter>();
            m_isExporting = false;

            if( m_tangoApplication.m_useExperimentalVideoOverlay ) {
                VideoOverlayProvider.ExperimentalConnectTexture( TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_yuvTexture, OnExperimentalFrameAvailable );
            } else {
                VideoOverlayProvider.ConnectTexture( TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_colourBuffer.GetNativeTextureID() );
            }

            TangoUtility.Init();
        }
Example #18
        public static void ExperimentalConnectTexture(TangoEnums.TangoCameraId cameraId, YUVTexture textures, TangoService_onUnityFrameAvailable onUnityFrameAvailable)
        {
            int returnValue = VideoOverlayAPI.TangoService_Experimental_connectTextureIdUnity(cameraId,
                                                                                              (uint)textures.m_videoOverlayTextureY.GetNativeTextureID(),
                                                                                              (uint)textures.m_videoOverlayTextureCb.GetNativeTextureID(),
                                                                                              (uint)textures.m_videoOverlayTextureCr.GetNativeTextureID(),
                                                                                              callbackContext,
                                                                                              onUnityFrameAvailable);

            if (returnValue != Common.ErrorType.TANGO_SUCCESS)
            {
                Debug.Log("VideoOverlayProvider.ConnectTexture() Texture was not connected to camera!");
            }
        }
Example #19
    /// <summary>
    /// Initialize the AR Screen.
    /// </summary>
    private void Start()
    {
        m_tangoApplication = FindObjectOfType<TangoApplication>();
        if (m_tangoApplication != null)
        {
            m_tangoApplication.RegisterOnTangoConnect(_SetCameraIntrinsics);

            // Pass YUV textures to the shader for processing.
            m_textures = m_tangoApplication.GetVideoOverlayTextureYUV();
            m_screenMaterial.SetTexture("_YTex", m_textures.m_videoOverlayTextureY);
            m_screenMaterial.SetTexture("_UTex", m_textures.m_videoOverlayTextureCb);
            m_screenMaterial.SetTexture("_VTex", m_textures.m_videoOverlayTextureCr);
        }
    }
Example #20
 /// <summary>
 /// Experimental API only, subject to change.  Connect texture IDs to a camera.
 /// 
 /// The camera is selected via TangoCameraId.  Currently only TANGO_CAMERA_COLOR is supported.  The texture
 /// handles will be regenerated by the API on startup after which the application can use them, and will be
 /// packed RGBA8888 data containing bytes of the image (so a single RGBA8888 will pack 4 neighbouring pixels).
 /// If the config flag experimental_image_pixel_format is set to HAL_PIXEL_FORMAT_YCrCb_420_SP, texture_y will
 /// pack 1280x720 pixels into a 320x720 RGBA8888 texture.  texture_Cb and texture_Cr will contain copies of
 /// the 2x2 downsampled interleaved UV planes packed similarly.  If experimental_image_pixel_format is set to
 /// HAL_PIXEL_FORMAT_YV12 then texture_y will have a stride of 1536 containing 1280 columns of data, packed
 /// similarly in a RGBA8888 texture. texture_Cb and texture_Cr will be 2x2 downsampled versions of the same.  
 /// See YV12 and NV21 formats for details.
 /// 
 /// Note: The first scan-line of the color image is reserved for metadata instead of image pixels.
 /// </summary>
 /// <param name="cameraId">
 /// The ID of the camera to connect this texture to.  Only TANGO_CAMERA_COLOR and TANGO_CAMERA_FISHEYE are
 /// supported.
 /// </param>
 /// <param name="textures">The texture IDs to use for the Y, Cb, and Cr planes.</param>
 /// <param name="onUnityFrameAvailable">Callback method.</param>
 internal static void ExperimentalConnectTexture(TangoEnums.TangoCameraId cameraId, YUVTexture textures, TangoService_onUnityFrameAvailable onUnityFrameAvailable)
 {
     int returnValue = VideoOverlayAPI.TangoService_Experimental_connectTextureIdUnity(cameraId, 
                                                                                       (uint)textures.m_videoOverlayTextureY.GetNativeTextureID(), 
                                                                                       (uint)textures.m_videoOverlayTextureCb.GetNativeTextureID(), 
                                                                                       (uint)textures.m_videoOverlayTextureCr.GetNativeTextureID(), 
                                                                                       callbackContext, 
                                                                                       onUnityFrameAvailable);
     
     if (returnValue != Common.ErrorType.TANGO_SUCCESS)
     {
         Debug.Log("VideoOverlayProvider.ConnectTexture() Texture was not connected to camera!");
     }
 }
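The stride arithmetic mentioned in the YV12 case above works out as follows; this block just restates the documented numbers (a 1536-byte stride holding 1280 columns, four luma bytes per RGBA texel) as code and is not an SDK call.

// Restating the YV12 packing numbers from the comment above: with four luma bytes per
// RGBA8888 texel, a 1536-byte stride gives a 384-texel-wide row, of which the first
// 320 texels (1280 / 4) hold image data and the remainder is padding.
public static class Yv12PackingMath
{
    public static void Describe(int strideBytes, int imageColumns)
    {
        int packedRowTexels = strideBytes / 4;   // 1536 / 4 = 384
        int validTexels     = imageColumns / 4;  // 1280 / 4 = 320

        UnityEngine.Debug.Log(string.Format(
            "packed row = {0} texels, valid = {1}, padding = {2}",
            packedRowTexels, validTexels, packedRowTexels - validTexels));
    }
}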
        /// <summary>
        /// DEPRECATED: Connect texture IDs to a camera.
        /// 
        /// The camera is selected via TangoCameraId.  Currently only TANGO_CAMERA_COLOR is supported.  The texture
        /// handles will be regenerated by the API on startup after which the application can use them, and will be
        /// packed RGBA8888 data containing bytes of the image (so a single RGBA8888 will pack 4 neighboring pixels).
        /// If the config flag experimental_image_pixel_format is set to HAL_PIXEL_FORMAT_YCrCb_420_SP, texture_y will
        /// pack 1280x720 pixels into a 320x720 RGBA8888 texture.  texture_Cb and texture_Cr will contain copies of
        /// the 2x2 downsampled interleaved UV planes packed similarly.  If experimental_image_pixel_format is set to
        /// HAL_PIXEL_FORMAT_YV12 then texture_y will have a stride of 1536 containing 1280 columns of data, packed
        /// similarly in a RGBA8888 texture. texture_Cb and texture_Cr will be 2x2 downsampled versions of the same.  
        /// See YV12 and NV21 formats for details.
        /// 
        /// Note: The first scan-line of the color image is reserved for metadata instead of image pixels.
        /// </summary>
        /// <param name="cameraId">
        /// The ID of the camera to connect this texture to.  Only TANGO_CAMERA_COLOR and TANGO_CAMERA_FISHEYE are
        /// supported.
        /// </param>
        /// <param name="textures">The texture IDs to use for the Y, Cb, and Cr planes.</param>
        /// <param name="callback">Callback method.</param>
        internal static void ExperimentalConnectTexture(
            TangoEnums.TangoCameraId cameraId, YUVTexture textures, APIOnTextureAvailable callback)
        {
#if UNITY_EDITOR
            if (cameraId == TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR)
            {
                m_emulatedExpId_Y = (RenderTexture)textures.m_videoOverlayTextureY;
                m_emulatedExpId_CbCr = (RenderTexture)textures.m_videoOverlayTextureCb;
            }
#else
            int returnValue = API.TangoService_Experimental_connectTextureIdUnity(
                cameraId, 
                (uint)textures.m_videoOverlayTextureY.GetNativeTexturePtr().ToInt64(), 
                (uint)textures.m_videoOverlayTextureCb.GetNativeTexturePtr().ToInt64(), 
                (uint)textures.m_videoOverlayTextureCr.GetNativeTexturePtr().ToInt64(), 
                IntPtr.Zero, 
                callback);
            
            if (returnValue != Common.ErrorType.TANGO_SUCCESS)
            {
                Debug.Log("VideoOverlayProvider.ConnectTexture() Texture was not connected to camera!");
            }
#endif
        }
Example #22
        /// <summary>
        /// Awake this instance.
        /// </summary>
        private void Awake()
        {
            if (!AndroidHelper.LoadTangoLibrary())
            {
                Debug.Log("Unable to load Tango library.  Things may not work.");
                return;
            }

            AndroidHelper.RegisterPauseEvent(_androidOnPause);
            AndroidHelper.RegisterResumeEvent(_androidOnResume);
            AndroidHelper.RegisterOnActivityResultEvent(_androidOnActivityResult);
            AndroidHelper.RegisterOnDisplayChangedEvent(_androidOnDisplayChanged);
            AndroidHelper.RegisterOnTangoServiceConnected(_androidOnTangoServiceConnected);
            AndroidHelper.RegisterOnTangoServiceDisconnected(_androidOnTangoServiceDisconnected);

            if (m_enableDepth)
            {
                DepthListener.SetPointCloudLimit(m_initialPointCloudMaxPoints);
            }

            if (m_enableVideoOverlay)
            {
                int yTextureWidth = 0;
                int yTextureHeight = 0;
                int uvTextureWidth = 0;
                int uvTextureHeight = 0;

                m_yuvTexture = new YUVTexture(yTextureWidth, yTextureHeight, uvTextureWidth, uvTextureHeight, TextureFormat.RGBA32, false);
            }

            if (m_enable3DReconstruction)
            {
                m_tango3DReconstruction = new Tango3DReconstruction(
                    resolution: m_3drResolutionMeters,
                    generateColor: m_3drGenerateColor,
                    spaceClearing: m_3drSpaceClearing,
                    minNumVertices: m_3drMinNumVertices,
                    updateMethod: m_3drUpdateMethod);
                m_tango3DReconstruction.m_useAreaDescriptionPose = m_3drUseAreaDescriptionPose;
                m_tango3DReconstruction.m_sendColorToUpdate = m_3drGenerateColor;
            }

            TangoSupport.UpdatePoseMatrixFromDeviceRotation(AndroidHelper.GetDisplayRotation(),
                                                            AndroidHelper.GetColorCameraRotation());

            if (m_adjustScreenResolution)
            {
                _ChangeResolutionForPerformance();
            }

            // Importing and exporting Area Descriptions can be done before you connect. We must
            // propagate those events if they happen.
            AreaDescriptionEventListener.SetCallback();

#if UNITY_EDITOR
            if (m_doSlowEmulation && (m_enableDepth || m_enableVideoOverlay))
            {
                if (m_emulationEnvironment == null)
                {
                    Debug.LogError("No Mesh for Emulation assigned on the Tango Application (commonly in the Tango Manager prefab)."
                                   + " Expect blank camera and/or depth frames.");
                }

                EmulatedEnvironmentRenderHelper.InitForEnvironment(m_emulationEnvironment, m_emulationEnvironmentTexture, m_emulationVideoOverlaySimpleLighting);
            }
            else
            {
                EmulatedEnvironmentRenderHelper.Clear();
            }
#endif
        }