void updateAndroidTexture_YUV()
    {
        // Pulls the current camera frame from the native SenseAR session,
        // uploads the Y and UV planes to their GPU textures, and publishes the
        // extracted camera pose via SenseARCameraPose. No-op until the native
        // session exists.
        if (m_NativeSession == null)
        {
            return;
        }

        IntPtr poseHandle = m_NativeSession.PoseApi.Create();
        try
        {
            // Fills the Y/UV byte buffers and writes this frame's pose into
            // poseHandle in a single native call.
            m_NativeSession.updateTexture_y_memcpy_uv_assign_RGB_pose(_videoTexture_Y_bytes, _videoTexture_uv_RGB_bytes, poseHandle);

            Pose resultPose = m_NativeSession.PoseApi.ExtractPoseValue(poseHandle);
            SenseARCameraPose.SetPose(resultPose);
        }
        finally
        {
            // Always release the native handle, even if extraction or pose
            // publishing throws — otherwise the handle leaks every frame.
            m_NativeSession.PoseApi.Destroy(poseHandle);
        }

        // Upload the raw plane buffers to the GPU and bind them to the
        // YUV-composing material.
        _videoTextureY.LoadRawTextureData(_videoTexture_Y_bytes);
        _videoTextureY.Apply();

        _videoTextureUV.LoadRawTextureData(_videoTexture_uv_RGB_bytes);
        _videoTextureUV.Apply();

        _Material.SetTexture("_uvTex", _videoTextureUV);
        _Material.mainTexture = _videoTextureY;
        if (LinearColorSpace)
        {
            // Shader-side gamma correction when rendering in linear color
            // space. NOTE(review): 2.02 (not the usual 2.2) looks like a tuned
            // value — confirm before changing.
            _Material.SetFloat("_gamma", 2.02f);
        }
    }
    void UpdatePose()
    {
        // Extracts the current camera pose from the native session and pushes
        // it to standardARCameraPose. No-op until the native session exists.
        if (m_NativeSession == null)
        {
            return;
        }

        IntPtr poseHandle = m_NativeSession.PoseApi.Create();
        try
        {
            // NOTE(review): this native call also copies the video texture
            // byte buffers as a side effect, even though this method only
            // needs the pose — confirm there is no pose-only native entry.
            m_NativeSession.updateTexture_y_memcpy_uv_assign_RGB_pose(_videoTexture_Y_bytes, _videoTexture_uv_RGB_bytes, poseHandle);

            Pose resultPose = m_NativeSession.PoseApi.ExtractPoseValue(poseHandle);
            standardARCameraPose.SetPose(resultPose);
        }
        finally
        {
            // Guarantee the native handle is released even if extraction
            // throws — the original leaked it on any exception.
            m_NativeSession.PoseApi.Destroy(poseHandle);
        }
    }