public static void InitExtrinsics( TangoPoseRequest requests )
        {
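            // A timestamp of 0.0 requests the most recent pose estimate; for extrinsic frame
            // pairs this is the device's constant factory calibration.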
            double timestamp = 0.0;
            TangoCoordinateFramePair pair;
            TangoPoseData poseData = new TangoPoseData();

            Vector3 one = Vector3.one;
            Vector3 position;
            Quaternion quat;

            // FIXME: this can get called multiple times; add a guard so the extrinsics are only initialised once.
            if( (requests & TangoPoseRequest.IMU_TO_DEVICE) == TangoPoseRequest.IMU_TO_DEVICE ) {
                // Query the extrinsics between IMU and device frame.
                pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
                pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE;
                PoseProvider.GetPoseAtTime( poseData, timestamp, pair );
                position = new Vector3( (float)poseData.translation[0],
                                               (float)poseData.translation[1],
                                               (float)poseData.translation[2] );
                quat = new Quaternion( (float)poseData.orientation[0],
                                                 (float)poseData.orientation[1],
                                                 (float)poseData.orientation[2],
                                                 (float)poseData.orientation[3] );
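                // Pose of the device frame with respect to the IMU frame, packed into a TRS matrix (unit scale).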
                m_imuTd = Matrix4x4.TRS( position, quat, one );
            }

            if( (requests & TangoPoseRequest.IMU_TO_CAMERA_DEPTH) == TangoPoseRequest.IMU_TO_CAMERA_DEPTH ) {
                // Query the extrinsics between IMU and depth camera frame.
                pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
                pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_DEPTH;
                PoseProvider.GetPoseAtTime( poseData, timestamp, pair );
                position = new Vector3( (float)poseData.translation[0],
                                       (float)poseData.translation[1],
                                       (float)poseData.translation[2] );
                quat = new Quaternion( (float)poseData.orientation[0],
                                      (float)poseData.orientation[1],
                                      (float)poseData.orientation[2],
                                      (float)poseData.orientation[3] );
                m_imuTdc = Matrix4x4.TRS( position, quat, one );
            }

            if( (requests & TangoPoseRequest.IMU_TO_CAMERA_COLOR) == TangoPoseRequest.IMU_TO_CAMERA_COLOR ) {
                // Query the extrinsics between IMU and color camera frame.
                pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU;
                pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_COLOR;
                PoseProvider.GetPoseAtTime( poseData, timestamp, pair );
                position = new Vector3( (float)poseData.translation[0],
                                       (float)poseData.translation[1],
                                       (float)poseData.translation[2] );
                quat = new Quaternion( (float)poseData.orientation[0],
                                      (float)poseData.orientation[1],
                                      (float)poseData.orientation[2],
                                      (float)poseData.orientation[3] );
                m_imuTcc = Matrix4x4.TRS( position, quat, one );
            }

            // Combined transform relating the device frame and the Unity camera frame (left disabled here).
            //m_dTuc = Matrix4x4.Inverse( m_imuTd ) * m_imuTcc * m_ccTuc;
        }
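
        // The three query blocks in InitExtrinsics repeat the same pose-to-matrix conversion.
        // A possible refactoring is sketched below (the helper name QueryExtrinsic is not part
        // of the original code, and the method is currently unused), e.g.
        //     m_imuTd = QueryExtrinsic( TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU,
        //                               TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE );
        private static Matrix4x4 QueryExtrinsic( TangoEnums.TangoCoordinateFrameType baseFrame,
                                                 TangoEnums.TangoCoordinateFrameType targetFrame )
        {
            TangoCoordinateFramePair pair;
            pair.baseFrame = baseFrame;
            pair.targetFrame = targetFrame;

            // Timestamp 0.0 returns the latest (constant) extrinsic calibration for the pair.
            TangoPoseData poseData = new TangoPoseData();
            PoseProvider.GetPoseAtTime( poseData, 0.0, pair );

            Vector3 position = new Vector3( (float)poseData.translation[0],
                                            (float)poseData.translation[1],
                                            (float)poseData.translation[2] );
            Quaternion rotation = new Quaternion( (float)poseData.orientation[0],
                                                  (float)poseData.orientation[1],
                                                  (float)poseData.orientation[2],
                                                  (float)poseData.orientation[3] );
            return Matrix4x4.TRS( position, rotation, Vector3.one );
        }
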
        // Use this for initialization
        void Start()
        {
            m_tangoApplication = FindObjectOfType<TangoApplication>();

            m_scanBounds = GetComponent<BoxCollider>();

            // Locks guarding export state and progress reporting across threads.
            m_exportLock = new Mutex();
            m_progressLock = new Mutex();

            m_tangoApplication.RegisterPermissionsCallback( _OnTangoApplicationPermissionsEvent );
            m_tangoApplication.RequestNecessaryPermissionsAndConnect();

            //m_tangoApplication.Register( this );

            // Request all three extrinsics: device, depth camera and colour camera, each relative to the IMU frame.
            m_request = TangoPoseRequest.IMU_TO_DEVICE | TangoPoseRequest.IMU_TO_CAMERA_DEPTH | TangoPoseRequest.IMU_TO_CAMERA_COLOR;

            m_yuvTexture = m_tangoApplication.GetVideoOverlayTextureYUV();

            //// Pass YUV textures to shader for process.
            //m_screenMaterial.SetTexture( "_YTex", m_yuvTexture.m_videoOverlayTextureY );
            //m_screenMaterial.SetTexture( "_UTex", m_yuvTexture.m_videoOverlayTextureCb );
            //m_screenMaterial.SetTexture( "_VTex", m_yuvTexture.m_videoOverlayTextureCr );

            m_meshes = new Stack<MeshFilter>();
            m_isExporting = false;

            // Bind the colour camera feed: the experimental path updates the YUV textures through a
            // frame-available callback, otherwise the SDK renders into a single native colour texture.
            if( m_tangoApplication.m_useExperimentalVideoOverlay ) {
                VideoOverlayProvider.ExperimentalConnectTexture( TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_yuvTexture, OnExperimentalFrameAvailable );
            } else {
                VideoOverlayProvider.ConnectTexture( TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_colourBuffer.GetNativeTextureID() );
            }

            TangoUtility.Init();
        }