Example #1
    /// <summary>
    /// Updates the internal state of the OVRDisplay. Called by OVRManager.
    /// </summary>
    public void Update()
    {
        UpdateTextures();

        if (recenterRequested && Time.frameCount > recenterRequestedFrameCount)
        {
            if (RecenteredPose != null)
            {
                RecenteredPose();
            }
            recenterRequested           = false;
            recenterRequestedFrameCount = int.MaxValue;
        }
        if (OVRPlugin.GetSystemHeadsetType() == OVRPlugin.SystemHeadset.Oculus_Quest)
        {
            OVRManager.TrackingOrigin relativeOrigin = (OVRManager.instance.trackingOriginType != OVRManager.TrackingOrigin.Stage) ? OVRManager.TrackingOrigin.Stage : OVRManager.TrackingOrigin.EyeLevel;
            OVRPose relativeTrackingSpacePose        = OVRPlugin.GetTrackingTransformRelativePose((OVRPlugin.TrackingOrigin)relativeOrigin).ToOVRPose();
            // If the tracking origin type hasn't changed but the relative pose has, a recenter occurred.
            if (previousTrackingOrigin == OVRManager.instance.trackingOriginType && previousRelativeTrackingSpacePose != relativeTrackingSpacePose && RecenteredPose != null)
            {
                RecenteredPose();
            }
            previousRelativeTrackingSpacePose = relativeTrackingSpacePose;
            previousTrackingOrigin            = OVRManager.instance.trackingOriginType;
        }
    }
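The RecenteredPose delegate invoked above is the hook client code uses to react to a recenter. A minimal subscription sketch, assuming RecenteredPose is exposed as a public event and that OVRManager.display returns the active OVRDisplay (both part of the surrounding SDK, not shown in this example):

    using UnityEngine;

    public class RecenterListener : MonoBehaviour
    {
        void OnEnable()
        {
            // Assumption: OVRManager.display exposes the active OVRDisplay,
            // and RecenteredPose is a public event on it.
            OVRManager.display.RecenteredPose += OnRecentered;
        }

        void OnDisable()
        {
            OVRManager.display.RecenteredPose -= OnRecentered;
        }

        void OnRecentered()
        {
            // Re-anchor world-locked content against the new tracking origin here.
            Debug.Log("Tracking space was recentered");
        }
    }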
Example #2
 /// <summary>
 /// Creates an instance of OVRDisplay. Called by OVRManager.
 /// </summary>
 public OVRDisplay()
 {
     UpdateTextures();
     if (OVRPlugin.GetSystemHeadsetType() == OVRPlugin.SystemHeadset.Oculus_Quest)
     {
         previousTrackingOrigin = OVRManager.instance.trackingOriginType;
         OVRManager.TrackingOrigin relativeOrigin = (previousTrackingOrigin != OVRManager.TrackingOrigin.Stage) ? OVRManager.TrackingOrigin.Stage : OVRManager.TrackingOrigin.EyeLevel;
         previousRelativeTrackingSpacePose = OVRPlugin.GetTrackingTransformRelativePose((OVRPlugin.TrackingOrigin)relativeOrigin).ToOVRPose();
     }
 }
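Note why the constructor samples the pose relative to the opposite origin: tracking space relative to the currently selected origin is identity by definition, while its offset from the other origin shifts whenever a recenter occurs. A condensed sketch of the check Update performs against this snapshot (the helper name is hypothetical, and the real code additionally requires that trackingOriginType did not change between frames):

    // Hypothetical helper condensing the recenter check from Update above.
    static bool PoseChangedSinceSnapshot(ref OVRPose snapshot, OVRManager.TrackingOrigin origin)
    {
        // Sample tracking space relative to the origin we are NOT using;
        // that offset moves when the active origin is recentered.
        OVRManager.TrackingOrigin other = (origin != OVRManager.TrackingOrigin.Stage)
            ? OVRManager.TrackingOrigin.Stage
            : OVRManager.TrackingOrigin.EyeLevel;
        OVRPose current = OVRPlugin.GetTrackingTransformRelativePose((OVRPlugin.TrackingOrigin)other).ToOVRPose();
        bool changed = current != snapshot;
        snapshot = current;
        return changed;
    }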
Example #3
    /// <summary>
    /// Updates the internal state of the Mixed Reality Camera. Called by OVRManager.
    /// </summary>
    public static void Update(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
    {
        if (!OVRPlugin.initialized)
        {
            Debug.LogError("OVRPlugin not initialized");
            return;
        }

        if (!OVRPlugin.IsMixedRealityInitialized())
        {
            OVRPlugin.InitializeMixedReality();
            if (OVRPlugin.IsMixedRealityInitialized())
            {
                Debug.Log("OVRPlugin_MixedReality initialized");
            }
            else
            {
                Debug.LogError("Unable to initialize OVRPlugin_MixedReality");
                return;
            }
        }

        OVRPlugin.UpdateExternalCamera();
#if !OVR_ANDROID_MRC
        OVRPlugin.UpdateCameraDevices();
#endif

#if OVR_ANDROID_MRC
        useFakeExternalCamera = OVRPlugin.Media.UseMrcDebugCamera();
#endif

        if (currentComposition != null && (currentComposition.CompositionMethod() != configuration.compositionMethod))
        {
            currentComposition.Cleanup();
            currentComposition = null;
        }

        if (configuration.compositionMethod == OVRManager.CompositionMethod.External)
        {
            if (currentComposition == null)
            {
                currentComposition = new OVRExternalComposition(parentObject, mainCamera, configuration);
            }
        }
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
        else if (configuration.compositionMethod == OVRManager.CompositionMethod.Direct)
        {
            if (currentComposition == null)
            {
                currentComposition = new OVRDirectComposition(parentObject, mainCamera, configuration);
            }
        }
#endif
        else
        {
            Debug.LogError("Unknown CompositionMethod : " + configuration.compositionMethod);
            return;
        }
        currentComposition.Update(parentObject, mainCamera, configuration, trackingOrigin);
    }
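OVRMixedReality.Update is meant to be driven once per frame. A minimal driver sketch, assuming a MonoBehaviour that owns a capture configuration (the fields here are hypothetical; in the SDK, OVRManager makes the equivalent call):

    using UnityEngine;

    public class MixedRealityDriver : MonoBehaviour
    {
        // Hypothetical fields; OVRManager holds the equivalent state.
        public OVRMixedRealityCaptureConfiguration captureConfiguration;
        public OVRManager.TrackingOrigin trackingOriginType = OVRManager.TrackingOrigin.EyeLevel;

        void LateUpdate()
        {
            // Update the MRC cameras after the main camera has moved this frame.
            OVRMixedReality.Update(gameObject, Camera.main, captureConfiguration, trackingOriginType);
        }
    }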
Example #4
    public override void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
    {
#if OVR_ANDROID_MRC
        if (skipFrame && OVRPlugin.Media.IsCastingToRemoteClient())
        {
            isFrameSkipped = !isFrameSkipped;
            if (isFrameSkipped)
            {
                return;
            }
        }
#endif

        RefreshCameraObjects(gameObject, mainCamera, configuration);

        OVRPlugin.SetHandNodePoseStateLatency(0.0); // Hand-node pose state latency doesn't apply to external composition; always force it to 0.0.

        // For third-person camera to use for calculating camera position with different anchors
        OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
        OVRPose localToStagePose = stageToLocalPose.Inverse();
        OVRPose head             = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.Head, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose leftC            = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose rightC           = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandRight, OVRPlugin.Step.Render).ToOVRPose();
        OVRPlugin.Media.SetMrcHeadsetControllerPose(head.ToPosef(), leftC.ToPosef(), rightC.ToPosef());

#if OVR_ANDROID_MRC
        RefreshAudioFilter();

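        // The MRC render targets are double-buffered on a two-frame cadence:
        // drawTextureIndex flips every two frames while the opposite texture
        // (castTextureIndex) is synced and handed to the encoder; background
        // and foreground layers render on alternating frames.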
        int drawTextureIndex = (frameIndex / 2) % 2;
        int castTextureIndex = 1 - drawTextureIndex;

        backgroundCamera.enabled = (frameIndex % 2) == 0;
        foregroundCamera.enabled = (frameIndex % 2) == 1;

        if (frameIndex % 2 == 0)
        {
            if (lastMrcEncodeFrameSyncId != -1)
            {
                OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
                lastMrcEncodeFrameSyncId = -1;
            }
            lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
            SetCameraTargetTexture(drawTextureIndex);
        }

        ++frameIndex;
#endif

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
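        // Culling-mask arithmetic: start from the main camera's layers, drop
        // everything in extraHiddenLayers, then force extraVisibleLayers on.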
        if (configuration.dynamicCullingMask)
        {
            backgroundCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
        }
        backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (configuration.dynamicCullingMask)
        {
            foregroundCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
        }
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = trackingOrigin == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            OVRPose worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;

            if (cameraInTrackingSpace)
            {
                backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // Only one external camera is supported for MR so far; always use camera index 0.
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
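                // Unity's Camera.fieldOfView is the vertical FOV in degrees, so
                // recover it from the up half-tangent of the intrinsics; aspect
                // is the ratio of the horizontal to vertical half-tangents.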
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect      = aspect;

                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                    foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(worldSpacePose);
                    foregroundCamera.transform.FromOVRPose(worldSpacePose);
                }
#if OVR_ANDROID_MRC
                cameraPoseTimeArray[drawTextureIndex] = extrinsics.LastChangedTimeSeconds;
#endif
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

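        // Clamp the foreground far plane to the camera-to-head distance so the
        // foreground layer only renders content between the external camera and
        // the player.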
        Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
        float   clipDistance            = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
        foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
    }
Example #5
 public abstract void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin);
Example #6
    public override void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
    {
        if (!hasCameraDeviceOpened)
        {
            return;
        }

        RefreshCameraObjects(gameObject, mainCamera, configuration);

        if (!OVRPlugin.SetHandNodePoseStateLatency(configuration.handPoseStateLatency))
        {
            Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + configuration.handPoseStateLatency);
        }

        directCompositionCamera.clearFlags      = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        if (configuration.dynamicCullingMask)
        {
            directCompositionCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
        }

        directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = trackingOrigin == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation       = OVRMixedReality.fakeCameraRotation;
            directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            directCompositionCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            if (cameraInTrackingSpace)
            {
                directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                OVRPose worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
                directCompositionCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // Only one external camera is supported for MR so far; always use camera index 0.
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                directCompositionCamera.fieldOfView = fovY;
                directCompositionCamera.aspect      = aspect;
                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
                    directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
                    directCompositionCamera.transform.FromOVRPose(worldSpacePose);
                }
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        // hasCameraDeviceOpened is guaranteed here by the early return above.
        if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
        {
            // Lazily (re)allocate the single-channel mask whenever the screen size changes.
            boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
            boundaryMeshMaskTexture.Create();
        }
        UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, configuration, boundaryMeshMaskTexture);
        directCompositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
    }
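Both compositors read the same OVRMixedRealityCaptureConfiguration. A minimal setup sketch touching only the fields referenced in the examples above (values are illustrative, and the field types are assumed from that usage):

    using UnityEngine;

    public static class MrcConfigurationExample
    {
        public static void Configure(OVRMixedRealityCaptureConfiguration config)
        {
            // Selects which compositor OVRMixedReality.Update instantiates.
            config.compositionMethod = OVRManager.CompositionMethod.External;

            // Feeds the culling-mask arithmetic used by both compositors.
            config.dynamicCullingMask = true;
            config.extraHiddenLayers  = LayerMask.GetMask("UI"); // illustrative layer
            config.extraVisibleLayers = 0;

            // Consumed by direct composition only; valid range is 0.0 to 0.5.
            config.handPoseStateLatency = 0.1f;
        }
    }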