Example #1
    public OVRExternalComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
        : base(parentObject, mainCamera, configuration)
    {
#if OVR_ANDROID_MRC
        renderCombinedFrame = false;

        int frameWidth;
        int frameHeight;
        OVRPlugin.Media.GetMrcFrameSize(out frameWidth, out frameHeight);
        Debug.LogFormat("[OVRExternalComposition] Create render texture {0}, {1}", renderCombinedFrame ? frameWidth : frameWidth / 2, frameHeight);
        for (int i = 0; i < 2; ++i)
        {
            mrcRenderTextureArray[i] = new RenderTexture(renderCombinedFrame ? frameWidth : frameWidth / 2, frameHeight, 24, RenderTextureFormat.ARGB32);
            mrcRenderTextureArray[i].Create();
            cameraPoseTimeArray[i] = 0.0;
        }

        skipFrame = OVRManager.display.displayFrequency > fpsThreshold;
        OVRManager.DisplayRefreshRateChanged += DisplayRefreshRateChanged;
        frameIndex = 0;
        lastMrcEncodeFrameSyncId = -1;

        if (!renderCombinedFrame)
        {
            Debug.LogFormat("[OVRExternalComposition] Create extra render textures for foreground");
            for (int i = 0; i < 2; ++i)
            {
                mrcForegroundRenderTextureArray[i] = new RenderTexture(frameWidth / 2, frameHeight, 24, RenderTextureFormat.ARGB32);
                mrcForegroundRenderTextureArray[i].Create();
            }
        }
#endif
        RefreshCameraObjects(parentObject, mainCamera, configuration);
    }
 public static void ReadFrom(this OVRMixedRealityCaptureConfiguration dest, OVRMixedRealityCaptureConfiguration source)
 {
     dest.enableMixedReality = source.enableMixedReality;
     dest.compositionMethod  = source.compositionMethod;
     dest.extraHiddenLayers  = source.extraHiddenLayers;
     dest.externalCompositionBackdropColorRift  = source.externalCompositionBackdropColorRift;
     dest.externalCompositionBackdropColorQuest = source.externalCompositionBackdropColorQuest;
     dest.capturingCameraDevice             = source.capturingCameraDevice;
     dest.flipCameraFrameHorizontally       = source.flipCameraFrameHorizontally;
     dest.flipCameraFrameVertically         = source.flipCameraFrameVertically;
     dest.handPoseStateLatency              = source.handPoseStateLatency;
     dest.sandwichCompositionRenderLatency  = source.sandwichCompositionRenderLatency;
     dest.sandwichCompositionBufferedFrames = source.sandwichCompositionBufferedFrames;
     dest.chromaKeyColor              = source.chromaKeyColor;
     dest.chromaKeySimilarity         = source.chromaKeySimilarity;
     dest.chromaKeySmoothRange        = source.chromaKeySmoothRange;
     dest.chromaKeySpillRange         = source.chromaKeySpillRange;
     dest.useDynamicLighting          = source.useDynamicLighting;
     dest.depthQuality                = source.depthQuality;
     dest.dynamicLightingSmoothFactor = source.dynamicLightingSmoothFactor;
     dest.dynamicLightingDepthVariationClampingValue = source.dynamicLightingDepthVariationClampingValue;
     dest.virtualGreenScreenType              = source.virtualGreenScreenType;
     dest.virtualGreenScreenTopY              = source.virtualGreenScreenTopY;
     dest.virtualGreenScreenBottomY           = source.virtualGreenScreenBottomY;
     dest.virtualGreenScreenApplyDepthCulling = source.virtualGreenScreenApplyDepthCulling;
     dest.virtualGreenScreenDepthTolerance    = source.virtualGreenScreenDepthTolerance;
     dest.mrcActivationMode = source.mrcActivationMode;
     dest.instantiateMixedRealityCameraGameObject = source.instantiateMixedRealityCameraGameObject;
 }
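The extension above is a plain field-by-field copy from the second argument into the first. A minimal usage sketch, assuming two objects of this configuration type with hypothetical names:

static void SyncCaptureSettings(OVRMixedRealityCaptureConfiguration runtimeConfig,
                                OVRMixedRealityCaptureConfiguration savedSettings)
{
    // Copies every MRC-related field (composition method, chroma key, virtual green screen,
    // latency settings, etc.) from savedSettings into runtimeConfig via the ReadFrom extension above.
    runtimeConfig.ReadFrom(savedSettings);
}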
Example #3
    private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
    {
        if (!hasCameraDeviceOpened)
        {
            Debug.LogWarning("[OVRDirectComposition] RefreshCameraObjects(): Unable to open camera device " + cameraDevice);
            return;
        }

        if (mainCamera.gameObject != previousMainCameraObject)
        {
            Debug.LogFormat("[OVRDirectComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);

            OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
            directCompositionCamera = null;

            RefreshCameraRig(parentObject, mainCamera);

            Debug.Assert(directCompositionCameraGameObject == null);
            if (configuration.instantiateMixedRealityCameraGameObject != null)
            {
                directCompositionCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Normal);
            }
            else
            {
                directCompositionCameraGameObject = Object.Instantiate(mainCamera.gameObject);
            }
            directCompositionCameraGameObject.name             = "OculusMRC_DirectCompositionCamera";
            directCompositionCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
            if (directCompositionCameraGameObject.GetComponent <AudioListener>())
            {
                Object.Destroy(directCompositionCameraGameObject.GetComponent <AudioListener>());
            }
            if (directCompositionCameraGameObject.GetComponent <OVRManager>())
            {
                Object.Destroy(directCompositionCameraGameObject.GetComponent <OVRManager>());
            }
            directCompositionCamera = directCompositionCameraGameObject.GetComponent <Camera>();
#if USING_MRC_COMPATIBLE_URP_VERSION
            var directCamData = directCompositionCamera.GetUniversalAdditionalCameraData();
            if (directCamData != null)
            {
                directCamData.allowXRRendering = false;
            }
#elif USING_URP
            Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
#else
            directCompositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
#endif
            directCompositionCamera.depth       = float.MaxValue;
            directCompositionCamera.rect        = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
            directCompositionCamera.cullingMask = (directCompositionCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;


            Debug.Log("DirectComposition activated : useDynamicLighting " + (configuration.useDynamicLighting ? "ON" : "OFF"));
            RefreshCameraFramePlaneObject(parentObject, directCompositionCamera, configuration);

            previousMainCameraObject = mainCamera.gameObject;
        }
    }
    protected OVRCameraComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
        : base(parentObject, mainCamera, configuration)
    {
        cameraDevice = OVRCompositionUtil.ConvertCameraDevice(configuration.capturingCameraDevice);

        Debug.Assert(!hasCameraDeviceOpened);
        Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
        hasCameraDeviceOpened = false;

        bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);

        if (configuration.useDynamicLighting && !cameraSupportsDepth)
        {
            Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
        }

        if (OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;
            if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
            }

            if (configuration.useDynamicLighting)
            {
                OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);
                OVRPlugin.CameraDeviceDepthQuality quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
                if (configuration.depthQuality == OVRManager.DepthQuality.Low)
                {
                    quality = OVRPlugin.CameraDeviceDepthQuality.Low;
                }
                else if (configuration.depthQuality == OVRManager.DepthQuality.Medium)
                {
                    quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
                }
                else if (configuration.depthQuality == OVRManager.DepthQuality.High)
                {
                    quality = OVRPlugin.CameraDeviceDepthQuality.High;
                }
                else
                {
                    Debug.LogWarning("Unknown depth quality");
                }
                OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, quality);
            }

            Debug.LogFormat("Opening camera device {0}", cameraDevice);
            OVRPlugin.OpenCameraDevice(cameraDevice);
            if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
            {
                Debug.LogFormat("Opened camera device {0}", cameraDevice);
                hasCameraDeviceOpened = true;
            }
        }
    }
Example #5
    private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
    {
        if (!hasCameraDeviceOpened)
        {
            Debug.LogWarning("[OVRDirectComposition] RefreshCameraObjects(): Unable to open camera device " + cameraDevice);
            return;
        }

        if (mainCamera.gameObject != previousMainCameraObject)
        {
            Debug.LogFormat("[OVRDirectComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);

            OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
            directCompositionCamera = null;

            RefreshCameraRig(parentObject, mainCamera);

            Debug.Assert(directCompositionCameraGameObject == null);
            if (configuration.instantiateMixedRealityCameraGameObject != null)
            {
                directCompositionCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Normal);
            }
            else
            {
                directCompositionCameraGameObject = Object.Instantiate(mainCamera.gameObject);
            }
            directCompositionCameraGameObject.name             = "OculusMRC_DirectCompositionCamera";
            directCompositionCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
            if (directCompositionCameraGameObject.GetComponent <AudioListener>())
            {
                Object.Destroy(directCompositionCameraGameObject.GetComponent <AudioListener>());
            }
            if (directCompositionCameraGameObject.GetComponent <OVRManager>())
            {
                Object.Destroy(directCompositionCameraGameObject.GetComponent <OVRManager>());
            }
            directCompositionCamera = directCompositionCameraGameObject.GetComponent <Camera>();
            directCompositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
            directCompositionCamera.depth           = float.MaxValue;
            directCompositionCamera.rect            = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
            directCompositionCamera.cullingMask     = (directCompositionCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;


            Debug.Log("DirectComposition activated : useDynamicLighting " + (configuration.useDynamicLighting ? "ON" : "OFF"));
            RefreshCameraFramePlaneObject(parentObject, directCompositionCamera, configuration);

            previousMainCameraObject = mainCamera.gameObject;
        }
    }
    protected void RefreshBoundaryMesh(Camera camera, OVRMixedRealityCaptureConfiguration configuration, out float cullingDistance)
    {
        float depthTolerance = configuration.virtualGreenScreenApplyDepthCulling ? configuration.virtualGreenScreenDepthTolerance : float.PositiveInfinity;

        cullingDistance = OVRCompositionUtil.GetMaximumBoundaryDistance(camera, OVRCompositionUtil.ToBoundaryType(configuration.virtualGreenScreenType)) + depthTolerance;
        if (boundaryMesh == null || boundaryMeshType != configuration.virtualGreenScreenType || boundaryMeshTopY != configuration.virtualGreenScreenTopY || boundaryMeshBottomY != configuration.virtualGreenScreenBottomY)
        {
            boundaryMeshTopY    = configuration.virtualGreenScreenTopY;
            boundaryMeshBottomY = configuration.virtualGreenScreenBottomY;
            boundaryMesh        = OVRCompositionUtil.BuildBoundaryMesh(OVRCompositionUtil.ToBoundaryType(configuration.virtualGreenScreenType), boundaryMeshTopY, boundaryMeshBottomY);
            boundaryMeshType    = configuration.virtualGreenScreenType;

            // Creating GameObject for testing purpose only
            //GameObject boundaryMeshObject = new GameObject("BoundaryMeshObject");
            //boundaryMeshObject.AddComponent<MeshFilter>().mesh = boundaryMesh;
            //boundaryMeshObject.AddComponent<MeshRenderer>();
        }
    }
    protected void UpdateCameraFramePlaneObject(Camera mainCamera, Camera mixedRealityCamera, OVRMixedRealityCaptureConfiguration configuration, RenderTexture boundaryMeshMaskTexture)
    {
        cameraFrameCompositionManager.configuration = configuration;
        bool      hasError            = false;
        Material  cameraFrameMaterial = cameraFramePlaneObject.GetComponent <MeshRenderer>().material;
        Texture2D colorTexture        = Texture2D.blackTexture;
        Texture2D depthTexture        = Texture2D.whiteTexture;

        if (OVRPlugin.IsCameraDeviceColorFrameAvailable(cameraDevice))
        {
            colorTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(cameraDevice);
        }
        else
        {
            Debug.LogWarning("Camera: color frame not ready");
            hasError = true;
        }
        bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);

        if (configuration.useDynamicLighting && cameraSupportsDepth)
        {
            if (OVRPlugin.IsCameraDeviceDepthFrameAvailable(cameraDevice))
            {
                depthTexture = OVRPlugin.GetCameraDeviceDepthFrameTexture(cameraDevice);
            }
            else
            {
                Debug.LogWarning("Camera: depth frame not ready");
                hasError = true;
            }
        }
        if (!hasError)
        {
            Vector3 offset   = mainCamera.transform.position - mixedRealityCamera.transform.position;
            float   distance = Vector3.Dot(mixedRealityCamera.transform.forward, offset);
            cameraFramePlaneDistance = distance;

            cameraFramePlaneObject.transform.position = mixedRealityCamera.transform.position + mixedRealityCamera.transform.forward * distance;
            cameraFramePlaneObject.transform.rotation = mixedRealityCamera.transform.rotation;

            float tanFov = Mathf.Tan(mixedRealityCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);
            cameraFramePlaneObject.transform.localScale = new Vector3(distance * mixedRealityCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);

            float worldHeight = distance * tanFov * 2.0f;
            float worldWidth  = worldHeight * mixedRealityCamera.aspect;

            float cullingDistance = float.MaxValue;

            if (configuration.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
            {
                RefreshBoundaryMesh(mixedRealityCamera, configuration, out cullingDistance);
            }

            cameraFrameMaterial.mainTexture = colorTexture;
            cameraFrameMaterial.SetTexture("_DepthTex", depthTexture);
            cameraFrameMaterial.SetVector("_FlipParams", new Vector4((configuration.flipCameraFrameHorizontally ? 1.0f : 0.0f), (configuration.flipCameraFrameVertically ? 1.0f : 0.0f), 0.0f, 0.0f));
            cameraFrameMaterial.SetColor("_ChromaKeyColor", configuration.chromaKeyColor);
            cameraFrameMaterial.SetFloat("_ChromaKeySimilarity", configuration.chromaKeySimilarity);
            cameraFrameMaterial.SetFloat("_ChromaKeySmoothRange", configuration.chromaKeySmoothRange);
            cameraFrameMaterial.SetFloat("_ChromaKeySpillRange", configuration.chromaKeySpillRange);
            cameraFrameMaterial.SetVector("_TextureDimension", new Vector4(colorTexture.width, colorTexture.height, 1.0f / colorTexture.width, 1.0f / colorTexture.height));
            cameraFrameMaterial.SetVector("_TextureWorldSize", new Vector4(worldWidth, worldHeight, 0, 0));
            cameraFrameMaterial.SetFloat("_SmoothFactor", configuration.dynamicLightingSmoothFactor);
            cameraFrameMaterial.SetFloat("_DepthVariationClamp", configuration.dynamicLightingDepthVariationClampingValue);
            cameraFrameMaterial.SetFloat("_CullingDistance", cullingDistance);
            if (configuration.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || boundaryMesh == null || boundaryMeshMaskTexture == null)
            {
                cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
            }
            else
            {
                if (cameraRig == null)
                {
                    if (!nullcameraRigWarningDisplayed)
                    {
                        Debug.LogWarning("Could not find the OVRCameraRig/CenterEyeAnchor object. Please check if the OVRCameraRig has been setup properly. The virtual green screen has been temporarily disabled");
                        nullcameraRigWarningDisplayed = true;
                    }

                    cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
                }
                else
                {
                    if (nullcameraRigWarningDisplayed)
                    {
                        Debug.Log("OVRCameraRig/CenterEyeAnchor object found. Virtual green screen is activated");
                        nullcameraRigWarningDisplayed = false;
                    }

                    cameraFrameMaterial.SetTexture("_MaskTex", boundaryMeshMaskTexture);
                }
            }
        }
    }
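The quad sizing in UpdateCameraFramePlaneObject above follows directly from the perspective frustum: at distance d in front of a camera with vertical field of view fovY, the visible height is 2 * d * tan(fovY / 2) and the width is that height times the aspect ratio. A standalone sketch of the same computation, as a hypothetical helper that is not part of the SDK:

using UnityEngine;

// Hypothetical helper: world-space width and height visible at 'distance' meters in front of 'cam'.
// A quad scaled to this size and placed at that distance exactly fills the camera's view, which is
// how the camera-frame plane is fitted to the mixed reality camera above.
static class FrustumSizeSketch
{
    public static Vector2 VisibleSizeAtDistance(Camera cam, float distance)
    {
        float tanHalfFov = Mathf.Tan(cam.fieldOfView * Mathf.Deg2Rad * 0.5f);
        float height = 2.0f * distance * tanHalfFov; // same as "distance * tanFov * 2.0f" above
        float width  = height * cam.aspect;          // same as "worldHeight * mixedRealityCamera.aspect"
        return new Vector2(width, height);
    }
}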
    protected void RefreshCameraFramePlaneObject(GameObject parentObject, Camera mixedRealityCamera, OVRMixedRealityCaptureConfiguration configuration)
    {
        OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);

        Debug.Assert(cameraFramePlaneObject == null);
        cameraFramePlaneObject                  = GameObject.CreatePrimitive(PrimitiveType.Quad);
        cameraFramePlaneObject.name             = "OculusMRC_CameraFrame";
        cameraFramePlaneObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
        cameraFramePlaneObject.GetComponent <Collider>().enabled = false;
        cameraFramePlaneObject.GetComponent <MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
        Material cameraFrameMaterial = new Material(Shader.Find(configuration.useDynamicLighting ? "Oculus/OVRMRCameraFrameLit" : "Oculus/OVRMRCameraFrame"));

        cameraFramePlaneObject.GetComponent <MeshRenderer>().material = cameraFrameMaterial;
        cameraFrameMaterial.SetColor("_Color", Color.white);
        cameraFrameMaterial.SetFloat("_Visible", 0.0f);
        cameraFramePlaneObject.transform.localScale = new Vector3(4, 4, 4);
        cameraFramePlaneObject.SetActive(true);
        cameraFrameCompositionManager = mixedRealityCamera.gameObject.AddComponent <OVRCameraFrameCompositionManager>();
        cameraFrameCompositionManager.configuration      = configuration;
        cameraFrameCompositionManager.cameraFrameGameObj = cameraFramePlaneObject;
        cameraFrameCompositionManager.composition        = this;
    }
Example #9
    /// <summary>
    /// Updates the internal state of the Mixed Reality Camera. Called by OVRManager.
    /// </summary>
    public static void Update(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
    {
        if (!OVRPlugin.initialized)
        {
            Debug.LogError("OVRPlugin not initialized");
            return;
        }

        if (!OVRPlugin.IsMixedRealityInitialized())
        {
            OVRPlugin.InitializeMixedReality();
            if (OVRPlugin.IsMixedRealityInitialized())
            {
                Debug.Log("OVRPlugin_MixedReality initialized");
            }
            else
            {
                Debug.LogError("Unable to initialize OVRPlugin_MixedReality");
                return;
            }
        }

        if (!OVRPlugin.IsMixedRealityInitialized())
        {
            return;
        }

        OVRPlugin.UpdateExternalCamera();
#if !OVR_ANDROID_MRC
        OVRPlugin.UpdateCameraDevices();
#endif

#if OVR_ANDROID_MRC
        useFakeExternalCamera = OVRPlugin.Media.UseMrcDebugCamera();
#endif

        if (currentComposition != null && (currentComposition.CompositionMethod() != configuration.compositionMethod))
        {
            currentComposition.Cleanup();
            currentComposition = null;
        }

        if (configuration.compositionMethod == OVRManager.CompositionMethod.External)
        {
            if (currentComposition == null)
            {
                currentComposition = new OVRExternalComposition(parentObject, mainCamera, configuration);
            }
        }
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
        else if (configuration.compositionMethod == OVRManager.CompositionMethod.Direct)
        {
            if (currentComposition == null)
            {
                currentComposition = new OVRDirectComposition(parentObject, mainCamera, configuration);
            }
        }
#endif
        else
        {
            Debug.LogError("Unknown CompositionMethod : " + configuration.compositionMethod);
            return;
        }
        currentComposition.Update(parentObject, mainCamera, configuration, trackingOrigin);
    }
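The summary comment says OVRManager calls this method, and the references to OVRMixedReality.useFakeExternalCamera elsewhere in this file suggest it lives on OVRMixedReality. A hedged call-site sketch; the component and its fields below are illustrative assumptions, not the actual OVRManager members:

using UnityEngine;

// Hypothetical driver sketch: forwards a configuration and tracking origin to the static Update once
// per frame while capture is enabled. Field names are assumptions made for illustration only.
public class MrcDriverSketch : MonoBehaviour
{
    public OVRMixedRealityCaptureConfiguration mrcConfiguration;                          // assumed reference
    public OVRManager.TrackingOrigin trackingOrigin = OVRManager.TrackingOrigin.EyeLevel; // assumed value

    void LateUpdate()
    {
        if (mrcConfiguration != null && mrcConfiguration.enableMixedReality)
        {
            // Creates or refreshes the active composition and updates its cameras for this frame.
            OVRMixedReality.Update(gameObject, Camera.main, mrcConfiguration, trackingOrigin);
        }
    }
}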
Example #10
    private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
    {
        if (mainCamera.gameObject != previousMainCameraObject)
        {
            Debug.LogFormat("[OVRExternalComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);

            OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
            backgroundCamera = null;
            OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
            foregroundCamera = null;

            RefreshCameraRig(parentObject, mainCamera);

            Debug.Assert(backgroundCameraGameObject == null);
            if (configuration.instantiateMixedRealityCameraGameObject != null)
            {
                backgroundCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Background);
            }
            else
            {
                backgroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
            }

            backgroundCameraGameObject.name             = "OculusMRC_BackgroundCamera";
            backgroundCameraGameObject.transform.parent =
                cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
            if (backgroundCameraGameObject.GetComponent <AudioListener>())
            {
                Object.Destroy(backgroundCameraGameObject.GetComponent <AudioListener>());
            }

            if (backgroundCameraGameObject.GetComponent <OVRManager>())
            {
                Object.Destroy(backgroundCameraGameObject.GetComponent <OVRManager>());
            }
            backgroundCamera     = backgroundCameraGameObject.GetComponent <Camera>();
            backgroundCamera.tag = "Untagged";
#if USING_MRC_COMPATIBLE_URP_VERSION
            var backgroundCamData = backgroundCamera.GetUniversalAdditionalCameraData();
            if (backgroundCamData != null)
            {
                backgroundCamData.allowXRRendering = false;
            }
#elif USING_URP
            Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
#else
            backgroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
#endif
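            // In external composition on PC, the background view fills the left half of the output and the
            // foreground fills the right half, so an external capture tool can composite the two layers.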
            backgroundCamera.depth       = 99990.0f;
            backgroundCamera.rect        = new Rect(0.0f, 0.0f, 0.5f, 1.0f);
            backgroundCamera.cullingMask = (backgroundCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
#if OVR_ANDROID_MRC
            backgroundCamera.targetTexture = mrcRenderTextureArray[0];
            if (!renderCombinedFrame)
            {
                backgroundCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
            }
#endif

            Debug.Assert(foregroundCameraGameObject == null);
            if (configuration.instantiateMixedRealityCameraGameObject != null)
            {
                foregroundCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Foreground);
            }
            else
            {
                foregroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
            }

            foregroundCameraGameObject.name             = "OculusMRC_ForgroundCamera";
            foregroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
            if (foregroundCameraGameObject.GetComponent <AudioListener>())
            {
                Object.Destroy(foregroundCameraGameObject.GetComponent <AudioListener>());
            }
            if (foregroundCameraGameObject.GetComponent <OVRManager>())
            {
                Object.Destroy(foregroundCameraGameObject.GetComponent <OVRManager>());
            }
            foregroundCamera     = foregroundCameraGameObject.GetComponent <Camera>();
            foregroundCamera.tag = "Untagged";
#if USING_MRC_COMPATIBLE_URP_VERSION
            var foregroundCamData = foregroundCamera.GetUniversalAdditionalCameraData();
            if (foregroundCamData != null)
            {
                foregroundCamData.allowXRRendering = false;
            }
#elif USING_URP
            Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
#else
            foregroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
#endif
            foregroundCamera.depth      = backgroundCamera.depth + 1.0f;            // ensure the foreground is rendered after the background
            foregroundCamera.rect       = new Rect(0.5f, 0.0f, 0.5f, 1.0f);
            foregroundCamera.clearFlags = CameraClearFlags.Color;
#if OVR_ANDROID_MRC
            foregroundCamera.backgroundColor = configuration.externalCompositionBackdropColorQuest;
#else
            foregroundCamera.backgroundColor = configuration.externalCompositionBackdropColorRift;
#endif
            foregroundCamera.cullingMask = (foregroundCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;

#if OVR_ANDROID_MRC
            if (renderCombinedFrame)
            {
                foregroundCamera.targetTexture = mrcRenderTextureArray[0];
            }
            else
            {
                foregroundCamera.targetTexture = mrcForegroundRenderTextureArray[0];
                foregroundCamera.rect          = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
            }
#endif

            previousMainCameraObject = mainCamera.gameObject;
        }
    }
Example #11
    public override void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
    {
#if OVR_ANDROID_MRC
        if (skipFrame && OVRPlugin.Media.IsCastingToRemoteClient())
        {
            isFrameSkipped = !isFrameSkipped;
            if (isFrameSkipped)
            {
                return;
            }
        }
#endif

        RefreshCameraObjects(gameObject, mainCamera, configuration);

        OVRPlugin.SetHandNodePoseStateLatency(0.0);             // HandNodePoseStateLatency doesn't apply to external composition; always force it to 0.0

        // For third-person camera to use for calculating camera position with different anchors
        OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
        OVRPose localToStagePose = stageToLocalPose.Inverse();
        OVRPose head             = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.Head, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose leftC            = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose rightC           = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandRight, OVRPlugin.Step.Render).ToOVRPose();
        OVRPlugin.Media.SetMrcHeadsetControllerPose(head.ToPosef(), leftC.ToPosef(), rightC.ToPosef());

#if OVR_ANDROID_MRC
        RefreshAudioFilter();

        int drawTextureIndex = (frameIndex / 2) % 2;
        int castTextureIndex = 1 - drawTextureIndex;
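        // drawTextureIndex selects the buffer being rendered over the next two frames;
        // castTextureIndex is the other buffer, whose finished frame is handed to the encoder below.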

        backgroundCamera.enabled = (frameIndex % 2) == 0;
        foregroundCamera.enabled = (frameIndex % 2) == 1;

        if (frameIndex % 2 == 0)
        {
            if (lastMrcEncodeFrameSyncId != -1)
            {
                OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
                lastMrcEncodeFrameSyncId = -1;
            }
            lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
            SetCameraTargetTexture(drawTextureIndex);
        }

        ++frameIndex;
#endif

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        if (configuration.dynamicCullingMask)
        {
            backgroundCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
        }
        backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (configuration.dynamicCullingMask)
        {
            foregroundCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
        }
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = trackingOrigin == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;

            if (cameraInTrackingSpace)
            {
                backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only one camera is supported for MR; always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect      = aspect;

                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                    foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(worldSpacePose);
                    foregroundCamera.transform.FromOVRPose(worldSpacePose);
                }
#if OVR_ANDROID_MRC
                cameraPoseTimeArray[drawTextureIndex] = extrinsics.LastChangedTimeSeconds;
#endif
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
        float   clipDistance            = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
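        // clipDistance is the head's distance in front of the external camera; clamping the foreground
        // far plane there keeps the foreground layer limited to geometry between the camera and the player.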
        foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
    }
 public static void ApplyTo(this OVRMixedRealityCaptureConfiguration dest, OVRMixedRealityCaptureConfiguration source)
 {
     dest.ReadFrom(source);
 }
 public abstract void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin);
 protected OVRComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
 {
     RefreshCameraRig(parentObject, mainCamera);
 }
Example #15
 public OVRDirectComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
     : base(parentObject, mainCamera, configuration)
 {
     RefreshCameraObjects(parentObject, mainCamera, configuration);
 }
Example #16
    public override void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
    {
        if (!hasCameraDeviceOpened)
        {
            return;
        }

        RefreshCameraObjects(gameObject, mainCamera, configuration);

        if (!OVRPlugin.SetHandNodePoseStateLatency(configuration.handPoseStateLatency))
        {
            Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + configuration.handPoseStateLatency);
        }

        directCompositionCamera.clearFlags      = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        if (configuration.dynamicCullingMask)
        {
            directCompositionCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
        }

        directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = trackingOrigin == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation       = OVRMixedReality.fakeCameraRotation;
            directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            directCompositionCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            if (cameraInTrackingSpace)
            {
                directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                OVRPose worldSpacePose = new OVRPose();
                worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
                directCompositionCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only one camera is supported for MR; always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                directCompositionCamera.fieldOfView = fovY;
                directCompositionCamera.aspect      = aspect;
                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
                    directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
                    directCompositionCamera.transform.FromOVRPose(worldSpacePose);
                }
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        if (hasCameraDeviceOpened)
        {
            if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
            {
                boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
                boundaryMeshMaskTexture.Create();
            }
            UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, configuration, boundaryMeshMaskTexture);
            directCompositionCamera.GetComponent <OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
        }
    }