protected OVRCameraComposition(GameObject parentObject, Camera mainCamera, OVRManager.CameraDevice inCameraDevice, bool inUseDynamicLighting, OVRManager.DepthQuality depthQuality)
        : base(parentObject, mainCamera)
    {
        // Translate the manager-level device enum to the plugin-level enum.
        cameraDevice = OVRCompositionUtil.ConvertCameraDevice(inCameraDevice);

        // The device must not already be open at construction time.
        Debug.Assert(!hasCameraDeviceOpened);
        Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
        hasCameraDeviceOpened = false;
        useDynamicLighting = inUseDynamicLighting;

        // Depth support is queried unconditionally; warn only when dynamic
        // lighting was requested on a device without depth support.
        bool supportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
        if (useDynamicLighting && !supportsDepth)
        {
            Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
        }

        if (!OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
        {
            return;
        }

        // Prefer a color frame size matching external camera 0's sensor resolution.
        OVRPlugin.CameraExtrinsics extrinsics;
        OVRPlugin.CameraIntrinsics intrinsics;
        OVRPlugin.Posef calibrationRawPose;
        if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics, out calibrationRawPose))
        {
            OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
        }

        if (useDynamicLighting)
        {
            OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);

            // Map the manager-level depth quality onto the plugin-level one;
            // unknown values fall back to Medium with a warning (as before).
            OVRPlugin.CameraDeviceDepthQuality preferred;
            switch (depthQuality)
            {
                case OVRManager.DepthQuality.Low:
                    preferred = OVRPlugin.CameraDeviceDepthQuality.Low;
                    break;
                case OVRManager.DepthQuality.Medium:
                    preferred = OVRPlugin.CameraDeviceDepthQuality.Medium;
                    break;
                case OVRManager.DepthQuality.High:
                    preferred = OVRPlugin.CameraDeviceDepthQuality.High;
                    break;
                default:
                    Debug.LogWarning("Unknown depth quality");
                    preferred = OVRPlugin.CameraDeviceDepthQuality.Medium;
                    break;
            }
            OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, preferred);
        }

        Debug.LogFormat("Opening camera device {0}", cameraDevice);
        OVRPlugin.OpenCameraDevice(cameraDevice);
        if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
        {
            Debug.LogFormat("Opened camera device {0}", cameraDevice);
            hasCameraDeviceOpened = true;
        }
    }
    // Creates the direct-composition camera and the camera-frame quad under
    // parentObject, then attempts to open the direct-composition camera device.
    // Asserts that neither composition mode has been initialized yet and sets
    // directCompositionObjectsInitialized when done.
    private static void InitializeDirectCompositionObjects(GameObject parentObject, Camera mainCamera)
    {
        Debug.Assert(!externalCompositionObjectsInitialized);
        Debug.Assert(!directCompositionObjectsInitialized);

        // Composition camera: renders on top of everything (depth = MaxValue),
        // full viewport, mirroring the main camera's clear/culling/clip settings.
        Debug.Assert(directCompositionCameraGameObject == null);
        directCompositionCameraGameObject                  = new GameObject();
        directCompositionCameraGameObject.name             = "MRDirectCompositionCamera";
        directCompositionCameraGameObject.transform.parent = parentObject.transform;
        directCompositionCamera = directCompositionCameraGameObject.AddComponent <Camera>();
        directCompositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
        directCompositionCamera.depth           = float.MaxValue;
        directCompositionCamera.rect            = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
        directCompositionCamera.clearFlags      = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        directCompositionCamera.cullingMask     = mainCamera.cullingMask;
        directCompositionCamera.nearClipPlane   = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane    = mainCamera.farClipPlane;

        // Quad that will display the physical camera frame; starts hidden via
        // the material's _Visible float, with collisions and shadows disabled.
        Debug.Assert(cameraFramePlaneObject == null);
        cameraFramePlaneObject                  = GameObject.CreatePrimitive(PrimitiveType.Quad);
        cameraFramePlaneObject.name             = "MRCameraFrame";
        cameraFramePlaneObject.transform.parent = parentObject.transform;
        cameraFramePlaneObject.GetComponent <Collider>().enabled = false;
        cameraFramePlaneObject.GetComponent <MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
        Material cameraFrameMaterial = new Material(Shader.Find("Unlit/OVRMRCameraFrame"));

        cameraFramePlaneObject.GetComponent <MeshRenderer>().material = cameraFrameMaterial;
        cameraFrameMaterial.SetColor("_Color", Color.white);
        cameraFrameMaterial.SetFloat("_Visible", 0.0f);
        cameraFramePlaneObject.transform.localScale = new Vector3(4, 4, 4);
        cameraFramePlaneObject.SetActive(true);
        OVRMRDirectCompositionCameraManager directCompositionCameraManager = directCompositionCameraGameObject.AddComponent <OVRMRDirectCompositionCameraManager>();

        directCompositionCameraManager.cameraFrameGameObj = cameraFramePlaneObject;

        // Open the hardware camera device if one is available; prefer a color
        // frame size matching external camera 0's sensor resolution.
        Debug.Assert(!directCompositionCameraDeviceOpened);
        Debug.Assert(!OVRPlugin.HasCameraDeviceOpened(directCompositionCameraDevice));
        if (OVRPlugin.IsCameraDeviceAvailable(directCompositionCameraDevice))
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;
            if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPlugin.SetCameraDevicePreferredColorFrameSize(directCompositionCameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
            }

            OVRPlugin.OpenCameraDevice(directCompositionCameraDevice);
            if (OVRPlugin.HasCameraDeviceOpened(directCompositionCameraDevice))
            {
                directCompositionCameraDeviceOpened = true;
            }
        }

        directCompositionObjectsInitialized = true;
    }
	/// <summary>
	/// Converts the manager-level camera device enum, then — if the device is
	/// available — applies the preferred color frame size (from external camera
	/// 0's sensor resolution) and depth quality, and opens the device.
	/// Fixed: every statement terminator was a ':' instead of ';' (the block
	/// did not compile); restored correct C# syntax.
	/// </summary>
	protected OVRCameraComposition(OVRManager.CameraDevice inCameraDevice, bool inUseDynamicLighting, OVRManager.DepthQuality depthQuality)
	{
		cameraDevice = OVRCompositionUtil.ConvertCameraDevice(inCameraDevice);

		// The device must not already be open at construction time.
		Debug.Assert(!hasCameraDeviceOpened);
		Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
		hasCameraDeviceOpened = false;
		useDynamicLighting = inUseDynamicLighting;

		bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
		if (useDynamicLighting && !cameraSupportsDepth)
		{
			Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
		}

		if (OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
		{
			OVRPlugin.CameraExtrinsics extrinsics;
			OVRPlugin.CameraIntrinsics intrinsics;
			if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
			{
				OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
			}

			if (useDynamicLighting)
			{
				OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);
				// Unknown values fall through to Medium with a warning.
				OVRPlugin.CameraDeviceDepthQuality quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
				if (depthQuality == OVRManager.DepthQuality.Low)
				{
					quality = OVRPlugin.CameraDeviceDepthQuality.Low;
				}
				else if (depthQuality == OVRManager.DepthQuality.Medium)
				{
					quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
				}
				else if (depthQuality == OVRManager.DepthQuality.High)
				{
					quality = OVRPlugin.CameraDeviceDepthQuality.High;
				}
				else
				{
					Debug.LogWarning("Unknown depth quality");
				}
				OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, quality);
			}

			OVRPlugin.OpenCameraDevice(cameraDevice);
			if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
			{
				hasCameraDeviceOpened = true;
			}
		}
	}
// Example #4 (sample separator left over from code-example scrape)
 // Per-frame update for the sandwich composition: advances the history-record
 // ring-buffer cursor, mirrors the main camera onto the bg/fg cameras,
 // refreshes camera poses from external camera 0 (or the fake camera), and
 // feeds a history record into the composition manager.
 public override void Update(Camera mainCamera)
 {
     if (!this.hasCameraDeviceOpened)
     {
         return;
     }
     // Timestamp this frame and advance the ring-buffer cursor with wraparound.
     this.frameRealtime = Time.realtimeSinceStartup;
     this.historyRecordCursorIndex++;
     if (this.historyRecordCursorIndex >= this.historyRecordCount)
     {
         this.historyRecordCursorIndex = 0;
     }
     if (!OVRPlugin.SetHandNodePoseStateLatency((double)OVRManager.instance.handPoseStateLatency))
     {
         Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
     }
     this.RefreshRenderTextures(mainCamera);
     // Mirror the main camera; extraHiddenLayers are excluded from both MR cameras.
     this.bgCamera.clearFlags      = mainCamera.clearFlags;
     this.bgCamera.backgroundColor = mainCamera.backgroundColor;
     this.bgCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     this.fgCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     OVRPlugin.CameraExtrinsics extrinsics;
     OVRPlugin.CameraIntrinsics cameraIntrinsics;
     if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
     {
         // No physical external camera: use the configured fake camera pose.
         OVRPose pose = default(OVRPose);
         pose = OVRExtensions.ToWorldSpacePose(new OVRPose
         {
             position    = OVRMixedReality.fakeCameraPositon,
             orientation = OVRMixedReality.fakeCameraRotation
         });
         this.RefreshCameraPoses(OVRMixedReality.fakeCameraFov, OVRMixedReality.fakeCameraAspect, pose);
     }
     else if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out cameraIntrinsics))
     {
         // 57.29578 = degrees per radian; vertical FOV derived from the up tangent.
         OVRPose pose2  = base.ComputeCameraWorldSpacePose(extrinsics);
         float   fovY   = Mathf.Atan(cameraIntrinsics.FOVPort.UpTan) * 57.29578f * 2f;
         float   aspect = cameraIntrinsics.FOVPort.LeftTan / cameraIntrinsics.FOVPort.UpTan;
         this.RefreshCameraPoses(fovY, aspect, pose2);
     }
     else
     {
         Debug.LogWarning("Failed to get external camera information");
     }
     // Composite from a history record rather than the current frame
     // (presumably to latency-match the camera feed — confirm against
     // GetHistoryRecordForComposition's implementation).
     this.compositionCamera.GetComponent <OVRCameraComposition.OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = this.historyRecordArray[this.historyRecordCursorIndex].boundaryMeshMaskTexture;
     OVRSandwichComposition.HistoryRecord historyRecordForComposition = this.GetHistoryRecordForComposition();
     base.UpdateCameraFramePlaneObject(mainCamera, this.compositionCamera, historyRecordForComposition.boundaryMeshMaskTexture);
     OVRSandwichComposition.OVRSandwichCompositionManager component = this.compositionCamera.gameObject.GetComponent <OVRSandwichComposition.OVRSandwichCompositionManager>();
     component.fgTexture = historyRecordForComposition.fgRenderTexture;
     component.bgTexture = historyRecordForComposition.bgRenderTexture;
     // Keep the proxy plane at the frame-plane distance in front of the
     // foreground camera, facing along the camera's forward axis.
     this.cameraProxyPlane.transform.position = this.fgCamera.transform.position + this.fgCamera.transform.forward * this.cameraFramePlaneDistance;
     this.cameraProxyPlane.transform.LookAt(this.cameraProxyPlane.transform.position + this.fgCamera.transform.forward);
 }
// Example #5 (sample separator left over from code-example scrape)
 // Per-frame update for direct composition: mirrors the main camera onto the
 // direct-composition camera, poses it from external camera 0 (or the fake
 // camera), and maintains the boundary-mesh mask texture.
 public override void Update(Camera mainCamera)
 {
     if (!this.hasCameraDeviceOpened)
     {
         return;
     }
     if (!OVRPlugin.SetHandNodePoseStateLatency((double)OVRManager.instance.handPoseStateLatency))
     {
         Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
     }
     // Mirror the main camera; extraHiddenLayers are excluded from the MR view.
     this.directCompositionCamera.clearFlags      = mainCamera.clearFlags;
     this.directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
     this.directCompositionCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     this.directCompositionCamera.nearClipPlane   = mainCamera.nearClipPlane;
     this.directCompositionCamera.farClipPlane    = mainCamera.farClipPlane;
     OVRPlugin.CameraExtrinsics extrinsics;
     OVRPlugin.CameraIntrinsics cameraIntrinsics;
     if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
     {
         // No physical external camera: use the configured fake camera pose.
         OVRPose pose = default(OVRPose);
         pose = OVRExtensions.ToWorldSpacePose(new OVRPose
         {
             position    = OVRMixedReality.fakeCameraPositon,
             orientation = OVRMixedReality.fakeCameraRotation
         });
         this.directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
         this.directCompositionCamera.aspect      = OVRMixedReality.fakeCameraAspect;
         this.directCompositionCamera.transform.FromOVRPose(pose, false);
     }
     else if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out cameraIntrinsics))
     {
         // 57.29578 = degrees per radian; vertical FOV derived from the up tangent.
         OVRPose pose2       = base.ComputeCameraWorldSpacePose(extrinsics);
         float   fieldOfView = Mathf.Atan(cameraIntrinsics.FOVPort.UpTan) * 57.29578f * 2f;
         float   aspect      = cameraIntrinsics.FOVPort.LeftTan / cameraIntrinsics.FOVPort.UpTan;
         this.directCompositionCamera.fieldOfView = fieldOfView;
         this.directCompositionCamera.aspect      = aspect;
         this.directCompositionCamera.transform.FromOVRPose(pose2, false);
     }
     else
     {
         Debug.LogWarning("Failed to get external camera information");
     }
     if (this.hasCameraDeviceOpened)
     {
         // (Re)create the single-channel (R8) mask texture whenever the screen
         // size changes.
         if (this.boundaryMeshMaskTexture == null || this.boundaryMeshMaskTexture.width != Screen.width || this.boundaryMeshMaskTexture.height != Screen.height)
         {
             this.boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
             this.boundaryMeshMaskTexture.Create();
         }
         base.UpdateCameraFramePlaneObject(mainCamera, this.directCompositionCamera, this.boundaryMeshMaskTexture);
         this.directCompositionCamera.GetComponent <OVRCameraComposition.OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = this.boundaryMeshMaskTexture;
     }
 }
// Example #6 (sample separator left over from code-example scrape)
    /// <summary>
    /// Per-frame update for external composition: mirrors the main camera onto
    /// the background and foreground cameras, poses both from external camera 0
    /// (or the fake camera), and turns the camera proxy plane toward the
    /// player on the horizontal plane. Hand-node pose latency is forced to 0.
    /// Fixed: the foreground camera's aspect now reuses the precomputed
    /// <c>aspect</c> local instead of recomputing the identical expression,
    /// matching the background camera assignment.
    /// </summary>
    public override void Update(Camera mainCamera)
    {
        OVRPlugin.SetHandNodePoseStateLatency(0.0);
        // Mirror the main camera; extraHiddenLayers are excluded from both MR cameras.
        this.backgroundCamera.clearFlags      = mainCamera.clearFlags;
        this.backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        this.backgroundCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
        this.backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        this.backgroundCamera.farClipPlane    = mainCamera.farClipPlane;
        this.foregroundCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
        this.foregroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        this.foregroundCamera.farClipPlane    = mainCamera.farClipPlane;
        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            // No physical external camera: place both cameras at the fake pose.
            OVRPose pose = default(OVRPose);
            pose = OVRExtensions.ToWorldSpacePose(new OVRPose
            {
                position    = OVRMixedReality.fakeCameraPositon,
                orientation = OVRMixedReality.fakeCameraRotation
            });
            this.backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            this.backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            this.backgroundCamera.transform.FromOVRPose(pose, false);
            this.foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            this.foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            this.foregroundCamera.transform.FromOVRPose(pose, false);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics cameraIntrinsics;
            if (!OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out cameraIntrinsics))
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
            // 57.29578 = degrees per radian; vertical FOV derived from the up tangent.
            OVRPose pose2       = base.ComputeCameraWorldSpacePose(extrinsics);
            float   fieldOfView = Mathf.Atan(cameraIntrinsics.FOVPort.UpTan) * 57.29578f * 2f;
            float   aspect      = cameraIntrinsics.FOVPort.LeftTan / cameraIntrinsics.FOVPort.UpTan;
            this.backgroundCamera.fieldOfView = fieldOfView;
            this.backgroundCamera.aspect      = aspect;
            this.backgroundCamera.transform.FromOVRPose(pose2, false);
            this.foregroundCamera.fieldOfView = fieldOfView;
            this.foregroundCamera.aspect      = aspect;
            this.foregroundCamera.transform.FromOVRPose(pose2, false);
        }
        // Face the proxy plane toward the player, flattened onto the XZ plane
        // (assumes the player is standing upright).
        Vector3 b = mainCamera.transform.position - this.foregroundCamera.transform.position;

        b.y = 0f;
        this.cameraProxyPlane.transform.position = mainCamera.transform.position;
        this.cameraProxyPlane.transform.LookAt(this.cameraProxyPlane.transform.position + b);
    }
    // Resolves the plugin-level camera device, then — when the device is
    // available — configures the preferred color frame size and depth quality
    // and opens it. hasCameraDeviceOpened reflects whether the open succeeded.
    protected OVRCameraComposition(OVRManager.CameraDevice inCameraDevice, bool inUseDynamicLighting, OVRManager.DepthQuality depthQuality)
    {
        this.cameraDevice          = OVRCompositionUtil.ConvertCameraDevice(inCameraDevice);
        this.hasCameraDeviceOpened = false;
        this.useDynamicLighting    = inUseDynamicLighting;

        // Queried unconditionally; warn only when dynamic lighting would need depth.
        bool supportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(this.cameraDevice);
        if (this.useDynamicLighting && !supportsDepth)
        {
            Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
        }

        if (!OVRPlugin.IsCameraDeviceAvailable(this.cameraDevice))
        {
            return;
        }

        // Prefer a color frame size matching external camera 0's sensor resolution.
        OVRPlugin.CameraExtrinsics cameraExtrinsics;
        OVRPlugin.CameraIntrinsics cameraIntrinsics;
        if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics))
        {
            OVRPlugin.SetCameraDevicePreferredColorFrameSize(this.cameraDevice, cameraIntrinsics.ImageSensorPixelResolution.w, cameraIntrinsics.ImageSensorPixelResolution.h);
        }

        if (this.useDynamicLighting)
        {
            OVRPlugin.SetCameraDeviceDepthSensingMode(this.cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);

            // Unknown quality values fall back to Medium with a warning.
            OVRPlugin.CameraDeviceDepthQuality preferredQuality;
            switch (depthQuality)
            {
                case OVRManager.DepthQuality.Low:
                    preferredQuality = OVRPlugin.CameraDeviceDepthQuality.Low;
                    break;
                case OVRManager.DepthQuality.Medium:
                    preferredQuality = OVRPlugin.CameraDeviceDepthQuality.Medium;
                    break;
                case OVRManager.DepthQuality.High:
                    preferredQuality = OVRPlugin.CameraDeviceDepthQuality.High;
                    break;
                default:
                    Debug.LogWarning("Unknown depth quality");
                    preferredQuality = OVRPlugin.CameraDeviceDepthQuality.Medium;
                    break;
            }
            OVRPlugin.SetCameraDevicePreferredDepthQuality(this.cameraDevice, preferredQuality);
        }

        OVRPlugin.OpenCameraDevice(this.cameraDevice);
        this.hasCameraDeviceOpened = OVRPlugin.HasCameraDeviceOpened(this.cameraDevice);
    }
// Example #8 (sample separator left over from code-example scrape)
    // One-time setup for mixed-reality capture. Compiled only for Windows
    // (editor/standalone) or when OVR_ANDROID_MRC is defined; returns early
    // until the underlying MR/Media subsystem reports initialized.
    void Initialize()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (inited)
        {
            return;
        }

#if OVR_ANDROID_MRC
        if (!OVRPlugin.Media.GetInitialized())
        {
            return;
        }
#else
        if (!OVRPlugin.IsMixedRealityInitialized())
        {
            return;
        }
#endif

        // Register the manually-configured external camera with the plugin.
        OVRPlugin.ResetDefaultExternalCamera();
        Debug.LogFormat("GetExternalCameraCount before adding manual external camera {0}", OVRPlugin.GetExternalCameraCount());
        UpdateDefaultExternalCamera();
        Debug.LogFormat("GetExternalCameraCount after adding manual external camera {0}", OVRPlugin.GetExternalCameraCount());

        // obtain default FOV
        {
            OVRPlugin.CameraIntrinsics cameraIntrinsics;
            OVRPlugin.CameraExtrinsics cameraExtrinsics;
            // NOTE(review): the return value is ignored; on failure defaultFov
            // would come from an unpopulated struct — confirm this is intended.
            OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics);
            defaultFov = cameraIntrinsics.FOVPort;
        }

        inited = true;
#if OVR_ANDROID_MRC
        readyToSwitch = true;
#endif
#endif
    }
// Example #9 (sample separator left over from code-example scrape)
    // Per-frame update for direct composition (tracking-space-aware variant):
    // refreshes the camera objects, mirrors the main camera onto the
    // direct-composition camera, poses it from external camera 0 (or the fake
    // camera, honoring the tracking origin), and maintains the boundary-mesh
    // mask texture.
    public override void Update(GameObject gameObject, Camera mainCamera)
    {
        if (!hasCameraDeviceOpened)
        {
            return;
        }

        RefreshCameraObjects(gameObject, mainCamera);

        if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
        {
            Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
        }

        // Mirror the main camera; extraHiddenLayers are excluded from the MR view.
        directCompositionCamera.clearFlags      = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        directCompositionCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        directCompositionCamera.nearClipPlane   = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane    = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            // Fake-camera path: pick the eye-level or floor-level preset
            // depending on the configured tracking origin.
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation       = OVRMixedReality.fakeCameraRotation;
            directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            directCompositionCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            if (cameraInTrackingSpace)
            {
                directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                OVRPose worldSpacePose = new OVRPose();
                worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
                directCompositionCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;
            OVRPlugin.Posef            calibrationRawPose;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics, out calibrationRawPose))
            {
                // Vertical FOV from the intrinsics' up tangent; aspect from left/up tangents.
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                directCompositionCamera.fieldOfView = fovY;
                directCompositionCamera.aspect      = aspect;
                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics, calibrationRawPose);
                    directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics, calibrationRawPose);
                    directCompositionCamera.transform.FromOVRPose(worldSpacePose);
                }
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        if (hasCameraDeviceOpened)
        {
            // (Re)create the single-channel (R8) mask texture whenever the
            // screen size changes.
            if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
            {
                boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
                boundaryMeshMaskTexture.Create();
            }
            UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, boundaryMeshMaskTexture);
            directCompositionCamera.GetComponent <OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
        }
    }
    // Per-frame update for the static direct-composition objects: mirrors the
    // main camera's settings, poses the composition camera from external
    // camera 0 (or the fake camera), streams the latest color frame onto the
    // camera-frame quad, and sizes/places the quad to fill the camera frustum
    // at the player's distance.
    private static void UpdateDirectCompositionObjects(Camera mainCamera)
    {
        directCompositionCamera.clearFlags      = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        directCompositionCamera.cullingMask     = mainCamera.cullingMask;
        directCompositionCamera.nearClipPlane   = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane    = mainCamera.farClipPlane;

        if (useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            // No physical external camera: use the configured fake camera pose.
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position    = fakeCameraPositon;
            trackingSpacePose.orientation = fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            directCompositionCamera.fieldOfView = fakeCameraFov;
            directCompositionCamera.aspect      = fakeCameraAspect;
            directCompositionCamera.transform.FromOVRPose(worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose    = new OVRPose();
                OVRPose trackingSpacePose = new OVRPose();

                OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
                trackingSpacePose = cameraTrackingSpacePose;

                // If the camera is attached to a tracked node, compose the
                // node's render-time pose with the camera-relative pose.
                if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
                {
                    OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
                    trackingSpacePose = attachedNodePose * trackingSpacePose;
                }

                worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

                // Vertical FOV from the intrinsics' up tangent; aspect from left/up tangents.
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                directCompositionCamera.fieldOfView = fovY;
                directCompositionCamera.aspect      = aspect;
                directCompositionCamera.transform.FromOVRPose(worldSpacePose);
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        // Stream the latest color frame from the camera device onto the quad.
        if (OVRPlugin.HasCameraDeviceOpened(directCompositionCameraDevice))
        {
            if (OVRPlugin.IsCameraDeviceColorFrameAvailable(directCompositionCameraDevice))
            {
                cameraFramePlaneObject.GetComponent <MeshRenderer>().material.mainTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(directCompositionCameraDevice);
            }
        }

        // Place the frame quad along the composition camera's forward axis at
        // the projected distance of the player's head, facing the camera...
        Vector3 offset   = mainCamera.transform.position - directCompositionCameraGameObject.transform.position;
        float   distance = Vector3.Dot(directCompositionCameraGameObject.transform.forward, offset);

        cameraFramePlaneObject.transform.position = directCompositionCameraGameObject.transform.position + directCompositionCameraGameObject.transform.forward * distance;
        cameraFramePlaneObject.transform.rotation = directCompositionCameraGameObject.transform.rotation;

        // ...and scale it so it exactly fills the camera frustum at that
        // distance (width = height * aspect, height = 2 * d * tan(fov/2)).
        float tanFov = Mathf.Tan(directCompositionCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);

        cameraFramePlaneObject.transform.localScale = new Vector3(distance * directCompositionCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);
    }
    /// <summary>
    /// Per-frame update for the static external-composition objects: mirrors
    /// the main camera onto the background and foreground cameras, poses both
    /// from external camera 0 (or the fake camera), and turns the camera
    /// proxy plane toward the player on the horizontal plane.
    /// Fixed: the foreground camera's aspect now reuses the precomputed
    /// <c>aspect</c> local instead of recomputing the identical expression,
    /// matching the background camera assignment.
    /// </summary>
    private static void UpdateExternalCompositionObjects(Camera mainCamera)
    {
        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask;
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask;
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            // No physical external camera: place both cameras at the fake pose.
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position    = fakeCameraPositon;
            trackingSpacePose.orientation = fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = fakeCameraFov;
            backgroundCamera.aspect      = fakeCameraAspect;
            backgroundCamera.transform.FromOVRPose(worldSpacePose);

            foregroundCamera.fieldOfView = fakeCameraFov;
            foregroundCamera.aspect      = fakeCameraAspect;
            foregroundCamera.transform.FromOVRPose(worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose    = new OVRPose();
                OVRPose trackingSpacePose = new OVRPose();

                OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
                trackingSpacePose = cameraTrackingSpacePose;

                // If the camera is attached to a tracked node, compose the
                // node's render-time pose with the camera-relative pose.
                if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
                {
                    OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
                    trackingSpacePose = attachedNodePose * trackingSpacePose;
                }

                worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

                // Vertical FOV from the intrinsics' up tangent; aspect from left/up tangents.
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect      = aspect;
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // Assume player always standing straightly
        Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;

        externalCameraToHeadXZ.y            = 0;
        cameraProxyPlane.transform.position = mainCamera.transform.position;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
    }
	/// <summary>
	/// Per-frame update for direct composition: mirrors the main camera's
	/// settings onto the direct-composition camera, poses it from external
	/// camera 0 (or the fake camera), and maintains the boundary-mesh mask
	/// texture. Fixed: every statement terminator was a ':' instead of ';'
	/// (including "return:"), so the block did not compile; restored correct
	/// C# syntax.
	/// </summary>
	public override void Update(Camera mainCamera)
	{
		if (!hasCameraDeviceOpened)
		{
			return;
		}

		if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
		{
			Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
		}

		// Mirror the main camera; extraHiddenLayers are excluded from the MR view.
		directCompositionCamera.clearFlags = mainCamera.clearFlags;
		directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
		directCompositionCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
		directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
		directCompositionCamera.farClipPlane = mainCamera.farClipPlane;

		if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
		{
			// No physical external camera: use the configured fake camera pose.
			OVRPose worldSpacePose = new OVRPose();
			OVRPose trackingSpacePose = new OVRPose();
			trackingSpacePose.position = OVRMixedReality.fakeCameraPositon;
			trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
			worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

			directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
			directCompositionCamera.aspect = OVRMixedReality.fakeCameraAspect;
			directCompositionCamera.transform.FromOVRPose(worldSpacePose);
		}
		else
		{
			OVRPlugin.CameraExtrinsics extrinsics;
			OVRPlugin.CameraIntrinsics intrinsics;

			// So far, only support 1 camera for MR and always use camera index 0
			if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
			{
				OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);

				// Vertical FOV from the intrinsics' up tangent; aspect from left/up tangents.
				float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
				float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
				directCompositionCamera.fieldOfView = fovY;
				directCompositionCamera.aspect = aspect;
				directCompositionCamera.transform.FromOVRPose(worldSpacePose);
			}
			else
			{
				Debug.LogWarning("Failed to get external camera information");
			}
		}

		if (hasCameraDeviceOpened)
		{
			// (Re)create the single-channel (R8) mask texture whenever the
			// screen size changes.
			if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
			{
				boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
				boundaryMeshMaskTexture.Create();
			}
			UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, boundaryMeshMaskTexture);
			directCompositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
		}
	}
    /// <summary>
    /// Per-frame update for the external composition. Mirrors the main camera's
    /// clear/culling/clip settings onto the background and foreground cameras,
    /// places both cameras at the external (or fake) camera pose, and orients the
    /// camera proxy plane toward the head in the horizontal plane.
    /// </summary>
    /// <param name="mainCamera">The scene's main VR camera whose settings are mirrored.</param>
    public override void Update(Camera mainCamera)
    {
        OVRPlugin.SetHandNodePoseStateLatency(0.0);             // the HandNodePoseStateLatency doesn't apply to the external composition. Always enforce it to 0.0

#if OVR_ANDROID_MRC
        RefreshAudioFilter();

        // Background/foreground render on alternating frames; the render-target
        // textures are double-buffered (draw into one pair while casting the other).
        int drawTextureIndex = (frameIndex / 2) % 2;
        int castTextureIndex = 1 - drawTextureIndex;

        backgroundCamera.enabled = (frameIndex % 2) == 0;
        foregroundCamera.enabled = (frameIndex % 2) == 1;

        if (frameIndex % 2 == 0)
        {
            // Sync the previously cast frame before casting the next one.
            if (lastMrcEncodeFrameSyncId != -1)
            {
                OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
                lastMrcEncodeFrameSyncId = -1;
            }
            lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
            SetCameraTargetTexture(drawTextureIndex);
        }

        ++frameIndex;
#endif

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            // No physical external camera: drive both cameras from the fake camera parameters.
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;

            if (cameraInTrackingSpace)
            {
                backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;
            OVRPlugin.Posef            calibrationRawPose;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics, out calibrationRawPose))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                foregroundCamera.fieldOfView = fovY;
                // Reuse the aspect computed above instead of recomputing the same ratio inline.
                foregroundCamera.aspect      = aspect;

                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics, calibrationRawPose);
                    backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                    foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics, calibrationRawPose);
                    backgroundCamera.transform.FromOVRPose(worldSpacePose);
                    foregroundCamera.transform.FromOVRPose(worldSpacePose);
                }
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // Assume player always standing straightly: face the proxy plane toward the
        // external camera using only the horizontal (XZ) component of the offset.
        Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;
        externalCameraToHeadXZ.y            = 0;
        cameraProxyPlane.transform.position = mainCamera.transform.position;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
    }
    /// <summary>
    /// Per-frame update for the sandwich composition: advances the frame-history
    /// ring buffer, mirrors main-camera settings onto the bg/fg cameras, refreshes
    /// their poses from the external (or fake) camera, and feeds the selected
    /// history record's textures to the composition manager.
    /// </summary>
    /// <param name="mainCamera">The scene's main VR camera whose settings are mirrored.</param>
    public override void Update(Camera mainCamera)
    {
        // Nothing to composite until the camera device has been opened.
        if (!hasCameraDeviceOpened)
        {
            return;
        }

        frameRealtime = Time.realtimeSinceStartup;

        // Advance the history ring-buffer cursor, wrapping at the end.
        historyRecordCursorIndex += 1;
        if (historyRecordCursorIndex >= historyRecordCount)
        {
            historyRecordCursorIndex = 0;
        }

        double latency = OVRManager.instance.handPoseStateLatency;
        if (!OVRPlugin.SetHandNodePoseStateLatency(latency))
        {
            Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
        }

        RefreshRenderTextures(mainCamera);

        bgCamera.clearFlags      = mainCamera.clearFlags;
        bgCamera.backgroundColor = mainCamera.backgroundColor;
        bgCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        fgCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            // No physical external camera: use the fake camera parameters.
            OVRPose fakeTrackingPose = new OVRPose();
            fakeTrackingPose.position    = OVRMixedReality.fakeCameraPositon;
            fakeTrackingPose.orientation = OVRMixedReality.fakeCameraRotation;
            OVRPose fakeWorldPose = OVRExtensions.ToWorldSpacePose(fakeTrackingPose);

            RefreshCameraPoses(OVRMixedReality.fakeCameraFov, OVRMixedReality.fakeCameraAspect, fakeWorldPose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (!OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                Debug.LogWarning("Failed to get external camera information");
            }
            else
            {
                OVRPose cameraWorldPose = ComputeCameraWorldSpacePose(extrinsics);
                float verticalFov  = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float cameraAspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;

                RefreshCameraPoses(verticalFov, cameraAspect, cameraWorldPose);
            }
        }

        // The mask for the current frame goes to the composition manager; the
        // textures actually composited come from an older history record.
        compositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture =
            historyRecordArray[historyRecordCursorIndex].boundaryMeshMaskTexture;

        HistoryRecord record = GetHistoryRecordForComposition();
        UpdateCameraFramePlaneObject(mainCamera, compositionCamera, record.boundaryMeshMaskTexture);

        OVRSandwichCompositionManager sandwichManager =
            compositionCamera.gameObject.GetComponent<OVRSandwichCompositionManager>();
        sandwichManager.fgTexture = record.fgRenderTexture;
        sandwichManager.bgTexture = record.bgRenderTexture;

        // Keep the proxy plane in front of the foreground camera, facing it.
        cameraProxyPlane.transform.position = fgCamera.transform.position + fgCamera.transform.forward * cameraFramePlaneDistance;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + fgCamera.transform.forward);
    }
// Example #15
        // Workaround that recomputes and overrides the MRC external camera's static
        // stage pose so the OVR composition code (which assumes meters) places the
        // Mixed Reality cameras correctly under Open Brush's scaled-up units.
        void Update()
        {
            // Lazily capture the calibrated camera pose once MRC is initialized;
            // bail out each frame until both are available.
            if (!calibratedCameraPose.HasValue)
            {
                if (!OVRPlugin.Media.GetInitialized())
                {
                    return;
                }

                OVRPlugin.CameraIntrinsics cameraIntrinsics;
                OVRPlugin.CameraExtrinsics cameraExtrinsics;

                // Only camera index 0 is queried; RelativePose holds the calibrated extrinsic pose.
                if (OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics))
                {
                    calibratedCameraPose = cameraExtrinsics.RelativePose.ToOVRPose();
                }
                else
                {
                    return;
                }
            }

            OVRPose cameraStagePoseInUnits = calibratedCameraPose.Value;

            // Converting position from meters to decimeters (unit used by Open Brush)
            cameraStagePoseInUnits.position *= App.METERS_TO_UNITS;

            // Workaround to fix the OVRExtensions.ToWorldSpacePose() and
            // OVRComposition.ComputeCameraWorldSpacePose() calls when computing
            // the Mixed Reality foreground and background camera positions.
            OVRPose headPose = OVRPose.identity;

            Vector3    pos;
            Quaternion rot;

            // Read the render-time head pose; any component that cannot be read
            // keeps its identity value.
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head,
                                                                   NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
            {
                headPose.position = pos;
            }

            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head,
                                                                      NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
            {
                headPose.orientation = rot;
            }

            // Head pose with its position expressed in scene units.
            OVRPose headPoseInUnits = OVRPose.identity;

            headPoseInUnits.position    = headPose.position * App.METERS_TO_UNITS;
            headPoseInUnits.orientation = headPose.orientation;

            // Stage-to-local (tracking) transform, in meters...
            OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(
                OVRPlugin.TrackingOrigin.Stage).ToOVRPose();

            // ...and the same transform with its position expressed in scene units.
            OVRPose stageToLocalPoseInUnits = OVRPose.identity;

            stageToLocalPoseInUnits.position    = stageToLocalPose.position * App.METERS_TO_UNITS;
            stageToLocalPoseInUnits.orientation = stageToLocalPose.orientation;

            // Compose the camera's pose in scene units, then fold it back into a
            // stage-space pose that compensates for the unit scaling.
            // NOTE(review): the multiplication order mirrors what the OVR composition
            // code applies internally — confirm against ComputeCameraWorldSpacePose
            // before changing either line.
            OVRPose cameraWorldPoseInUnits = headPoseInUnits.Inverse() * stageToLocalPoseInUnits *
                                             cameraStagePoseInUnits;
            OVRPose cameraStagePoseFix = stageToLocalPose.Inverse() * headPose * cameraWorldPoseInUnits;

            // Override the MRC camera's stage pose
            OVRPlugin.OverrideExternalCameraStaticPose(0, true, cameraStagePoseFix.ToPosef());
        }
    /// <summary>
    /// Per-frame update for the external composition (world-space variant).
    /// Mirrors the main camera's clear/culling/clip settings onto the background
    /// and foreground cameras, positions both at the external (or fake) camera's
    /// world-space pose, and orients the camera proxy plane toward the head.
    /// </summary>
    /// <param name="mainCamera">The scene's main VR camera whose settings are mirrored.</param>
    public override void Update(Camera mainCamera)
    {
        OVRPlugin.SetHandNodePoseStateLatency(0.0);                     // the HandNodePoseStateLatency doesn't apply to the external composition. Always enforce it to 0.0

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            // No physical external camera: drive both cameras from the fake camera parameters.
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position    = OVRMixedReality.fakeCameraPositon;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            backgroundCamera.transform.FromOVRPose(worldSpacePose);

            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.transform.FromOVRPose(worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);

                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.fieldOfView = fovY;
                // Reuse the aspect computed above instead of recomputing the same ratio inline.
                foregroundCamera.aspect      = aspect;
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // Assume player always standing straightly: face the proxy plane toward the
        // external camera using only the horizontal (XZ) component of the offset.
        Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;

        externalCameraToHeadXZ.y            = 0;
        cameraProxyPlane.transform.position = mainCamera.transform.position;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
    }
    /// <summary>
    /// Per-frame update for the external composition (MRC variant). Reports headset
    /// and controller poses to the MRC runtime, mirrors main-camera settings onto
    /// the background/foreground cameras, places them at the external (or fake)
    /// camera pose, and clamps the foreground camera's far clip plane to the
    /// head-to-camera distance so the foreground layer ends at the player.
    /// </summary>
    /// <param name="gameObject">Parent object used when refreshing the camera rig.</param>
    /// <param name="mainCamera">The scene's main VR camera whose settings are mirrored.</param>
    public override void Update(GameObject gameObject, Camera mainCamera)
    {
        RefreshCameraObjects(gameObject, mainCamera);

        OVRPlugin.SetHandNodePoseStateLatency(0.0);             // the HandNodePoseStateLatency doesn't apply to the external composition. Always enforce it to 0.0

        // For third-person camera to use for calculating camera position with different anchors
        OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
        OVRPose localToStagePose = stageToLocalPose.Inverse();
        OVRPose head             = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.Head, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose leftC            = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose rightC           = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandRight, OVRPlugin.Step.Render).ToOVRPose();

        OVRPlugin.Media.SetMrcHeadsetControllerPose(head.ToPosef(), leftC.ToPosef(), rightC.ToPosef());

#if OVR_ANDROID_MRC
        RefreshAudioFilter();

        // Background/foreground render on alternating frames; the render-target
        // textures are double-buffered (draw into one pair while casting the other).
        int drawTextureIndex = (frameIndex / 2) % 2;
        int castTextureIndex = 1 - drawTextureIndex;

        backgroundCamera.enabled = (frameIndex % 2) == 0;
        foregroundCamera.enabled = (frameIndex % 2) == 1;

        if (frameIndex % 2 == 0)
        {
            // Sync the previously cast frame before casting the next one.
            if (lastMrcEncodeFrameSyncId != -1)
            {
                OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
                lastMrcEncodeFrameSyncId = -1;
            }
            lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
            SetCameraTargetTexture(drawTextureIndex);
        }

        ++frameIndex;
#endif

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            // No physical external camera: drive both cameras from the fake camera parameters.
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;

            if (cameraInTrackingSpace)
            {
                backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                foregroundCamera.fieldOfView = fovY;
                // Reuse the aspect computed above instead of recomputing the same ratio inline.
                foregroundCamera.aspect      = aspect;

                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                    foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(worldSpacePose);
                    foregroundCamera.transform.FromOVRPose(worldSpacePose);
                }
#if OVR_ANDROID_MRC
                cameraPoseTimeArray[drawTextureIndex] = extrinsics.LastChangedTimeSeconds;
#endif
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // End the foreground layer at the player: project the head-to-camera vector
        // onto the camera's forward axis and clamp the far clip plane to it.
        Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
        float   clipDistance            = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
        foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
    }