Miscellaneous extension methods that any script can use.
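Most of the examples below revolve around OVRExtensions.ToWorldSpacePose, which maps an OVRPose from tracking space into Unity world space. As a minimal sketch of that conversion (the MonoBehaviour wrapper and the markerTransform field are illustrative assumptions, not part of OVRExtensions):

    using UnityEngine;

    public class TrackingSpaceMarker : MonoBehaviour
    {
        // Illustrative: any scene object to drive from a tracking-space pose.
        public Transform markerTransform;

        void Update()
        {
            // Build a pose in tracking space...
            OVRPose trackingSpacePose;
            trackingSpacePose.position    = new Vector3(0f, 1.5f, 2f);
            trackingSpacePose.orientation = Quaternion.identity;

            // ...convert it to Unity world space...
            OVRPose worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            // ...and apply it to the scene object.
            markerTransform.position = worldSpacePose.position;
            markerTransform.rotation = worldSpacePose.orientation;
        }
    }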
Example #1
    internal OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
    {
        OVRPose trackingSpacePose = extrinsics.RelativePose.ToOVRPose();

        if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
        {
            if (this.usingLastAttachedNodePose)
            {
                Debug.Log("The camera attached node get tracked");
                this.usingLastAttachedNodePose = false;
            }
            OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
            this.lastAttachedNodePose = attachedNodePose;
            trackingSpacePose = attachedNodePose * trackingSpacePose;
        }
        else if (extrinsics.AttachedToNode != OVRPlugin.Node.None)
        {
            if (!this.usingLastAttachedNodePose)
            {
                Debug.LogWarning("The camera attached node could not be tracked, using the last pose");
                this.usingLastAttachedNodePose = true;
            }
            trackingSpacePose = this.lastAttachedNodePose * trackingSpacePose;
        }
        return(OVRExtensions.ToWorldSpacePose(trackingSpacePose));
    }
Example #2
    public override ulong CreateSpatialAnchor(Transform T_UnityWorld_Anchor)
    {
        OVRPlugin.SpatialEntityAnchorCreateInfo createInfo = new OVRPlugin.SpatialEntityAnchorCreateInfo()
        {
            Time         = OVRPlugin.GetTimeInSeconds(),
            BaseTracking = OVRPlugin.GetTrackingOriginType(),
            PoseInSpace  = OVRExtensions.ToOVRPose(T_UnityWorld_Anchor, false).ToPosef()
        };

        ulong anchorHandle = AnchorSession.kInvalidHandle;

        if (OVRPlugin.SpatialEntityCreateSpatialAnchor(createInfo, ref anchorHandle))
        {
            Log("Spatial anchor created with handle: " + anchorHandle);
        }
        else
        {
            Log("OVRPlugin.SpatialEntityCreateSpatialAnchor failed");
        }

        tryEnableComponent(anchorHandle, OVRPlugin.SpatialEntityComponentType.Locatable);
        tryEnableComponent(anchorHandle, OVRPlugin.SpatialEntityComponentType.Storable);

        return(anchorHandle);
    }
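For context, a hedged sketch of how the method above might be called; the anchorSession variable and its concrete type are assumptions, and only CreateSpatialAnchor's signature and AnchorSession.kInvalidHandle come from the example itself:

    // Assumed: anchorSession is an instance of the class that defines CreateSpatialAnchor above.
    ulong anchorHandle = anchorSession.CreateSpatialAnchor(transform);
    if (anchorHandle != AnchorSession.kInvalidHandle)
    {
        Debug.Log("Spatial anchor requested at this transform, handle: " + anchorHandle);
    }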
Example #3
    public OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics, OVRPlugin.Posef calibrationRawPose)
    {
        OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics, calibrationRawPose);
        OVRPose worldSpacePose    = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

        return(worldSpacePose);
    }
Example #4
	protected OVRPose lastAttachedNodePose = new OVRPose();            // Sometimes the attached node pose is not readable (lost tracking, low battery, etc.); use the last known pose when that happens

	internal OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
	{
		OVRPose worldSpacePose = new OVRPose();
		OVRPose trackingSpacePose = new OVRPose();

		OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
		trackingSpacePose = cameraTrackingSpacePose;

		if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
		{
			if (usingLastAttachedNodePose)
			{
				Debug.Log("The camera attached node get tracked");
				usingLastAttachedNodePose = false;
			}
			OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
			lastAttachedNodePose = attachedNodePose;
			trackingSpacePose = attachedNodePose * trackingSpacePose;
		}
		else
		{
			if (extrinsics.AttachedToNode != OVRPlugin.Node.None)
			{
				if (!usingLastAttachedNodePose)
				{
					Debug.LogWarning("The camera attached node could not be tracked, using the last pose");
					usingLastAttachedNodePose = true;
				}
				trackingSpacePose = lastAttachedNodePose * trackingSpacePose;
			}
		}

		worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
		return worldSpacePose;
	}
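The key step in both variants above is the composition attachedNodePose * trackingSpacePose, which re-bases the camera's node-relative extrinsics into tracking space. A small sketch of that composition in isolation, assuming (consistent with its use above) that OVRPose multiplication applies the left pose's rotation and translation to the right pose:

	// Illustrative values: a tracked node one meter up, and a camera mounted 10 cm in front of it.
	OVRPose nodePose = new OVRPose
	{
		position    = new Vector3(0f, 1.0f, 0f),
		orientation = Quaternion.identity
	};
	OVRPose cameraOffset = new OVRPose
	{
		position    = new Vector3(0f, 0f, 0.1f),
		orientation = Quaternion.identity
	};

	// With identity rotations this simply adds the offsets:
	// the camera ends up at (0, 1.0, 0.1) in tracking space.
	OVRPose cameraInTrackingSpace = nodePose * cameraOffset;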
Example #5
    public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
    {
        OVRPose trackingSpacePose;

        trackingSpacePose.position    = trackingSpacePosition;
        trackingSpacePose.orientation = Quaternion.identity;
        return(OVRExtensions.ToWorldSpacePose(trackingSpacePose).position);
    }
Example #6
	public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
	{
		OVRPose tsPose;
		tsPose.position = trackingSpacePosition;
		tsPose.orientation = Quaternion.identity;
		OVRPose wsPose = OVRExtensions.ToWorldSpacePose(tsPose);
		Vector3 pos = wsPose.position;
		return pos;
	}
Example #7
    public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
    {
        OVRPose tsPose;

        tsPose.position    = trackingSpacePosition;
        tsPose.orientation = Quaternion.identity;
        OVRPose wsPose = OVRExtensions.ToWorldSpacePose(tsPose);
        Vector3 pos    = wsPose.position;

        return(pos);
    }
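Examples #5 through #7 are the same GetWorldPosition helper in three slightly different decompilations. For the reverse direction, a hedged counterpart sketch, assuming (as Example #17 below does) that OVRExtensions.ToTrackingSpacePose takes the Transform to convert plus a reference Camera:

    public static Vector3 GetTrackingSpacePosition(Transform worldSpaceTransform, Camera mainCamera)
    {
        // Convert the world-space transform into tracking space and return its position.
        OVRPose trackingSpacePose = OVRExtensions.ToTrackingSpacePose(worldSpaceTransform, mainCamera);
        return trackingSpacePose.position;
    }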
Example #8
 public override void Update(Camera mainCamera)
 {
     if (!this.hasCameraDeviceOpened)
     {
         return;
     }
     this.frameRealtime = Time.realtimeSinceStartup;
     this.historyRecordCursorIndex++;
     if (this.historyRecordCursorIndex >= this.historyRecordCount)
     {
         this.historyRecordCursorIndex = 0;
     }
     if (!OVRPlugin.SetHandNodePoseStateLatency((double)OVRManager.instance.handPoseStateLatency))
     {
         Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
     }
     this.RefreshRenderTextures(mainCamera);
     this.bgCamera.clearFlags      = mainCamera.clearFlags;
     this.bgCamera.backgroundColor = mainCamera.backgroundColor;
     this.bgCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     this.fgCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     OVRPlugin.CameraExtrinsics extrinsics;
     OVRPlugin.CameraIntrinsics cameraIntrinsics;
     if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
     {
         OVRPose pose = OVRExtensions.ToWorldSpacePose(new OVRPose
         {
             position    = OVRMixedReality.fakeCameraPositon,
             orientation = OVRMixedReality.fakeCameraRotation
         });
         this.RefreshCameraPoses(OVRMixedReality.fakeCameraFov, OVRMixedReality.fakeCameraAspect, pose);
     }
     else if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out cameraIntrinsics))
     {
         OVRPose pose2  = base.ComputeCameraWorldSpacePose(extrinsics);
         float   fovY   = Mathf.Atan(cameraIntrinsics.FOVPort.UpTan) * 57.29578f * 2f;
         float   aspect = cameraIntrinsics.FOVPort.LeftTan / cameraIntrinsics.FOVPort.UpTan;
         this.RefreshCameraPoses(fovY, aspect, pose2);
     }
     else
     {
         Debug.LogWarning("Failed to get external camera information");
     }
     this.compositionCamera.GetComponent <OVRCameraComposition.OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = this.historyRecordArray[this.historyRecordCursorIndex].boundaryMeshMaskTexture;
     OVRSandwichComposition.HistoryRecord historyRecordForComposition = this.GetHistoryRecordForComposition();
     base.UpdateCameraFramePlaneObject(mainCamera, this.compositionCamera, historyRecordForComposition.boundaryMeshMaskTexture);
     OVRSandwichComposition.OVRSandwichCompositionManager component = this.compositionCamera.gameObject.GetComponent <OVRSandwichComposition.OVRSandwichCompositionManager>();
     component.fgTexture = historyRecordForComposition.fgRenderTexture;
     component.bgTexture = historyRecordForComposition.bgRenderTexture;
     this.cameraProxyPlane.transform.position = this.fgCamera.transform.position + this.fgCamera.transform.forward * this.cameraFramePlaneDistance;
     this.cameraProxyPlane.transform.LookAt(this.cameraProxyPlane.transform.position + this.fgCamera.transform.forward);
 }
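Several of the Update methods here derive the external camera's vertical field of view and aspect ratio from the FOVPort half-angle tangents; the literal 57.29578f is just Mathf.Rad2Deg inlined by the decompiler. A hedged helper sketch of that derivation (the method name and out parameters are illustrative, and the aspect formula assumes a symmetric frustum, as the examples do):

    // Converts half-angle tangents from OVRPlugin.CameraIntrinsics.FOVPort into
    // the vertical FOV (in degrees) and aspect ratio that Unity cameras expect.
    private static void GetFovAndAspect(OVRPlugin.CameraIntrinsics intrinsics, out float fovY, out float aspect)
    {
        // UpTan is tan(fovY / 2), so fovY = 2 * atan(UpTan), converted to degrees.
        fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2f;

        // The width/height ratio follows from the horizontal and vertical tangents.
        aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
    }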
Example #9
 public override void Update(Camera mainCamera)
 {
     if (!this.hasCameraDeviceOpened)
     {
         return;
     }
     if (!OVRPlugin.SetHandNodePoseStateLatency((double)OVRManager.instance.handPoseStateLatency))
     {
         Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
     }
     this.directCompositionCamera.clearFlags      = mainCamera.clearFlags;
     this.directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
     this.directCompositionCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     this.directCompositionCamera.nearClipPlane   = mainCamera.nearClipPlane;
     this.directCompositionCamera.farClipPlane    = mainCamera.farClipPlane;
     OVRPlugin.CameraExtrinsics extrinsics;
     OVRPlugin.CameraIntrinsics cameraIntrinsics;
     if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
     {
         OVRPose pose = OVRExtensions.ToWorldSpacePose(new OVRPose
         {
             position    = OVRMixedReality.fakeCameraPositon,
             orientation = OVRMixedReality.fakeCameraRotation
         });
         this.directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
         this.directCompositionCamera.aspect      = OVRMixedReality.fakeCameraAspect;
         this.directCompositionCamera.transform.FromOVRPose(pose, false);
     }
     else if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out cameraIntrinsics))
     {
         OVRPose pose2       = base.ComputeCameraWorldSpacePose(extrinsics);
         float   fieldOfView = Mathf.Atan(cameraIntrinsics.FOVPort.UpTan) * 57.29578f * 2f;
         float   aspect      = cameraIntrinsics.FOVPort.LeftTan / cameraIntrinsics.FOVPort.UpTan;
         this.directCompositionCamera.fieldOfView = fieldOfView;
         this.directCompositionCamera.aspect      = aspect;
         this.directCompositionCamera.transform.FromOVRPose(pose2, false);
     }
     else
     {
         Debug.LogWarning("Failed to get external camera information");
     }
     if (this.hasCameraDeviceOpened)
     {
         if (this.boundaryMeshMaskTexture == null || this.boundaryMeshMaskTexture.width != Screen.width || this.boundaryMeshMaskTexture.height != Screen.height)
         {
             this.boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
             this.boundaryMeshMaskTexture.Create();
         }
         base.UpdateCameraFramePlaneObject(mainCamera, this.directCompositionCamera, this.boundaryMeshMaskTexture);
         this.directCompositionCamera.GetComponent <OVRCameraComposition.OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = this.boundaryMeshMaskTexture;
     }
 }
Example #10
    public override void Update(Camera mainCamera)
    {
        OVRPlugin.SetHandNodePoseStateLatency(0.0);
        this.backgroundCamera.clearFlags      = mainCamera.clearFlags;
        this.backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        this.backgroundCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
        this.backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        this.backgroundCamera.farClipPlane    = mainCamera.farClipPlane;
        this.foregroundCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
        this.foregroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        this.foregroundCamera.farClipPlane    = mainCamera.farClipPlane;
        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose pose = OVRExtensions.ToWorldSpacePose(new OVRPose
            {
                position    = OVRMixedReality.fakeCameraPositon,
                orientation = OVRMixedReality.fakeCameraRotation
            });
            this.backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            this.backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            this.backgroundCamera.transform.FromOVRPose(pose, false);
            this.foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            this.foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            this.foregroundCamera.transform.FromOVRPose(pose, false);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics cameraIntrinsics;
            if (!OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out cameraIntrinsics))
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
            OVRPose pose2       = base.ComputeCameraWorldSpacePose(extrinsics);
            float   fieldOfView = Mathf.Atan(cameraIntrinsics.FOVPort.UpTan) * 57.29578f * 2f;
            float   aspect      = cameraIntrinsics.FOVPort.LeftTan / cameraIntrinsics.FOVPort.UpTan;
            this.backgroundCamera.fieldOfView = fieldOfView;
            this.backgroundCamera.aspect      = aspect;
            this.backgroundCamera.transform.FromOVRPose(pose2, false);
            this.foregroundCamera.fieldOfView = fieldOfView;
            this.foregroundCamera.aspect      = aspect;
            this.foregroundCamera.transform.FromOVRPose(pose2, false);
        }
        Vector3 externalCameraToHeadXZ = mainCamera.transform.position - this.foregroundCamera.transform.position;

        externalCameraToHeadXZ.y = 0f;
        this.cameraProxyPlane.transform.position = mainCamera.transform.position;
        this.cameraProxyPlane.transform.LookAt(this.cameraProxyPlane.transform.position + externalCameraToHeadXZ);
    }
Example #11
    void Update()
    {
        // Ref: http://zabaglione.info/archives/462
        var hmd   = OVR.Hmd.GetHmd();
        var state = hmd.GetTrackingState();

        if ((state.StatusFlags & (uint)OVR.ovrStatusBits.ovrStatus_OrientationTracked) != 0)
        {
            var accel = OVRExtensions.ToVector3(state.RawSensorData.Accelerometer);
            if (accel.sqrMagnitude > accelerometerThreshold)
            {
                OnHmdTapped();
            }
        }
    }
Example #12
    private static void UpdateDirectCompositionObjects(Camera mainCamera)
    {
        directCompositionCamera.clearFlags      = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        directCompositionCamera.cullingMask     = mainCamera.cullingMask;
        directCompositionCamera.nearClipPlane   = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane    = mainCamera.farClipPlane;

        if (useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position    = fakeCameraPositon;
            trackingSpacePose.orientation = fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            directCompositionCamera.fieldOfView = fakeCameraFov;
            directCompositionCamera.aspect      = fakeCameraAspect;
            directCompositionCamera.transform.FromOVRPose(worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose    = new OVRPose();
                OVRPose trackingSpacePose = new OVRPose();

                OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
                trackingSpacePose = cameraTrackingSpacePose;

                if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
                {
                    OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
                    trackingSpacePose = attachedNodePose * trackingSpacePose;
                }

                worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                directCompositionCamera.fieldOfView = fovY;
                directCompositionCamera.aspect      = aspect;
                directCompositionCamera.transform.FromOVRPose(worldSpacePose);
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        if (OVRPlugin.HasCameraDeviceOpened(directCompositionCameraDevice))
        {
            if (OVRPlugin.IsCameraDeviceColorFrameAvailable(directCompositionCameraDevice))
            {
                cameraFramePlaneObject.GetComponent <MeshRenderer>().material.mainTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(directCompositionCameraDevice);
            }
        }

        Vector3 offset   = mainCamera.transform.position - directCompositionCameraGameObject.transform.position;
        float   distance = Vector3.Dot(directCompositionCameraGameObject.transform.forward, offset);

        cameraFramePlaneObject.transform.position = directCompositionCameraGameObject.transform.position + directCompositionCameraGameObject.transform.forward * distance;
        cameraFramePlaneObject.transform.rotation = directCompositionCameraGameObject.transform.rotation;

        float tanFov = Mathf.Tan(directCompositionCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);

        cameraFramePlaneObject.transform.localScale = new Vector3(distance * directCompositionCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);
    }
Example #13
    private static void UpdateExternalCompositionObjects(Camera mainCamera)
    {
        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask;
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask;
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position    = fakeCameraPositon;
            trackingSpacePose.orientation = fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = fakeCameraFov;
            backgroundCamera.aspect      = fakeCameraAspect;
            backgroundCamera.transform.FromOVRPose(worldSpacePose);

            foregroundCamera.fieldOfView = fakeCameraFov;
            foregroundCamera.aspect      = fakeCameraAspect;
            foregroundCamera.transform.FromOVRPose(worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose    = new OVRPose();
                OVRPose trackingSpacePose = new OVRPose();

                OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
                trackingSpacePose = cameraTrackingSpacePose;

                if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
                {
                    OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
                    trackingSpacePose = attachedNodePose * trackingSpacePose;
                }

                worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect      = aspect;
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // Assume the player is always standing upright
        Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;

        externalCameraToHeadXZ.y            = 0;
        cameraProxyPlane.transform.position = mainCamera.transform.position;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
    }
Example #14
    public override void Update(GameObject gameObject, Camera mainCamera)
    {
        RefreshCameraObjects(gameObject, mainCamera);

        OVRPlugin.SetHandNodePoseStateLatency(0.0);             // HandNodePoseStateLatency doesn't apply to external composition; always force it to 0.0

        // For third-person camera to use for calculating camera position with different anchors
        OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
        OVRPose localToStagePose = stageToLocalPose.Inverse();
        OVRPose head             = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.Head, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose leftC            = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose rightC           = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandRight, OVRPlugin.Step.Render).ToOVRPose();

        OVRPlugin.Media.SetMrcHeadsetControllerPose(head.ToPosef(), leftC.ToPosef(), rightC.ToPosef());

#if OVR_ANDROID_MRC
        RefreshAudioFilter();

        int drawTextureIndex = (frameIndex / 2) % 2;
        int castTextureIndex = 1 - drawTextureIndex;

        backgroundCamera.enabled = (frameIndex % 2) == 0;
        foregroundCamera.enabled = (frameIndex % 2) == 1;

        if (frameIndex % 2 == 0)
        {
            if (lastMrcEncodeFrameSyncId != -1)
            {
                OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
                lastMrcEncodeFrameSyncId = -1;
            }
            lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
            SetCameraTargetTexture(drawTextureIndex);
        }

        ++frameIndex;
#endif

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;

            if (cameraInTrackingSpace)
            {
                backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect      = aspect;

                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                    foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(worldSpacePose);
                    foregroundCamera.transform.FromOVRPose(worldSpacePose);
                }
#if OVR_ANDROID_MRC
                cameraPoseTimeArray[drawTextureIndex] = extrinsics.LastChangedTimeSeconds;
#endif
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
        float   clipDistance            = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
        foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
    }
Example #15
    public override void Update(GameObject gameObject, Camera mainCamera)
    {
        if (!hasCameraDeviceOpened)
        {
            return;
        }

        RefreshCameraObjects(gameObject, mainCamera);

        if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
        {
            Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
        }

        directCompositionCamera.clearFlags      = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        directCompositionCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        directCompositionCamera.nearClipPlane   = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane    = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation       = OVRMixedReality.fakeCameraRotation;
            directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            directCompositionCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            if (cameraInTrackingSpace)
            {
                directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                OVRPose worldSpacePose = new OVRPose();
                worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
                directCompositionCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;
            OVRPlugin.Posef            calibrationRawPose;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics, out calibrationRawPose))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                directCompositionCamera.fieldOfView = fovY;
                directCompositionCamera.aspect      = aspect;
                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics, calibrationRawPose);
                    directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics, calibrationRawPose);
                    directCompositionCamera.transform.FromOVRPose(worldSpacePose);
                }
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        if (hasCameraDeviceOpened)
        {
            if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
            {
                boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
                boundaryMeshMaskTexture.Create();
            }
            UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, boundaryMeshMaskTexture);
            directCompositionCamera.GetComponent <OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
        }
    }
Example #16
    public static OVRPose TrackingSpacePoseToUnityWorldSpacePose(OVRPlugin.Posef pose)
    {
        OVRPose worldPose = OVRExtensions.ToWorldSpacePose(pose.ToOVRPose());

        return(worldPose);
    }
Example #17
    // Pose Helper Functions
    public static OVRPlugin.Posef UnityWorldSpacePoseToTrackingSpacePose(Transform pose)
    {
        OVRPose trackingPose = OVRExtensions.ToTrackingSpacePose(pose, MainCamera);

        return(trackingPose.ToPosef());
    }
Example #18
    public override void Update(Camera mainCamera)
    {
        if (!hasCameraDeviceOpened)
        {
            return;
        }

        frameRealtime = Time.realtimeSinceStartup;

        ++historyRecordCursorIndex;
        if (historyRecordCursorIndex >= historyRecordCount)
        {
            historyRecordCursorIndex = 0;
        }

        if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
        {
            Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
        }

        RefreshRenderTextures(mainCamera);

        bgCamera.clearFlags      = mainCamera.clearFlags;
        bgCamera.backgroundColor = mainCamera.backgroundColor;
        bgCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);

        fgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position    = OVRMixedReality.fakeCameraPositon;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            RefreshCameraPoses(OVRMixedReality.fakeCameraFov, OVRMixedReality.fakeCameraAspect, worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);

                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;

                RefreshCameraPoses(fovY, aspect, worldSpacePose);
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        compositionCamera.GetComponent <OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = historyRecordArray[historyRecordCursorIndex].boundaryMeshMaskTexture;
        HistoryRecord record = GetHistoryRecordForComposition();

        UpdateCameraFramePlaneObject(mainCamera, compositionCamera, record.boundaryMeshMaskTexture);
        OVRSandwichCompositionManager compositionManager = compositionCamera.gameObject.GetComponent <OVRSandwichCompositionManager>();

        compositionManager.fgTexture = record.fgRenderTexture;
        compositionManager.bgTexture = record.bgRenderTexture;

        cameraProxyPlane.transform.position = fgCamera.transform.position + fgCamera.transform.forward * cameraFramePlaneDistance;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + fgCamera.transform.forward);
    }
Example #19
    public override void Update(Camera mainCamera)
    {
        OVRPlugin.SetHandNodePoseStateLatency(0.0);             // HandNodePoseStateLatency doesn't apply to external composition; always force it to 0.0

#if OVR_ANDROID_MRC
        RefreshAudioFilter();

        int drawTextureIndex = (frameIndex / 2) % 2;
        int castTextureIndex = 1 - drawTextureIndex;

        backgroundCamera.enabled = (frameIndex % 2) == 0;
        foregroundCamera.enabled = (frameIndex % 2) == 1;

        if (frameIndex % 2 == 0)
        {
            if (lastMrcEncodeFrameSyncId != -1)
            {
                OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
                lastMrcEncodeFrameSyncId = -1;
            }
            lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
            SetCameraTargetTexture(drawTextureIndex);
        }

        ++frameIndex;
#endif

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;

            if (cameraInTrackingSpace)
            {
                backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;
            OVRPlugin.Posef            calibrationRawPose;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics, out calibrationRawPose))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect      = aspect;

                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics, calibrationRawPose);
                    backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                    foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics, calibrationRawPose);
                    backgroundCamera.transform.FromOVRPose(worldSpacePose);
                    foregroundCamera.transform.FromOVRPose(worldSpacePose);
                }
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // Assume the player is always standing upright
        Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;
        externalCameraToHeadXZ.y            = 0;
        cameraProxyPlane.transform.position = mainCamera.transform.position;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
    }
Example #20
    public override void Update(Camera mainCamera)
    {
        OVRPlugin.SetHandNodePoseStateLatency(0.0);                     // HandNodePoseStateLatency doesn't apply to external composition; always force it to 0.0

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position    = OVRMixedReality.fakeCameraPositon;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            backgroundCamera.transform.FromOVRPose(worldSpacePose);

            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.transform.FromOVRPose(worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);

                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect      = aspect;
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // Assume the player is always standing upright
        Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;

        externalCameraToHeadXZ.y            = 0;
        cameraProxyPlane.transform.position = mainCamera.transform.position;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
    }
Example #21
	public override void Update(Camera mainCamera)
	{
		if (!hasCameraDeviceOpened)
		{
			return;
		}

		if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
		{
			Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
		}

		directCompositionCamera.clearFlags = mainCamera.clearFlags;
		directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
		directCompositionCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
		directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
		directCompositionCamera.farClipPlane = mainCamera.farClipPlane;

		if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
		{
			OVRPose worldSpacePose = new OVRPose();
			OVRPose trackingSpacePose = new OVRPose();
			trackingSpacePose.position = OVRMixedReality.fakeCameraPositon;
			trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
			worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

			directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
			directCompositionCamera.aspect = OVRMixedReality.fakeCameraAspect;
			directCompositionCamera.transform.FromOVRPose(worldSpacePose);
		}
		else
		{
			OVRPlugin.CameraExtrinsics extrinsics;
			OVRPlugin.CameraIntrinsics intrinsics;

			// So far, only support 1 camera for MR and always use camera index 0
			if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
			{
				OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);

				float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
				float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
				directCompositionCamera.fieldOfView = fovY;
				directCompositionCamera.aspect = aspect;
				directCompositionCamera.transform.FromOVRPose(worldSpacePose);
			}
			else
			{
				Debug.LogWarning("Failed to get external camera information");
			}
		}

		if (hasCameraDeviceOpened)
		{
			if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
			{
				boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
				boundaryMeshMaskTexture.Create();
			}
			UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, boundaryMeshMaskTexture);
			directCompositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
		}
	}