ToPosef() Private Method

private ToPosef() : OVRPlugin.Posef
Returns OVRPlugin.Posef
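
The examples below use ToPosef() to convert a managed OVRPose into the plugin-side OVRPlugin.Posef struct before handing it to OVRPlugin calls. The following is a minimal, hypothetical sketch of that conversion (it assumes the ToOVRPose() and ToPosef() helpers used in the examples below are accessible from the calling script):

using UnityEngine;

// Hypothetical helper: expresses this object's pose in tracking space and pushes it
// to OVRPlugin as the external camera's static pose.
public class PosefConversionSketch : MonoBehaviour
{
    void LateUpdate()
    {
        Camera mainCamera = Camera.main;
        if (mainCamera == null)
        {
            return;
        }

        OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
        if (cameraRig == null)
        {
            return;
        }

        // Express this object's pose relative to the tracking space...
        OVRPose trackingSpacePose = cameraRig.trackingSpace.ToOVRPose(false);
        OVRPose relativePose      = trackingSpacePose.Inverse() * transform.ToOVRPose(false);

        // ...then convert the managed OVRPose into the plugin struct.
        OVRPlugin.Posef posef = relativePose.ToPosef();
        OVRPlugin.OverrideExternalCameraStaticPose(0, true, posef);
    }
}

Example #3 below performs the same conversion in its ThirdPerson camera mode, and the first example uses it to fill in CameraExtrinsics.RelativePose.
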
    void UpdateDefaultExternalCamera()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        // always build a 1080p external camera
        const int   cameraPixelWidth  = 1920;
        const int   cameraPixelHeight = 1080;
        const float cameraAspect      = (float)cameraPixelWidth / cameraPixelHeight;


        string cameraName = "UnityExternalCamera";
        OVRPlugin.CameraIntrinsics cameraIntrinsics = new OVRPlugin.CameraIntrinsics();
        OVRPlugin.CameraExtrinsics cameraExtrinsics = new OVRPlugin.CameraExtrinsics();

        // intrinsics

        cameraIntrinsics.IsValid = OVRPlugin.Bool.True;
        cameraIntrinsics.LastChangedTimeSeconds = Time.time;

        float          vFov = defaultExternalCamera.fieldOfView * Mathf.Deg2Rad;
        float          hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * cameraAspect) * 2.0f;
        OVRPlugin.Fovf fov  = new OVRPlugin.Fovf();
        fov.UpTan   = fov.DownTan = Mathf.Tan(vFov * 0.5f);
        fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);

        cameraIntrinsics.FOVPort = fov;
        cameraIntrinsics.VirtualNearPlaneDistanceMeters = defaultExternalCamera.nearClipPlane;
        cameraIntrinsics.VirtualFarPlaneDistanceMeters  = defaultExternalCamera.farClipPlane;
        cameraIntrinsics.ImageSensorPixelResolution.w   = cameraPixelWidth;
        cameraIntrinsics.ImageSensorPixelResolution.h   = cameraPixelHeight;

        // extrinsics

        cameraExtrinsics.IsValid = OVRPlugin.Bool.True;
        cameraExtrinsics.LastChangedTimeSeconds = Time.time;
        cameraExtrinsics.CameraStatusData       = OVRPlugin.CameraStatus.CameraStatus_Calibrated;
        cameraExtrinsics.AttachedToNode         = OVRPlugin.Node.None;

        Camera       mainCamera = Camera.main;
        OVRCameraRig cameraRig  = mainCamera.GetComponentInParent<OVRCameraRig>();
        if (cameraRig)
        {
            Transform trackingSpace     = cameraRig.trackingSpace;
            OVRPose   trackingSpacePose = trackingSpace.ToOVRPose(false);
            OVRPose   cameraPose        = defaultExternalCamera.transform.ToOVRPose(false);
            OVRPose   relativePose      = trackingSpacePose.Inverse() * cameraPose;
            cameraExtrinsics.RelativePose = relativePose.ToPosef();
        }
        else
        {
            cameraExtrinsics.RelativePose = OVRPlugin.Posef.identity;
        }

        if (!OVRPlugin.SetDefaultExternalCamera(cameraName, ref cameraIntrinsics, ref cameraExtrinsics))
        {
            Debug.LogError("SetDefaultExternalCamera() failed");
        }
#endif
    }
    public override void Update(GameObject gameObject, Camera mainCamera)
    {
        RefreshCameraObjects(gameObject, mainCamera);

        OVRPlugin.SetHandNodePoseStateLatency(0.0);             // HandNodePoseStateLatency does not apply to external composition; always force it to 0.0

        // Provide head and controller poses in stage space so a third-person camera can compute its position relative to different anchors
        OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
        OVRPose localToStagePose = stageToLocalPose.Inverse();
        OVRPose head             = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.Head, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose leftC            = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render).ToOVRPose();
        OVRPose rightC           = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandRight, OVRPlugin.Step.Render).ToOVRPose();

        OVRPlugin.Media.SetMrcHeadsetControllerPose(head.ToPosef(), leftC.ToPosef(), rightC.ToPosef());

#if OVR_ANDROID_MRC
        RefreshAudioFilter();

        // Double-buffer the MRC capture textures: each composited frame spans two Unity
        // frames (background pass, then foreground pass), so the draw target switches
        // every two frames while the previously completed texture is cast and encoded.
        int drawTextureIndex = (frameIndex / 2) % 2;
        int castTextureIndex = 1 - drawTextureIndex;

        backgroundCamera.enabled = (frameIndex % 2) == 0;
        foregroundCamera.enabled = (frameIndex % 2) == 1;

        if (frameIndex % 2 == 0)
        {
            if (lastMrcEncodeFrameSyncId != -1)
            {
                OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
                lastMrcEncodeFrameSyncId = -1;
            }
            lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
            SetCameraTargetTexture(drawTextureIndex);
        }

        ++frameIndex;
#endif

        backgroundCamera.clearFlags      = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask     = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane   = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane    = mainCamera.farClipPlane;

        foregroundCamera.cullingMask   = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane  = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose    = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
                                         OVRMixedReality.fakeCameraEyeLevelPosition :
                                         OVRMixedReality.fakeCameraFloorLevelPosition;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect      = OVRMixedReality.fakeCameraAspect;

            if (cameraInTrackingSpace)
            {
                backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
            }
            else
            {
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // Only one camera is supported for MR so far, so always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                float fovY   = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect      = aspect;
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect      = aspect;

                if (cameraInTrackingSpace)
                {
                    OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                    foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
                }
                else
                {
                    OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
                    backgroundCamera.transform.FromOVRPose(worldSpacePose);
                    foregroundCamera.transform.FromOVRPose(worldSpacePose);
                }
#if OVR_ANDROID_MRC
                cameraPoseTimeArray[drawTextureIndex] = extrinsics.LastChangedTimeSeconds;
#endif
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // Clip the foreground camera at the headset's depth so only geometry between the
        // external camera and the user ends up in the foreground layer.
        Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
        float   clipDistance            = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
        foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
    }
Example #3
    // Pose Helper Functions
    public static OVRPlugin.Posef UnityWorldSpacePoseToTrackingSpacePose(Transform pose)
    {
        OVRPose trackingPose = OVRExtensions.ToTrackingSpacePose(pose, MainCamera);

        return trackingPose.ToPosef();
    }
    // Update is called once per frame
    void Update()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (!inited)
        {
            Initialize();
            return;
        }

        if (!defaultExternalCamera)
        {
            return;
        }

#if OVR_ANDROID_MRC
        if (!OVRPlugin.Media.GetInitialized())
        {
            return;
        }
#else
        if (!OVRPlugin.IsMixedRealityInitialized())
        {
            return;
        }
#endif

        if (OVRInput.GetDown(OVRInput.Button.One))
        {
            if (currentMode == CameraMode.ThirdPerson)
            {
                currentMode = CameraMode.Normal;
            }
            else
            {
                currentMode = currentMode + 1;
            }

            Debug.LogFormat("Camera mode change to {0}", currentMode);
        }

        if (currentMode == CameraMode.Normal)
        {
            UpdateDefaultExternalCamera();
            OVRPlugin.OverrideExternalCameraFov(0, false, new OVRPlugin.Fovf());
            OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
        }
        else if (currentMode == CameraMode.OverrideFov)
        {
            OVRPlugin.Fovf fov    = defaultFov;
            OVRPlugin.Fovf newFov = new OVRPlugin.Fovf();
            newFov.LeftTan  = fov.LeftTan * 2.0f;
            newFov.RightTan = fov.RightTan * 2.0f;
            newFov.UpTan    = fov.UpTan * 2.0f;
            newFov.DownTan  = fov.DownTan * 2.0f;

            OVRPlugin.OverrideExternalCameraFov(0, true, newFov);
            OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);

            if (!OVRPlugin.GetUseOverriddenExternalCameraFov(0))
            {
                Debug.LogWarning("FOV not overridden");
            }
        }
        else if (currentMode == CameraMode.ThirdPerson)
        {
            Camera camera = GetComponent<Camera>();
            if (camera == null)
            {
                return;
            }

            float          vFov = camera.fieldOfView * Mathf.Deg2Rad;
            float          hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * camera.aspect) * 2.0f;
            OVRPlugin.Fovf fov  = new OVRPlugin.Fovf();
            fov.UpTan   = fov.DownTan = Mathf.Tan(vFov * 0.5f);
            fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);
            OVRPlugin.OverrideExternalCameraFov(0, true, fov);

            Camera       mainCamera = Camera.main;
            OVRCameraRig cameraRig  = mainCamera.GetComponentInParent<OVRCameraRig>();
            if (cameraRig)
            {
                Transform       trackingSpace     = cameraRig.trackingSpace;
                OVRPose         trackingSpacePose = trackingSpace.ToOVRPose(false);
                OVRPose         cameraPose        = transform.ToOVRPose(false);
                OVRPose         relativePose      = trackingSpacePose.Inverse() * cameraPose;
                OVRPlugin.Posef relativePosef     = relativePose.ToPosef();
                OVRPlugin.OverrideExternalCameraStaticPose(0, true, relativePosef);
            }
            else
            {
                OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
            }

            if (!OVRPlugin.GetUseOverriddenExternalCameraFov(0))
            {
                Debug.LogWarning("FOV not overridden");
            }

            if (!OVRPlugin.GetUseOverriddenExternalCameraStaticPose(0))
            {
                Debug.LogWarning("StaticPose not overridden");
            }
        }
#endif
    }
Example #5
        void Update()
        {
            if (!calibratedCameraPose.HasValue)
            {
                if (!OVRPlugin.Media.GetInitialized())
                {
                    return;
                }

                OVRPlugin.CameraIntrinsics cameraIntrinsics;
                OVRPlugin.CameraExtrinsics cameraExtrinsics;

                if (OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics))
                {
                    calibratedCameraPose = cameraExtrinsics.RelativePose.ToOVRPose();
                }
                else
                {
                    return;
                }
            }

            OVRPose cameraStagePoseInUnits = calibratedCameraPose.Value;

            // Converting position from meters to decimeters (unit used by Open Brush)
            cameraStagePoseInUnits.position *= App.METERS_TO_UNITS;

            // Workaround to fix the OVRExtensions.ToWorldSpacePose() and
            // OVRComposition.ComputeCameraWorldSpacePose() calls when computing
            // the Mixed Reality foreground and background camera positions.
            OVRPose headPose = OVRPose.identity;

            Vector3    pos;
            Quaternion rot;

            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head,
                                                                   NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
            {
                headPose.position = pos;
            }

            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head,
                                                                      NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
            {
                headPose.orientation = rot;
            }

            OVRPose headPoseInUnits = OVRPose.identity;

            headPoseInUnits.position    = headPose.position * App.METERS_TO_UNITS;
            headPoseInUnits.orientation = headPose.orientation;

            OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(
                OVRPlugin.TrackingOrigin.Stage).ToOVRPose();

            OVRPose stageToLocalPoseInUnits = OVRPose.identity;

            stageToLocalPoseInUnits.position    = stageToLocalPose.position * App.METERS_TO_UNITS;
            stageToLocalPoseInUnits.orientation = stageToLocalPose.orientation;

            // Per the workaround note above: combine the calibrated camera pose with the
            // decimeter-scaled head/stage poses, then re-derive the stage pose with the
            // unscaled (meter) poses so the engine-side world-space conversion comes out correct.
            OVRPose cameraWorldPoseInUnits = headPoseInUnits.Inverse() * stageToLocalPoseInUnits *
                                             cameraStagePoseInUnits;
            OVRPose cameraStagePoseFix = stageToLocalPose.Inverse() * headPose * cameraWorldPoseInUnits;

            // Override the MRC camera's stage pose
            OVRPlugin.OverrideExternalCameraStaticPose(0, true, cameraStagePoseFix.ToPosef());
        }