Exemplo n.º 1
0
        /// <summary>
        /// Tracking poses provided by the SDK come in a right-handed coordinate system. If an application
        /// is passing in ovrProjection_LeftHanded into Matrix4f_Projection, then it should also use
        /// this function to flip the HMD tracking poses to be left-handed.
        ///
        /// While this utility function is intended to convert a left-handed ovrPosef into a right-handed
        /// coordinate system, it will also work for converting right-handed to left-handed since the
        /// flip operation is the same for both cases.
        /// </summary>
        /// <param name="pose">Pose that is right-handed</param>
        /// <summary>
        /// Flips the handedness of a pose, converting between the SDK's right-handed
        /// coordinate system and a left-handed one.
        ///
        /// Tracking poses provided by the SDK come in a right-handed coordinate system. If an application
        /// is passing in ovrProjection_LeftHanded into Matrix4f_Projection, then it should also use
        /// this function to flip the HMD tracking poses to be left-handed.
        ///
        /// While this utility function is intended to convert a left-handed ovrPosef into a right-handed
        /// coordinate system, it will also work for converting right-handed to left-handed since the
        /// flip operation is the same for both cases.
        /// </summary>
        /// <param name="pose">Pose whose handedness should be flipped.</param>
        /// <returns>A copy of <paramref name="pose"/> with its handedness flipped.</returns>
        public OVRBase.Posef Posef_FlipHandedness(OVRBase.Posef pose)
        {
            // The native call supports in-place conversion, so the same struct is
            // passed as both the input and the output argument.
            // NOTE: the local was previously declared as OVRTypes.Posef while the
            // signature uses OVRBase.Posef; unified on OVRBase.Posef for consistency.
            OVRBase.Posef inputOutputPose = pose;
            OVR.Posef_FlipHandedness(ref inputOutputPose, ref inputOutputPose);

            return inputOutputPose;
        }
Exemplo n.º 2
0
        /// <summary>
        /// Computes offset eye poses based on headPose returned by ovrTrackingState.
        /// </summary>
        /// <param name="headPose">
        /// Indicates the HMD position and orientation to use for the calculation.
        /// </param>
        /// <param name="hmdToEyeViewOffset">
        /// Can be ovrEyeRenderDesc.HmdToEyeViewOffset returned from
        /// ovrHmd_GetRenderDesc. For monoscopic rendering, use a vector that is the average
        /// of the two vectors for both eyes.
        /// </param>
        /// <param name="outEyePoses">
        /// If outEyePoses are used for rendering, they should be passed to
        /// SubmitFrame in LayerEyeFov.RenderPose or LayerEyeFovDepth.RenderPose.
        /// </param>
        /// <summary>
        /// Computes offset eye poses based on headPose returned by ovrTrackingState.
        /// </summary>
        /// <param name="headPose">
        /// Indicates the HMD position and orientation to use for the calculation.
        /// </param>
        /// <param name="hmdToEyeViewOffset">
        /// Can be ovrEyeRenderDesc.HmdToEyeViewOffset returned from
        /// ovrHmd_GetRenderDesc. For monoscopic rendering, use a vector that is the average
        /// of the two vectors for both eyes. Must contain exactly two elements (left, right).
        /// </param>
        /// <param name="outEyePoses">
        /// Receives the computed eye poses. If outEyePoses are used for rendering, they should be
        /// passed to SubmitFrame in LayerEyeFov.RenderPose or LayerEyeFovDepth.RenderPose.
        /// Must contain exactly two elements (left, right).
        /// </param>
        /// <exception cref="ArgumentException">
        /// Thrown when <paramref name="hmdToEyeViewOffset"/> or <paramref name="outEyePoses"/>
        /// does not contain exactly two elements.
        /// </exception>
        public void CalcEyePoses(OVRBase.Posef headPose, OVRBase.Vector3f[] hmdToEyeViewOffset, ref OVRBase.Posef[] outEyePoses)
        {
            if (hmdToEyeViewOffset.Length != 2)
            {
                // nameof keeps the paramName argument refactor-safe.
                throw new ArgumentException("The hmdToEyeViewOffset argument must contain 2 elements.", nameof(hmdToEyeViewOffset));
            }
            if (outEyePoses.Length != 2)
            {
                throw new ArgumentException("The outEyePoses argument must contain 2 elements.", nameof(outEyePoses));
            }

            if (m_eyePosesPtr == IntPtr.Zero)
            {
                // Lazily allocate unmanaged memory large enough for two Posef structs.
                // The buffer is reused across calls; it is not freed here — presumably
                // released by the owning type's cleanup/Dispose (TODO confirm).
                m_poseFSize   = Marshal.SizeOf(typeof(OVRBase.Posef));
                m_eyePosesPtr = Marshal.AllocHGlobal(m_poseFSize * 2);
            }

            // Native call writes both eye poses into the unmanaged buffer.
            OVR.CalcEyePoses(headPose, hmdToEyeViewOffset, m_eyePosesPtr);

            // Copy the two consecutively-stored Posef structs back into managed memory.
            outEyePoses[0] = (OVRBase.Posef)Marshal.PtrToStructure(m_eyePosesPtr, typeof(OVRBase.Posef));
            outEyePoses[1] = (OVRBase.Posef)Marshal.PtrToStructure(m_eyePosesPtr + m_poseFSize, typeof(OVRBase.Posef));
        }