Exemple #1
0
        public void Session_GetEyePoses()
        {
            IntPtr sessionPtr = CreateSession();

            Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

            // Symmetric per-eye offsets, 0.1m either side of the head centre.
            var eyeOffsets = new Vector3f[2];
            eyeOffsets[0].X = -0.1f;
            eyeOffsets[1].X = 0.1f;

            var    eyePoses = new Posef[2];
            double sampleTime;

            OVR.GetEyePoses(sessionPtr, 0, true, eyeOffsets, ref eyePoses, out sampleTime);

            // Each returned eye pose should carry non-trivial tracking data.
            for (int eye = 0; eye < 2; eye++)
            {
                Assert.AreNotEqual(0, eyePoses[eye].Orientation.W);
                Assert.AreNotEqual(0, eyePoses[eye].Position.X);
                Assert.AreNotEqual(0, eyePoses[eye].Position.Y);
                Assert.AreNotEqual(0, eyePoses[eye].Position.Z);
            }

            // The call must leave the caller-supplied offsets untouched.
            for (int eye = 0; eye < 2; eye++)
            {
                Assert.AreNotEqual(0, eyeOffsets[eye].X);
                Assert.AreEqual(0, eyeOffsets[eye].Y);
                Assert.AreEqual(0, eyeOffsets[eye].Z);
            }
        }
Exemple #2
0
        public void CalcEyePoses()
        {
            IntPtr sessionPtr = CreateSession();

            Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

            var eyeOffsets = new Vector3f[2];
            var eyePoses   = new Posef[2];
            var headPose   = new Posef();

            // Symmetric per-eye offsets, 0.1m either side of the head centre.
            eyeOffsets[0].X = -0.1f;
            eyeOffsets[1].X = 0.1f;

            // Define a head position looking forward, from a position where the head is 1.75 meters above the ground.
            headPose.Orientation.W = 1;
            headPose.Position.Y    = 1.75f;

            OVR.CalcEyePoses(headPose, eyeOffsets, ref eyePoses);

            // Each eye pose should keep the head orientation, shift sideways by
            // its own offset, and keep the head height.
            for (int eye = 0; eye < 2; eye++)
            {
                Assert.AreEqual(headPose.Orientation.X, eyePoses[eye].Orientation.X);
                Assert.AreEqual(headPose.Orientation.Y, eyePoses[eye].Orientation.Y);
                Assert.AreEqual(headPose.Orientation.Z, eyePoses[eye].Orientation.Z);
                Assert.AreEqual(headPose.Orientation.W, eyePoses[eye].Orientation.W);
                Assert.AreEqual(eyeOffsets[eye].X, eyePoses[eye].Position.X);
                Assert.AreEqual(headPose.Position.Y, eyePoses[eye].Position.Y);
                Assert.AreEqual(0, eyePoses[eye].Position.Z);
            }
        }
 /// <summary>
 /// Converts an OVR <see cref="Posef"/> into a MixedRealityPose, mirroring the
 /// Z position and negating the X/Y quaternion components in the process.
 /// </summary>
 /// <param name="p">The source pose.</param>
 /// <returns>The converted pose.</returns>
 public static MixedRealityPose ToMixedRealityPose(this Posef p)
 {
     var position = new Vector3(p.Position.x, p.Position.y, -p.Position.z);
     var rotation = new Quaternion(-p.Orientation.x, -p.Orientation.y, p.Orientation.z, p.Orientation.w);

     return new MixedRealityPose(position: position, rotation: rotation);
 }
 /// <summary>
 /// Converts an OVR <see cref="Posef"/> into an AvatarTransform by converting
 /// its position vector and orientation quaternion individually via the
 /// corresponding Avatar conversion helpers.
 /// </summary>
 /// <param name="pose">The pose.</param>
 /// <returns>An AvatarTransform holding the converted position and orientation.</returns>
 public static AvatarTransform AvatarTransformFromPosef(Posef pose)
 {
     return(new AvatarTransform()
     {
         Position = AvatarVector3fFromVector3f(pose.Position),
         Orientation = AvatarQuatfFromQuaternionf(pose.Orientation),
     });
 }
 /// <summary>
 /// Converts a plain C# Posef to a Unity OVRPose.
 /// </summary>
 /// <returns>The pose as a Unity OVRPose.</returns>
 /// <param name="pose">The pose as a C# Posef.</param>
 /// <param name="rhToLh">Forwarded to the ToVector3/ToQuaternion component conversions;
 /// presumably selects right-handed to left-handed coordinate conversion — confirm
 /// against those helpers.</param>
 public static OVRPose ToPose(this Posef pose, bool rhToLh = true)
 {
     return(new OVRPose
     {
         position = pose.Position.ToVector3(rhToLh),
         orientation = pose.Orientation.ToQuaternion(rhToLh)
     });
 }
Exemple #6
0
 /// <summary>
 /// Creates a new LayerEyeMatrix with its header tagged as an eye-matrix layer
 /// and one array slot allocated per eye.
 /// </summary>
 public LayerEyeMatrix()
 {
     const int eyeCount = 2;

     Header       = new LayerHeader { Type = LayerType.EyeMatrix };
     ColorTexture = new IntPtr[eyeCount];
     Viewport     = new Recti[eyeCount];
     RenderPose   = new Posef[eyeCount];
     Matrix       = new Matrix4[eyeCount];
 }
Exemple #7
0
 /// <summary>
 /// Creates a new LayerEyeFov with its header tagged as an eye-FOV layer and
 /// one array slot allocated per eye.
 /// </summary>
 public LayerEyeFov()
 {
     const int eyeCount = 2;

     Header       = new LayerHeader { Type = LayerType.EyeFov };
     ColorTexture = new IntPtr[eyeCount];
     Viewport     = new Recti[eyeCount];
     Fov          = new FovPort[eyeCount];
     RenderPose   = new Posef[eyeCount];
 }
Exemple #8
0
 /// <summary>
 /// Creates a new LayerQuad with its header tagged as a quad layer and all
 /// remaining members reset to their zero/default values.
 /// </summary>
 public LayerQuad()
 {
     Header         = new LayerHeader { Type = LayerType.Quad };
     ColorTexture   = IntPtr.Zero;
     Viewport       = new Recti();
     QuadPoseCenter = new Posef();
     QuadSize       = new Vector2();
 }
Exemple #9
0
        public void Session_SpecifyTrackingOrigin()
        {
            IntPtr sessionPtr = CreateSession();

            Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

            // Origin at (0,0,0) with a unit quaternion orientation.
            var origin = new Posef
            {
                Position    = new Vector3f(0, 0, 0),
                Orientation = new Quaternionf(0, 1, 0, 0)
            };

            Result result = OVR.SpecifyTrackingOrigin(sessionPtr, origin);

            Assert.IsTrue(result >= Result.Success, "Failed to call SpecifyTrackingOrigin");
        }
Exemple #10
0
        public void Session_Posef_FlipHandedness()
        {
            // All-ones input makes every sign flip observable in the output.
            var pose = new Posef
            {
                Position    = new Vector3f(1, 1, 1),
                Orientation = new Quaternionf(1, 1, 1, 1)
            };
            var resultPose = new Posef();

            OVR.Posef_FlipHandedness(ref pose, ref resultPose);

            // Position: only the X component is expected to be mirrored.
            Assert.AreEqual(pose.Position.X, -resultPose.Position.X);
            Assert.AreEqual(pose.Position.Y, resultPose.Position.Y);
            Assert.AreEqual(pose.Position.Z, resultPose.Position.Z);

            // Orientation: X and W are expected to be negated, Y and Z unchanged.
            Assert.AreEqual(-pose.Orientation.X, resultPose.Orientation.X);
            Assert.AreEqual(pose.Orientation.Y, resultPose.Orientation.Y);
            Assert.AreEqual(pose.Orientation.Z, resultPose.Orientation.Z);
            Assert.AreEqual(-pose.Orientation.W, resultPose.Orientation.W);
        }
Exemple #11
0
			protected override void OnUpdateFrame(FrameEventArgs e)
			{
				base.OnUpdateFrame(e);

				// Predict when this frame will be shown on the HMD, then sample the
				// tracking state for that moment.
				double predictedTime = OvrDLL.ovr_GetPredictedDisplayTime(session, 0);
				TrackingState trackingState = OvrDLL.ovr_GetTrackingState(session, predictedTime, true);

				// Derive both eye poses from the predicted head pose.
				Vector3[] offsets = new Vector3[2] { eyes[0].desc.HmdToEyeOffset, eyes[1].desc.HmdToEyeOffset };
				Posef[] calculatedPoses = new Posef[2];
				OvrDLL.ovr_CalcEyePoses(trackingState.HeadPose.ThePose, offsets, calculatedPoses);

				// Only adopt each component of the computed poses when the runtime
				// reports it was actually tracked; otherwise fall back to a neutral value.
				bool orientationTracked = trackingState.StatusFlags.HasFlag(StatusBits.OrientationTracked);
				bool positionTracked = trackingState.StatusFlags.HasFlag(StatusBits.PositionTracked);

				for (int i = 0; i < 2; i++)
				{
					eyes[i].pose.Orientation = orientationTracked ? calculatedPoses[i].Orientation : Quaternion.Identity;
					eyes[i].pose.Position = positionTracked ? calculatedPoses[i].Position : Vector3.Zero;
				}
			}
Exemple #12
0
        /// <summary>
        /// Exercises the arithmetic, comparison and indexing operators of the
        /// TVector, TMatrix, TQuaternion and TPose wrapper types. Each put/put2
        /// call prints an expected value next to the computed one for visual
        /// comparison.
        /// </summary>
        static void test_operator()
        {
            test_name("operator");

            // TVector
            Vec3f v3a = new Vec3f(0.1f, 0.2f, 0.3f);
            Vec3f v3b = new Vec3f(0.4f, 0.5f, 0.6f);
            Vec3f v3c = new Vec3f(0.1f, 0.2f, 0.3f);    // v3c == v3a
            Vec3f v3d;

            put("vector unary  -", "(-0.1, -0.2, -0.3)", -v3a);
            put("vector binary +", "( 0.5,  0.7,  0.9)", v3a + v3b);
            put("vector binary -", "(-0.3, -0.3, -0.3)", v3a - v3b);
            put("vector binary *", "( 0.2,  0.4,  0.6)", v3a * 2);
            put("vector binary *", "( 0.8,  1.0,  1.2)", 2 * v3b);
            put("vector binary /", "( 0.05, 0.10,  0.15)", v3a / 2);
            put("vector binary *", "  0.32", v3a * v3b);                 // dot product (expected 0.32 matches)
            put("vector binary %", "(-0.03, 0.06, -0.03)", v3a % v3b);   // cross product
            put("vector binary ^", "(-0.03, 0.06, -0.03)", v3a ^ v3b);   // cross product (alias of %)
            v3d = v3a; v3d += v3b; put("vector binary +=", "( 0.5,  0.7,  0.9)", v3d);
            v3d = v3a; v3d -= v3b; put("vector binary -=", "(-0.3, -0.3, -0.3)", v3d);
            v3d = v3a; v3d *= 2;   put("vector binary *=", "( 0.2,  0.4,  0.6)", v3d);
            v3d = v3a; v3d /= 2;   put("vector binary /=", "(0.05, 0.10, 0.15)", v3d);
            put("vector binary ==", "True ", v3a == v3c);
            put("vector binary ==", "False", v3a == v3b);
            put("vector binary !=", "True ", v3a != v3b);
            put("vector binary !=", "False", v3a != v3c);

            // TMatrix
            Matrix3f m3a = new Matrix3f(0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f);
            Matrix3f m3b = new Matrix3f(1.1f, 1.2f, 1.3f, 1.4f, 1.5f, 1.6f, 1.7f, 1.8f, 1.9f);
            Matrix3f m3c = new Matrix3f(0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f);  // m3c == m3a
            Matrix3f m3d;
            Matrix3f m3r = new Matrix3f(-0.1f, -0.2f, -0.3f, -0.4f, -0.5f, -0.6f, -0.7f, -0.8f, -0.9f);
            Matrix3f m3s = new Matrix3f(1.2f, 1.4f, 1.6f, 1.8f, 2.0f, 2.2f, 2.4f, 2.6f, 2.8f);
            Matrix3f m3t = new Matrix3f(-1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f);
            Matrix3f m3u = new Matrix3f(0.2f, 0.4f, 0.6f, 0.8f, 1.0f, 1.2f, 1.4f, 1.6f, 1.8f);

            put2("matrix unary  -", edit_matrix(m3r), edit_matrix(-m3a));
            put2("matrix binary +", edit_matrix(m3s), edit_matrix(m3a + m3b));
            put2("matrix binary -", edit_matrix(m3t), edit_matrix(m3a - m3b));
            put2("matrix binary *", edit_matrix(m3u), edit_matrix(m3a * 2));
            put2("matrix binary *", edit_matrix(m3u), edit_matrix(2 * m3a));
            put2("matrix binary *", new Vec3d(0.14, 0.32, 0.50), (m3a * v3a));
            put2("matrix binary *", new Vec3d(2.16, 2.31, 2.46), (v3b * m3b));
            m3d = m3a; m3d += m3b; put2("matrix binary +=", edit_matrix(m3s), edit_matrix(m3d));
            // BUGFIX: the next two labels previously read "matrix binary +=" (copy-paste);
            // they exercise -= and *= respectively.
            m3d = m3a; m3d -= m3b; put2("matrix binary -=", edit_matrix(m3t), edit_matrix(m3d));
            m3d = m3a; m3d *= 2;   put2("matrix binary *=", edit_matrix(m3u), edit_matrix(m3d));

            // TQuaternion
            Quaternionf q1  = new Quaternionf(1.0f, 2.0f, 3.0f, 4.0f);
            Quaternionf q2  = new Quaternionf(5.0f, 6.0f, 7.0f, 8.0f);
            Vec3f       qv1 = new Vec3f(1.0f, 2.0f, 3.0f);
            Matrix3f    qm1 = new Matrix3f(1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f);
            Quaternionf qr  = new Quaternionf(-60.0f, 12.0f, 30.0f, 24.0f);
            Vec3f       qvs = new Vec3f(54f, 60f, 78f);
            Matrix3f    qmt = new Matrix3f(150f, 156f, 162f, 120f, 150f, 180, 150f, 192f, 234f);

            put2("quaternion binary *", edit_quaternion(qr), edit_quaternion(q1 * q2));
            put2("quaternion binary *", edit_vector(qvs), edit_vector(q1 * qv1));
            put2("quaternion binary *", edit_matrix(qmt), edit_matrix(q1 * qm1));

            // TPose
            Posef pp1 = new Posef(1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f);
            Posef pp2 = new Posef(7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f);
            Vec3f pv1 = new Vec3f(1.0f, 2.0f, 3.0f);
            Posef pr1 = new Posef(-36.0f, 12.0f, 42.0f, 24.0f, -25.0f, 66.0f, 97.0f);

            put2("pose binary *", edit_pose(pr1), edit_pose(pp1 * pp2));
            put2("pose binary *", "(59, 66, 85)", (pp1 * pv1));

            // indexing
            System.Console.WriteLine("");
            Vec3f v31 = new Vec3f(0.1f, 0.2f, 0.3f);
            Vec3f v32 = new Vec3f(1.0f, 1.0f, 1.0f);
            Vec3f v3e = v31 + v32;

            for (int i = 0; i < 3; i++)
            {
                v31[i] += v32[i];
            }
            put("indexing []", edit_vector(v3e), edit_vector(v31));

            Matrix3f m31 = new Matrix3f(0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f);
            Matrix3f m32 = new Matrix3f(1.0f, 1.0f, 1.0f, 2.0f, 2.0f, 2.0f, 3.0f, 3.0f, 3.0f);
            Matrix3f m3e = m31 + m32;

            put("indexing [] ref", "(0.1, 0.2, 0.3)", edit_vector(m31[0]));
            put("            ref", "(0.4, 0.5, 0.6)", edit_vector(m31[1]));
            put("            ref", "(0.7, 0.8, 0.9)", edit_vector(m31[2]));
            // The three staircase loops below add v32 once to all rows, once more to
            // rows 1..2, and once more to row 2 — a net of (+1, +2, +3) rows of ones,
            // which equals m32, so the result must match m3e = m31 + m32.
            for (int i = 0; i < 3; i++)
            {
                m31[i] += v32;
            }
            for (int i = 1; i < 3; i++)
            {
                m31[i] += v32;
            }
            for (int i = 2; i < 3; i++)
            {
                m31[i] += v32;
            }
            put2("indexing [] set", edit_matrix(m3e), edit_matrix(m31));
        }
Exemple #13
0
        /// <summary>
        /// Exercises member functions of the wrapper types: square/norm/unitize on
        /// vectors, accessors and rotation factories on quaternions, and accessors
        /// plus the Unit/Trn factories on poses. Each put/put2 call prints an
        /// expected value next to the computed one for visual comparison.
        /// </summary>
        static void test_func_call()
        {
            test_name("func_call");

            // TVector: square = 1+4+9 = 14, norm = sqrt(14).
            Vec3f v31 = new Vec3f(1.0f, 2.0f, 3.0f);

            put("Vec3f: square ", "14.0", v31.square());
            put("Vec3f: norm   ", "3.7416574", v31.norm());
            Vec3f v3u = v31; v3u.unitize();
            Vec3f v3r = new Vec3f(0.2672612f, 0.5345225f, 0.8017837f);

            put2("Vec3f: unitize", edit_vector(v3r), edit_vector(v3u));

            // TQuaternion
            System.Console.WriteLine("");
            // Hard-coded float approximations of pi, cos(45deg) and sqrt(3).
            // NOTE(review): cos45 = 0.7071069 differs in the last digit from the
            // nearest float to cos(pi/4) (0.7071068) — presumably deliberate for the
            // printed comparison; confirm. rad45 appears unused in this method.
            float       pi        = 3.1415927f;
            float       rad90     = pi / 2;
            float       rad45     = pi / 4;
            float       cos45     = 0.7071069f;
            float       sqrt3     = 1.7320508f;
            float       sqrt3div3 = sqrt3 / 3;
            // q1: W = cos(45deg), vector part (sqrt3/3, sqrt3/3, sqrt3/3); Theta() is
            // expected to report rad90 below, consistent with W = cos(theta/2).
            Quaternionf q1        = new Quaternionf(cos45, sqrt3div3, sqrt3div3, sqrt3div3);
            Quaternionf q2        = new Quaternionf(q1.W(), q1.X(), q1.Y(), q1.Z());
            Vec3f       qv1       = new Vec3f(sqrt3div3, sqrt3div3, sqrt3div3);

            put2("Quaternionf: W(),X(),Y(),Z()", edit_quaternion(q1), edit_quaternion(q2));
            put2("Quaternionf: V", edit_vector(qv1), edit_vector(q1.V()));
            put2("Quaternionf: Axis", edit_vector(qv1), edit_vector(q1.Axis()));
            put("Quaternionf: Theta", rad90.ToString(), q1.Theta());
            System.Console.WriteLine("");

            // RotationHalf/Rotation: expected axis scaled by pi/(2*sqrt3) per component.
            float half = pi / (2 * sqrt3);
            Vec3f qv2  = new Vec3f(half, half, half);

            put2("Quaternionf: RotationHalf", edit_vector(qv2), edit_vector(q1.RotationHalf()));
            put2("Quaternionf: Rotation    ", edit_vector(qv2), edit_vector(q1.Rotation()));

            // Rot(angle, axis): expected result built by hand as
            // (cos(angle/2), sin(angle/2)/|axis| per component).
            float       angle = rad90;
            float       d     = sqrt3;
            float       s     = (float)Math.Sin(angle / 2) / d;
            Quaternionf qr    = new Quaternionf((float)Math.Cos(angle / 2), s, s, s);
            Quaternionf q3    = Quaternionf.Rot(angle, new Vec3f(1f, 1f, 1f));

            put2("Quaternionf: Rot", edit_quaternion(qr), edit_quaternion(q3));

            // Rot(angle, axisChar): axis given as a character code; 'x' should yield
            // (cos(angle/2), sin(angle/2), 0, 0).
            float       c1 = (float)Math.Cos(angle / 2);
            float       s1 = (float)Math.Sin(angle / 2);
            Quaternionf qs = new Quaternionf(c1, s1, 0f, 0f);
            Quaternionf q4 = Quaternionf.Rot(angle, (sbyte)'x');

            put2("Quaternionf: Rot", edit_quaternion(qs), edit_quaternion(q4));

            // Rot(rotationVector): |qv3| = sqrt3 plays the role of the angle; the
            // expected value is built from cos/sin of sqrt3/2.
            Vec3f       qv3 = new Vec3f(1f, 1f, 1f);
            float       c2  = (float)Math.Cos(sqrt3 / 2);
            float       s2  = (float)Math.Sin(sqrt3 / 2);
            Vec3f       qv4 = (s2 / sqrt3) * qv3;
            Quaternionf qt  = new Quaternionf(c2, qv4[0], qv4[1], qv4[2]);
            Quaternionf q5  = Quaternionf.Rot(qv3);

            put2("Quaternionf: Rot", edit_quaternion(qt), edit_quaternion(q5));

            // Conjugate mutates in place; Conjugated returns a copy. Both are expected
            // to negate the vector part only.
            Quaternionf qc1 = new Quaternionf(cos45, sqrt3div3, sqrt3div3, sqrt3div3);
            Quaternionf qc2 = new Quaternionf(cos45, sqrt3div3, sqrt3div3, sqrt3div3);
            Quaternionf qc  = new Quaternionf(cos45, -sqrt3div3, -sqrt3div3, -sqrt3div3);

            qc1.Conjugate();
            put2("Quaternionf: Conjugate", edit_quaternion(qc), edit_quaternion(qc1));
            put2("Quaternionf: Conjugated", edit_quaternion(qc), edit_quaternion(qc2.Conjugated()));

            // Inv: expected conjugate divided by the squared magnitude.
            float       qf1 = (float)(cos45 * cos45 + 3 * sqrt3div3 * sqrt3div3);
            Quaternionf qe  = qc2.Conjugated() / qf1;

            put2("Quaternionf: Inv", edit_quaternion(qe), edit_quaternion(qc2.Inv()));

            // TPose: accessors round-trip, then the Unit and Trn factories.
            System.Console.WriteLine("");
            Posef       p1   = new Posef(cos45, sqrt3, sqrt3, sqrt3, 1f, 2f, 3f);
            Posef       p2   = new Posef(p1.W(), p1.X(), p1.Y(), p1.Z(), p1.Px(), p1.Py(), p1.Pz());
            Vec3f       pv31 = new Vec3f(1f, 2f, 3f);
            Quaternionf pq1  = new Quaternionf(cos45, sqrt3, sqrt3, sqrt3);
            Quaternionf pq2  = new Quaternionf();
            Vec3f       pv32 = new Vec3f();
            Posef       pp1  = new Posef(pq2.w, pq2.x, pq2.y, pq2.z, pv32.x, pv32.y, pv32.z);
            Posef       pp2  = new Posef(pq2.w, pq2.x, pq2.y, pq2.z, 1f, 2f, 3f);

            put2("Posef: W(),X(),Y(),Z(),Px(),Py(),Pz()", edit_pose(p1), edit_pose(p2));
            put2("Posef: Pos()", edit_vector(pv31), edit_vector(p1.Pos()));
            put2("Posef: Ori()", edit_quaternion(pq1), edit_quaternion(p1.Ori()));
            put2("Posef: Unit ", edit_pose(pp1), edit_pose(Posef.Unit()));
            put2("Posef: Trn  ", edit_pose(pp2), edit_pose(Posef.Trn(1f, 2f, 3f)));
            put2("Posef: Trn  ", edit_pose(pp2), edit_pose(Posef.Trn(pv31)));
        }
Exemple #14
0
 /// <summary>
 /// Native plugin entry point that submits a stereo overlay quad (separate left/right
 /// textures) for the given device, pose, scale and layer index.
 /// NOTE(review): flag bit semantics are defined by the native OVR plugin — confirm
 /// against its headers.
 /// </summary>
 public static extern Bool ovrp_SetOverlayQuad3(uint flags, IntPtr textureLeft, IntPtr textureRight, IntPtr device, Posef pose, Vector3f scale, int layerIndex);
Exemple #15
0
    /// <summary>
    /// Submits a stereo overlay quad to the native plugin, choosing the entry point
    /// that matches the plugin version detected at runtime.
    /// </summary>
    /// <returns>True when the native call reports success; false when the requested
    /// feature (non-quad shape, or non-zero layer on a legacy plugin) is unsupported.</returns>
    public static bool SetOverlayQuad(bool onTop, bool headLocked, IntPtr leftTexture, IntPtr rightTexture, IntPtr device, Posef pose, Vector3f scale, int layerIndex = 0, OverlayShape shape = OverlayShape.Quad)
    {
        if (version >= OVRP_1_6_0.version)
        {
            // Encode the behaviour switches into the flags bitmask.
            uint flags = (uint)OverlayFlag.None;
            if (onTop)
            {
                flags |= (uint)OverlayFlag.OnTop;
            }
            if (headLocked)
            {
                flags |= (uint)OverlayFlag.HeadLocked;
            }

            if (shape == OverlayShape.Cylinder || shape == OverlayShape.Cubemap)
            {
#if UNITY_ANDROID
                // Non-quad shapes are folded into the flags word, supported from 1.7.0 on.
                if (version >= OVRP_1_7_0.version)
                {
                    flags |= (uint)(shape) << OverlayShapeFlagShift;
                }
                else
#endif
                // NOTE(review): preprocessor-sensitive control flow — outside
                // UNITY_ANDROID this return is the unconditional body of the shape
                // check; inside it, it is the 'else' branch of the version check.
                return(false);
            }

            return(OVRP_1_6_0.ovrp_SetOverlayQuad3(flags, leftTexture, rightTexture, device, pose, scale, layerIndex) == Bool.True);
        }

        // Legacy plugins only support a single overlay layer.
        if (layerIndex != 0)
        {
            return(false);
        }

        // Pre-1.6 fallback: no right-eye texture, behaviour switches passed as Bools.
        return(OVRP_0_1_1.ovrp_SetOverlayQuad2(ToBool(onTop), ToBool(headLocked), leftTexture, device, pose, scale) == Bool.True);
    }
Exemple #16
0
    /// <summary>
    /// Submits a mono overlay quad, dispatching to whichever native plugin entry
    /// point matches the version detected at runtime.
    /// </summary>
    /// <returns>True when the native call reports success; false for a non-zero
    /// layer index on a legacy (pre-1.6) plugin.</returns>
    public static bool SetOverlayQuad(bool onTop, bool headLocked, IntPtr texture, IntPtr device, Posef pose, Vector3f scale, int layerIndex = 0)
    {
        // OVRP 1.6+: behaviour switches are folded into a flags bitmask.
        if (version >= OVRP_1_6_0.version)
        {
            uint flags = (uint)OverlayFlag.None;
            if (onTop)
            {
                flags |= (uint)OverlayFlag.OnTop;
            }
            if (headLocked)
            {
                flags |= (uint)OverlayFlag.HeadLocked;
            }

            return OVRP_1_6_0.ovrp_SetOverlayQuad3(flags, texture, IntPtr.Zero, device, pose, scale, layerIndex) == Bool.True;
        }

        // Legacy plugins only support a single overlay layer.
        if (layerIndex != 0)
        {
            return false;
        }

        // OVRP 0.1.1 added head-locking; otherwise fall back to the 0.1.0 signature.
        return version >= OVRP_0_1_1.version
            ? OVRP_0_1_1.ovrp_SetOverlayQuad2(ToBool(onTop), ToBool(headLocked), texture, device, pose, scale) == Bool.True
            : OVRP_0_1_0.ovrp_SetOverlayQuad(ToBool(onTop), texture, device, pose, scale) == Bool.True;
    }
Exemple #17
0
 /// <summary>
 /// Native plugin entry point that submits a mono overlay quad with explicit
 /// on-top / head-locked switches for the given device, pose and scale.
 /// </summary>
 public static extern Bool ovrp_SetOverlayQuad2(Bool onTop, Bool headLocked, IntPtr texture, IntPtr device, Posef pose, Vector3f scale);
Exemple #18
0
 /// <summary>
 /// Native plugin entry point that submits a mono overlay quad with an on-top
 /// switch for the given device, pose and scale.
 /// </summary>
 public static extern Bool SetOverlayQuad(Bool onTop, IntPtr texture, IntPtr device, Posef pose, Vector3f scale);
Exemple #19
0
	/// <summary>
	/// Native plugin entry point that submits a mono overlay quad with an on-top
	/// switch for the given device, pose and scale.
	/// </summary>
	public static extern Bool SetOverlayQuad(Bool onTop, IntPtr texture, IntPtr device, Posef pose, Vector3f scale);
Exemple #20
0
 /// <summary>
 /// Native entry point that sets the session's tracking origin to the given pose.
 /// NOTE(review): parameter name "orignPose" looks like a typo for "originPose";
 /// left unchanged because renaming could break named-argument callers.
 /// </summary>
 public static extern Result ovr_SpecifyTrackingOrigin(ovrSession session, Posef orignPose);
 /// <summary>
 /// Submits an overlay quad through the newest available legacy plugin entry point.
 /// </summary>
 public static bool SetOverlayQuad(bool onTop, bool headLocked, IntPtr texture, IntPtr device, Posef pose, Vector3f scale)
 {
     // OVRP 0.1.1 added head-locking; otherwise fall back to the 0.1.0 signature.
     return version >= OVRP_0_1_1.version
         ? OVRP_0_1_1.ovrp_SetOverlayQuad2(ToBool(onTop), ToBool(headLocked), texture, device, pose, scale) == Bool.True
         : OVRP_0_1_0.ovrp_SetOverlayQuad(ToBool(onTop), texture, device, pose, scale) == Bool.True;
 }
Exemple #22
0
        private static void Main()
        {
            RenderForm form = new RenderForm("OculusWrap SharpDX demo");

            IntPtr          sessionPtr;
            InputLayout     inputLayout          = null;
            Buffer          contantBuffer        = null;
            Buffer          vertexBuffer         = null;
            ShaderSignature shaderSignature      = null;
            PixelShader     pixelShader          = null;
            ShaderBytecode  pixelShaderByteCode  = null;
            VertexShader    vertexShader         = null;
            ShaderBytecode  vertexShaderByteCode = null;
            Texture2D       mirrorTextureD3D     = null;

            EyeTexture[]      eyeTextures                = null;
            DeviceContext     immediateContext           = null;
            DepthStencilState depthStencilState          = null;
            DepthStencilView  depthStencilView           = null;
            Texture2D         depthBuffer                = null;
            RenderTargetView  backBufferRenderTargetView = null;
            Texture2D         backBuffer = null;

            SharpDX.DXGI.SwapChain swapChain = null;
            Factory       factory            = null;
            MirrorTexture mirrorTexture      = null;
            Guid          textureInterfaceId = new Guid("6f15aaf2-d208-4e89-9ab4-489535d34f9c");                                                            // Interface ID of the Direct3D Texture2D interface.

            Result result;

            OvrWrap OVR = OvrWrap.Create();

            // Define initialization parameters with debug flag.
            InitParams initializationParameters = new InitParams();

            initializationParameters.Flags = InitFlags.Debug | InitFlags.RequestVersion;
            initializationParameters.RequestedMinorVersion = 17;

            // Initialize the Oculus runtime.
            string errorReason = null;

            try
            {
                result = OVR.Initialize(initializationParameters);

                if (result < Result.Success)
                {
                    errorReason = result.ToString();
                }
            }
            catch (Exception ex)
            {
                errorReason = ex.Message;
            }

            if (errorReason != null)
            {
                MessageBox.Show("Failed to initialize the Oculus runtime library:\r\n" + errorReason, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }

            // Use the head mounted display.
            sessionPtr = IntPtr.Zero;
            var graphicsLuid = new GraphicsLuid();

            result = OVR.Create(ref sessionPtr, ref graphicsLuid);
            if (result < Result.Success)
            {
                MessageBox.Show("The HMD is not enabled: " + result.ToString(), "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }

            var hmdDesc = OVR.GetHmdDesc(sessionPtr);


            try
            {
                // Create a set of layers to submit.
                eyeTextures = new EyeTexture[2];

                // Create DirectX drawing device.
                SharpDX.Direct3D11.Device device = new Device(SharpDX.Direct3D.DriverType.Hardware, DeviceCreationFlags.Debug);

                // Create DirectX Graphics Interface factory, used to create the swap chain.
                factory = new SharpDX.DXGI.Factory4();

                immediateContext = device.ImmediateContext;

                // Define the properties of the swap chain.
                SwapChainDescription swapChainDescription = new SwapChainDescription();
                swapChainDescription.BufferCount            = 1;
                swapChainDescription.IsWindowed             = true;
                swapChainDescription.OutputHandle           = form.Handle;
                swapChainDescription.SampleDescription      = new SampleDescription(1, 0);
                swapChainDescription.Usage                  = Usage.RenderTargetOutput | Usage.ShaderInput;
                swapChainDescription.SwapEffect             = SwapEffect.Sequential;
                swapChainDescription.Flags                  = SwapChainFlags.AllowModeSwitch;
                swapChainDescription.ModeDescription.Width  = form.Width;
                swapChainDescription.ModeDescription.Height = form.Height;
                swapChainDescription.ModeDescription.Format = Format.R8G8B8A8_UNorm;
                swapChainDescription.ModeDescription.RefreshRate.Numerator   = 0;
                swapChainDescription.ModeDescription.RefreshRate.Denominator = 1;

                // Create the swap chain.
                swapChain = new SwapChain(factory, device, swapChainDescription);

                // Retrieve the back buffer of the swap chain.
                backBuffer = swapChain.GetBackBuffer <Texture2D>(0);
                backBufferRenderTargetView = new RenderTargetView(device, backBuffer);

                // Create a depth buffer, using the same width and height as the back buffer.
                Texture2DDescription depthBufferDescription = new Texture2DDescription();
                depthBufferDescription.Format            = Format.D32_Float;
                depthBufferDescription.ArraySize         = 1;
                depthBufferDescription.MipLevels         = 1;
                depthBufferDescription.Width             = form.Width;
                depthBufferDescription.Height            = form.Height;
                depthBufferDescription.SampleDescription = new SampleDescription(1, 0);
                depthBufferDescription.Usage             = ResourceUsage.Default;
                depthBufferDescription.BindFlags         = BindFlags.DepthStencil;
                depthBufferDescription.CpuAccessFlags    = CpuAccessFlags.None;
                depthBufferDescription.OptionFlags       = ResourceOptionFlags.None;

                // Define how the depth buffer will be used to filter out objects, based on their distance from the viewer.
                DepthStencilStateDescription depthStencilStateDescription = new DepthStencilStateDescription();
                depthStencilStateDescription.IsDepthEnabled  = true;
                depthStencilStateDescription.DepthComparison = Comparison.Less;
                depthStencilStateDescription.DepthWriteMask  = DepthWriteMask.Zero;

                // Create the depth buffer.
                depthBuffer       = new Texture2D(device, depthBufferDescription);
                depthStencilView  = new DepthStencilView(device, depthBuffer);
                depthStencilState = new DepthStencilState(device, depthStencilStateDescription);

                var viewport = new Viewport(0, 0, hmdDesc.Resolution.Width, hmdDesc.Resolution.Height, 0.0f, 1.0f);

                immediateContext.OutputMerger.SetDepthStencilState(depthStencilState);
                immediateContext.OutputMerger.SetRenderTargets(depthStencilView, backBufferRenderTargetView);
                immediateContext.Rasterizer.SetViewport(viewport);

                // Retrieve the DXGI device, in order to set the maximum frame latency.
                using (SharpDX.DXGI.Device1 dxgiDevice = device.QueryInterface <SharpDX.DXGI.Device1>())
                {
                    dxgiDevice.MaximumFrameLatency = 1;
                }

                var layerEyeFov = new LayerEyeFov();
                layerEyeFov.Header.Type  = LayerType.EyeFov;
                layerEyeFov.Header.Flags = LayerFlags.None;

                for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    EyeType eye        = (EyeType)eyeIndex;
                    var     eyeTexture = new EyeTexture();
                    eyeTextures[eyeIndex] = eyeTexture;

                    // Retrieve size and position of the texture for the current eye.
                    eyeTexture.FieldOfView           = hmdDesc.DefaultEyeFov[eyeIndex];
                    eyeTexture.TextureSize           = OVR.GetFovTextureSize(sessionPtr, eye, hmdDesc.DefaultEyeFov[eyeIndex], 1.0f);
                    eyeTexture.RenderDescription     = OVR.GetRenderDesc(sessionPtr, eye, hmdDesc.DefaultEyeFov[eyeIndex]);
                    eyeTexture.HmdToEyeViewOffset    = eyeTexture.RenderDescription.HmdToEyePose.Position;
                    eyeTexture.ViewportSize.Position = new Vector2i(0, 0);
                    eyeTexture.ViewportSize.Size     = eyeTexture.TextureSize;
                    eyeTexture.Viewport = new Viewport(0, 0, eyeTexture.TextureSize.Width, eyeTexture.TextureSize.Height, 0.0f, 1.0f);

                    // Define a texture at the size recommended for the eye texture.
                    eyeTexture.Texture2DDescription                   = new Texture2DDescription();
                    eyeTexture.Texture2DDescription.Width             = eyeTexture.TextureSize.Width;
                    eyeTexture.Texture2DDescription.Height            = eyeTexture.TextureSize.Height;
                    eyeTexture.Texture2DDescription.ArraySize         = 1;
                    eyeTexture.Texture2DDescription.MipLevels         = 1;
                    eyeTexture.Texture2DDescription.Format            = Format.R8G8B8A8_UNorm;
                    eyeTexture.Texture2DDescription.SampleDescription = new SampleDescription(1, 0);
                    eyeTexture.Texture2DDescription.Usage             = ResourceUsage.Default;
                    eyeTexture.Texture2DDescription.CpuAccessFlags    = CpuAccessFlags.None;
                    eyeTexture.Texture2DDescription.BindFlags         = BindFlags.ShaderResource | BindFlags.RenderTarget;

                    // Convert the SharpDX texture description to the Oculus texture swap chain description.
                    TextureSwapChainDesc textureSwapChainDesc = SharpDXHelpers.CreateTextureSwapChainDescription(eyeTexture.Texture2DDescription);

                    // Create a texture swap chain, which will contain the textures to render to, for the current eye.
                    IntPtr textureSwapChainPtr;

                    result = OVR.CreateTextureSwapChainDX(sessionPtr, device.NativePointer, ref textureSwapChainDesc, out textureSwapChainPtr);
                    WriteErrorDetails(OVR, result, "Failed to create swap chain.");

                    eyeTexture.SwapTextureSet = new TextureSwapChain(OVR, sessionPtr, textureSwapChainPtr);


                    // Retrieve the number of buffers of the created swap chain.
                    int textureSwapChainBufferCount;
                    result = eyeTexture.SwapTextureSet.GetLength(out textureSwapChainBufferCount);
                    WriteErrorDetails(OVR, result, "Failed to retrieve the number of buffers of the created swap chain.");

                    // Create room for each DirectX texture in the SwapTextureSet.
                    eyeTexture.Textures          = new Texture2D[textureSwapChainBufferCount];
                    eyeTexture.RenderTargetViews = new RenderTargetView[textureSwapChainBufferCount];

                    // Create a texture 2D and a render target view, for each unmanaged texture contained in the SwapTextureSet.
                    for (int textureIndex = 0; textureIndex < textureSwapChainBufferCount; textureIndex++)
                    {
                        // Retrieve the Direct3D texture contained in the Oculus TextureSwapChainBuffer.
                        IntPtr swapChainTextureComPtr = IntPtr.Zero;
                        result = eyeTexture.SwapTextureSet.GetBufferDX(textureIndex, textureInterfaceId, out swapChainTextureComPtr);
                        WriteErrorDetails(OVR, result, "Failed to retrieve a texture from the created swap chain.");

                        // Create a managed Texture2D, based on the unmanaged texture pointer.
                        eyeTexture.Textures[textureIndex] = new Texture2D(swapChainTextureComPtr);

                        // Create a render target view for the current Texture2D.
                        eyeTexture.RenderTargetViews[textureIndex] = new RenderTargetView(device, eyeTexture.Textures[textureIndex]);
                    }

                    // Define the depth buffer, at the size recommended for the eye texture.
                    eyeTexture.DepthBufferDescription                   = new Texture2DDescription();
                    eyeTexture.DepthBufferDescription.Format            = Format.D32_Float;
                    eyeTexture.DepthBufferDescription.Width             = eyeTexture.TextureSize.Width;
                    eyeTexture.DepthBufferDescription.Height            = eyeTexture.TextureSize.Height;
                    eyeTexture.DepthBufferDescription.ArraySize         = 1;
                    eyeTexture.DepthBufferDescription.MipLevels         = 1;
                    eyeTexture.DepthBufferDescription.SampleDescription = new SampleDescription(1, 0);
                    eyeTexture.DepthBufferDescription.Usage             = ResourceUsage.Default;
                    eyeTexture.DepthBufferDescription.BindFlags         = BindFlags.DepthStencil;
                    eyeTexture.DepthBufferDescription.CpuAccessFlags    = CpuAccessFlags.None;
                    eyeTexture.DepthBufferDescription.OptionFlags       = ResourceOptionFlags.None;

                    // Create the depth buffer.
                    eyeTexture.DepthBuffer      = new Texture2D(device, eyeTexture.DepthBufferDescription);
                    eyeTexture.DepthStencilView = new DepthStencilView(device, eyeTexture.DepthBuffer);

                    // Specify the texture to show on the HMD.
                    if (eyeIndex == 0)
                    {
                        layerEyeFov.ColorTextureLeft      = eyeTexture.SwapTextureSet.TextureSwapChainPtr;
                        layerEyeFov.ViewportLeft.Position = new Vector2i(0, 0);
                        layerEyeFov.ViewportLeft.Size     = eyeTexture.TextureSize;
                        layerEyeFov.FovLeft = eyeTexture.FieldOfView;
                    }
                    else
                    {
                        layerEyeFov.ColorTextureRight      = eyeTexture.SwapTextureSet.TextureSwapChainPtr;
                        layerEyeFov.ViewportRight.Position = new Vector2i(0, 0);
                        layerEyeFov.ViewportRight.Size     = eyeTexture.TextureSize;
                        layerEyeFov.FovRight = eyeTexture.FieldOfView;
                    }
                }

                MirrorTextureDesc mirrorTextureDescription = new MirrorTextureDesc();
                mirrorTextureDescription.Format    = TextureFormat.R8G8B8A8_UNorm_SRgb;
                mirrorTextureDescription.Width     = form.Width;
                mirrorTextureDescription.Height    = form.Height;
                mirrorTextureDescription.MiscFlags = TextureMiscFlags.None;

                // Create the texture used to display the rendered result on the computer monitor.
                IntPtr mirrorTexturePtr;
                result = OVR.CreateMirrorTextureDX(sessionPtr, device.NativePointer, ref mirrorTextureDescription, out mirrorTexturePtr);
                WriteErrorDetails(OVR, result, "Failed to create mirror texture.");

                mirrorTexture = new MirrorTexture(OVR, sessionPtr, mirrorTexturePtr);


                // Retrieve the Direct3D texture contained in the Oculus MirrorTexture.
                IntPtr mirrorTextureComPtr = IntPtr.Zero;
                result = mirrorTexture.GetBufferDX(textureInterfaceId, out mirrorTextureComPtr);
                WriteErrorDetails(OVR, result, "Failed to retrieve the texture from the created mirror texture buffer.");

                // Create a managed Texture2D, based on the unmanaged texture pointer.
                mirrorTextureD3D = new Texture2D(mirrorTextureComPtr);

                #region Vertex and pixel shader
                // Create vertex shader.
                vertexShaderByteCode = ShaderBytecode.CompileFromFile("Shaders.fx", "VertexShaderPositionColor", "vs_4_0");
                vertexShader         = new VertexShader(device, vertexShaderByteCode);

                // Create pixel shader.
                pixelShaderByteCode = ShaderBytecode.CompileFromFile("Shaders.fx", "PixelShaderPositionColor", "ps_4_0");
                pixelShader         = new PixelShader(device, pixelShaderByteCode);

                shaderSignature = ShaderSignature.GetInputSignature(vertexShaderByteCode);

                // Specify that each vertex consists of a single vertex position and color.
                InputElement[] inputElements = new InputElement[]
                {
                    new InputElement("POSITION", 0, Format.R32G32B32A32_Float, 0, 0),
                    new InputElement("COLOR", 0, Format.R32G32B32A32_Float, 16, 0)
                };

                // Define an input layout to be passed to the vertex shader.
                inputLayout = new InputLayout(device, shaderSignature, inputElements);

                // Create a vertex buffer, containing our 3D model.
                vertexBuffer = Buffer.Create(device, BindFlags.VertexBuffer, m_vertices);

                // Create a constant buffer, to contain our WorldViewProjection matrix, that will be passed to the vertex shader.
                contantBuffer = new Buffer(device, Utilities.SizeOf <Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);

                // Setup the immediate context to use the shaders and model we defined.
                immediateContext.InputAssembler.InputLayout       = inputLayout;
                immediateContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
                immediateContext.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBuffer, sizeof(float) * 4 * 2, 0));
                immediateContext.VertexShader.SetConstantBuffer(0, contantBuffer);
                immediateContext.VertexShader.Set(vertexShader);
                immediateContext.PixelShader.Set(pixelShader);
                #endregion

                DateTime startTime = DateTime.Now;
                Vector3  position  = new Vector3(0, 0, -1);

                #region Render loop
                RenderLoop.Run(form, () =>
                {
                    Vector3f[] hmdToEyeViewOffsets = { eyeTextures[0].HmdToEyeViewOffset, eyeTextures[1].HmdToEyeViewOffset };
                    double displayMidpoint         = OVR.GetPredictedDisplayTime(sessionPtr, 0);
                    TrackingState trackingState    = OVR.GetTrackingState(sessionPtr, displayMidpoint, true);
                    Posef[] eyePoses = new Posef[2];

                    // Calculate the position and orientation of each eye.
                    OVR.CalcEyePoses(trackingState.HeadPose.ThePose, hmdToEyeViewOffsets, ref eyePoses);

                    float timeSinceStart = (float)(DateTime.Now - startTime).TotalSeconds;

                    for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                    {
                        EyeType eye           = (EyeType)eyeIndex;
                        EyeTexture eyeTexture = eyeTextures[eyeIndex];

                        if (eyeIndex == 0)
                        {
                            layerEyeFov.RenderPoseLeft = eyePoses[0];
                        }
                        else
                        {
                            layerEyeFov.RenderPoseRight = eyePoses[1];
                        }

                        // Update the render description at each frame, as the HmdToEyeOffset can change at runtime.
                        eyeTexture.RenderDescription = OVR.GetRenderDesc(sessionPtr, eye, hmdDesc.DefaultEyeFov[eyeIndex]);

                        // Retrieve the index of the active texture
                        int textureIndex;
                        result = eyeTexture.SwapTextureSet.GetCurrentIndex(out textureIndex);
                        WriteErrorDetails(OVR, result, "Failed to retrieve texture swap chain current index.");

                        immediateContext.OutputMerger.SetRenderTargets(eyeTexture.DepthStencilView, eyeTexture.RenderTargetViews[textureIndex]);
                        immediateContext.ClearRenderTargetView(eyeTexture.RenderTargetViews[textureIndex], Color.Black);
                        immediateContext.ClearDepthStencilView(eyeTexture.DepthStencilView, DepthStencilClearFlags.Depth | DepthStencilClearFlags.Stencil, 1.0f, 0);
                        immediateContext.Rasterizer.SetViewport(eyeTexture.Viewport);

                        // Retrieve the eye rotation quaternion and use it to calculate the LookAt direction and the LookUp direction.
                        Quaternion rotationQuaternion = SharpDXHelpers.ToQuaternion(eyePoses[eyeIndex].Orientation);
                        Matrix rotationMatrix         = Matrix.RotationQuaternion(rotationQuaternion);
                        Vector3 lookUp = Vector3.Transform(new Vector3(0, -1, 0), rotationMatrix).ToVector3();
                        Vector3 lookAt = Vector3.Transform(new Vector3(0, 0, 1), rotationMatrix).ToVector3();

                        Vector3 viewPosition = position - eyePoses[eyeIndex].Position.ToVector3();

                        Matrix world      = Matrix.Scaling(0.1f) * Matrix.RotationX(timeSinceStart / 10f) * Matrix.RotationY(timeSinceStart * 2 / 10f) * Matrix.RotationZ(timeSinceStart * 3 / 10f);
                        Matrix viewMatrix = Matrix.LookAtLH(viewPosition, viewPosition + lookAt, lookUp);

                        Matrix projectionMatrix = OVR.Matrix4f_Projection(eyeTexture.FieldOfView, 0.1f, 100.0f, ProjectionModifier.LeftHanded).ToMatrix();
                        projectionMatrix.Transpose();

                        Matrix worldViewProjection = world * viewMatrix * projectionMatrix;
                        worldViewProjection.Transpose();

                        // Update the transformation matrix.
                        immediateContext.UpdateSubresource(ref worldViewProjection, contantBuffer);

                        // Draw the cube
                        immediateContext.Draw(m_vertices.Length / 2, 0);

                        // Commits any pending changes to the TextureSwapChain, and advances its current index
                        result = eyeTexture.SwapTextureSet.Commit();
                        WriteErrorDetails(OVR, result, "Failed to commit the swap chain texture.");
                    }


                    result = OVR.SubmitFrame(sessionPtr, 0L, IntPtr.Zero, ref layerEyeFov);
                    WriteErrorDetails(OVR, result, "Failed to submit the frame of the current layers.");

                    immediateContext.CopyResource(mirrorTextureD3D, backBuffer);
                    swapChain.Present(0, PresentFlags.None);
                });
                #endregion
            }
            finally
            {
                if (immediateContext != null)
                {
                    immediateContext.ClearState();
                    immediateContext.Flush();
                }

                // Release all resources
                Dispose(inputLayout);
                Dispose(contantBuffer);
                Dispose(vertexBuffer);
                Dispose(shaderSignature);
                Dispose(pixelShader);
                Dispose(pixelShaderByteCode);
                Dispose(vertexShader);
                Dispose(vertexShaderByteCode);
                Dispose(mirrorTextureD3D);
                Dispose(mirrorTexture);
                Dispose(eyeTextures[0]);
                Dispose(eyeTextures[1]);
                Dispose(immediateContext);
                Dispose(depthStencilState);
                Dispose(depthStencilView);
                Dispose(depthBuffer);
                Dispose(backBufferRenderTargetView);
                Dispose(backBuffer);
                Dispose(swapChain);
                Dispose(factory);

                // Disposing the device, before the hmd, will cause the hmd to fail when disposing.
                // Disposing the device, after the hmd, will cause the dispose of the device to fail.
                // It looks as if the hmd steals ownership of the device and destroys it, when it's shutting down.
                // device.Dispose();
                OVR.Destroy(sessionPtr);
            }
        }
Exemple #23
0
    /// <summary>
    /// Shows an overlay of the given shape using the newest plugin entry point available.
    /// On 1.6.0+ the on-top/head-locked options (and, on Android 1.7.0+, the shape) are
    /// encoded into a flags word for ovrp_SetOverlayQuad3; older plugins fall back to
    /// ovrp_SetOverlayQuad2, which supports neither layers nor non-quad shapes.
    /// </summary>
    /// <returns>true when the native call reports success; false otherwise, including when
    /// the requested layer/shape is unsupported by the loaded plugin version.</returns>
    public static bool SetOverlayQuad(bool onTop, bool headLocked, IntPtr texture, IntPtr device, Posef pose, Vector3f scale, int layerIndex=0, OverlayShape shape=OverlayShape.Quad)
    {
        // Legacy path (pre-1.6.0): only layer 0 exists and only plain quads are supported.
        if (version < OVRP_1_6_0.version)
        {
            if (layerIndex != 0)
                return false;

            return OVRP_0_1_1.ovrp_SetOverlayQuad2(ToBool(onTop), ToBool(headLocked), texture, device, pose, scale) == Bool.True;
        }

        uint flags = (uint)OverlayFlag.None;
        if (onTop)
            flags |= (uint)OverlayFlag.OnTop;
        if (headLocked)
            flags |= (uint)OverlayFlag.HeadLocked;

        if (shape == OverlayShape.Cylinder || shape == OverlayShape.Cubemap)
        {
#if UNITY_ANDROID
            // From 1.7.0 (Android only) the shape is packed into the high bits of the flags.
            if (version >= OVRP_1_7_0.version)
                flags |= (uint)(shape) << OverlayShapeFlagShift;
            else
#endif
                return false;
        }

        return OVRP_1_6_0.ovrp_SetOverlayQuad3(flags, texture, IntPtr.Zero, device, pose, scale, layerIndex) == Bool.True;
    }
Exemple #24
0
 /// <summary>
 /// Formats a pose for display as "( w, x, y, z, px, py, pz )":
 /// orientation components first, then position components.
 /// </summary>
 static string edit_pose(Posef p)
 {
     // Interpolation uses the same default ToString() formatting as the
     // original string concatenation, so the output is byte-identical.
     return $"( {p.w}, {p.x}, {p.y}, {p.z}, {p.px}, {p.py}, {p.pz} )";
 }
Exemple #25
0
 /// <summary>
 /// Native LibOVR call that writes a handedness-flipped copy of <paramref name="inPose"/>
 /// into <paramref name="outPose"/>. NOTE(review): presumably converts between left- and
 /// right-handed coordinate conventions — confirm against the LibOVR utility docs.
 /// </summary>
 public static extern void ovrPosef_FlipHandedness(ref Posef inPose, ref Posef outPose);
Exemple #26
0
 /// <summary>
 /// Native LibOVR call that computes the two per-eye poses from a head pose and the
 /// per-eye HMD-to-eye offsets; the results are written into <paramref name="outEyePoses"/>
 /// (callers pass a 2-element array, one entry per eye).
 /// </summary>
 public static extern void ovr_CalcEyePoses(Posef headPose, Vector3[] hmdToEyeOffset, [Out] Posef[] outEyePoses);
Exemple #27
0
 /// <summary>
 /// Generated OpenXR binding: sets the reported location (pose) of an input source,
 /// identified by its top-level and input-source paths, within the given space for the
 /// session. NOTE(review): semantics come from the generated binding signature only —
 /// confirm against the XR_EXT_conformance_automation specification.
 /// </summary>
 public partial Result SetInputDeviceLocation([Count(Count = 0)] Session session, [Count(Count = 0)] ulong topLevelPath, [Count(Count = 0)] ulong inputSourcePath, [Count(Count = 0)] Space space, [Count(Count = 0)] Posef pose);
Exemple #28
0
        /// <summary>
        /// Integration test: creates a session, builds a minimal LayerEyeFov with one
        /// committed texture swap chain per eye, and submits a single frame through
        /// ovr_SubmitFrame, asserting success at each step.
        /// </summary>
        public void Session_SubmitFrame()
        {
            IntPtr sessionPtr = CreateSession();

            Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

            // Define field of view (This is used for both left and right eye).
            FovPort fieldOfView = new FovPort();

            fieldOfView.DownTan  = (float)Math.Tan(0.523598776);              // 0.523598776 radians = 30 degrees.
            fieldOfView.UpTan    = (float)Math.Tan(0.523598776);              // 0.523598776 radians = 30 degrees.
            fieldOfView.LeftTan  = (float)Math.Tan(0.785398163);              // 0.785398163 radians = 45 degrees.
            fieldOfView.RightTan = (float)Math.Tan(0.785398163);              // 0.785398163 radians = 45 degrees.

            EyeRenderDesc renderDescLeft  = OVR.GetRenderDesc(sessionPtr, EyeType.Left, fieldOfView);
            // BUGFIX: the right-eye render description was previously requested with EyeType.Left.
            EyeRenderDesc renderDescRight = OVR.GetRenderDesc(sessionPtr, EyeType.Right, fieldOfView);

            var viewScaleDesc = new ViewScaleDesc();

            viewScaleDesc.HmdToEyePose0 = renderDescLeft.HmdToEyePose;
            viewScaleDesc.HmdToEyePose1 = renderDescRight.HmdToEyePose;
            viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1;

            // Determine texture size matching the field of view.
            Sizei sizeLeft  = OVR.GetFovTextureSize(sessionPtr, EyeType.Left, fieldOfView, 1.0f);
            Sizei sizeRight = OVR.GetFovTextureSize(sessionPtr, EyeType.Right, fieldOfView, 1.0f);

            var    hmdToEyeViewOffset = new Vector3f[2];
            var    poses = new Posef[2];
            double sensorSampleTime;

            hmdToEyeViewOffset[0].X = -0.1f;
            hmdToEyeViewOffset[1].X = 0.1f;

            OVR.GetEyePoses(sessionPtr, 0, true, hmdToEyeViewOffset, ref poses, out sensorSampleTime);

            // Create a set of layers to submit.
            LayerEyeFov layer = new LayerEyeFov();

            layer.Header.Type = LayerType.EyeFov;

            Result result;

            using (TestEngine testEngine = CreateTestEngine(sessionPtr))
            {
                try
                {
                    // Create a texture for the left eye.
                    layer.ColorTextureLeft      = CreateTextureSwapChain(sessionPtr, testEngine);
                    layer.ViewportLeft.Position = new Vector2i(0, 0);
                    layer.ViewportLeft.Size     = sizeLeft;
                    layer.FovLeft        = fieldOfView;
                    layer.RenderPoseLeft = poses[0];

                    // Create a texture for the right eye.
                    layer.ColorTextureRight      = CreateTextureSwapChain(sessionPtr, testEngine);
                    layer.ViewportRight.Position = new Vector2i(0, 0);
                    // BUGFIX: the right viewport previously reused sizeLeft (copy-paste error).
                    layer.ViewportRight.Size     = sizeRight;
                    layer.FovRight        = fieldOfView;
                    layer.RenderPoseRight = poses[1];


                    // The created texture swap chain must be committed to the Oculus SDK, before using it in the
                    // call to ovr_SubmitFrame, otherwise ovr_SubmitFrame will fail.
                    result = OVR.CommitTextureSwapChain(sessionPtr, layer.ColorTextureLeft);
                    Assert.IsTrue(result >= Result.Success);

                    result = OVR.CommitTextureSwapChain(sessionPtr, layer.ColorTextureRight);
                    Assert.IsTrue(result >= Result.Success);


                    // SubmitFrame requires pointer to an array of pointers to Layer objects
                    var layerPointers = new IntPtr[1];

                    GCHandle layerHandle         = GCHandle.Alloc(layer, GCHandleType.Pinned);
                    GCHandle layerPointersHandle = GCHandle.Alloc(layerPointers, GCHandleType.Pinned);

                    try
                    {
                        layerPointers[0] = layerHandle.AddrOfPinnedObject();

                        result = OVR.SubmitFrame(sessionPtr, 0L, IntPtr.Zero, layerPointersHandle.AddrOfPinnedObject(), 1);
                        Assert.IsTrue(result >= Result.Success);
                    }
                    finally
                    {
                        // Always release the pinned handles, even when an assert above throws;
                        // otherwise the pinned objects leak for the remainder of the test run.
                        layerPointersHandle.Free();
                        layerHandle.Free();
                    }
                }
                finally
                {
                    if (layer.ColorTextureLeft != IntPtr.Zero)
                    {
                        OVR.DestroyTextureSwapChain(sessionPtr, layer.ColorTextureLeft);
                    }

                    if (layer.ColorTextureRight != IntPtr.Zero)
                    {
                        OVR.DestroyTextureSwapChain(sessionPtr, layer.ColorTextureRight);
                    }
                }
            }
        }
Exemple #29
-1
	/// <summary>
	/// Legacy overlay-quad entry point: routes to ovrp_SetOverlayQuad2 when the plugin
	/// supports head-locked overlays (0.1.1+), otherwise falls back to ovrp_SetOverlayQuad.
	/// </summary>
	/// <returns>true when the native call reports success.</returns>
	public static bool SetOverlayQuad(bool onTop, bool headLocked, IntPtr texture, IntPtr device, Posef pose, Vector3f scale)
	{
		// Pre-0.1.1 plugins have no head-locked parameter; that option is dropped there.
		if (version < OVRP_0_1_1.version)
			return OVRP_0_1_0.ovrp_SetOverlayQuad(ToBool(onTop), texture, device, pose, scale) == Bool.True;

		return OVRP_0_1_1.ovrp_SetOverlayQuad2(ToBool(onTop), ToBool(headLocked), texture, device, pose, scale) == Bool.True;
	}
Exemple #30
-1
 /// <summary>
 /// Native OVRPlugin 1.6.0+ entry point: shows an overlay with packed behavior/shape
 /// flags, per-eye textures, pose, scale and layer index. Callers in this file pass
 /// IntPtr.Zero for <paramref name="textureRight"/> when using a single texture.
 /// </summary>
 public static extern Bool ovrp_SetOverlayQuad3(uint flags, IntPtr textureLeft, IntPtr textureRight, IntPtr device, Posef pose, Vector3f scale, int layerIndex);
Exemple #31
-1
		/// <summary>
		/// Native OVRPlugin 0.1.1+ entry point: shows an overlay quad with separate
		/// on-top and head-locked options, the given texture, pose and scale.
		/// </summary>
		public static extern Bool ovrp_SetOverlayQuad2(Bool onTop, Bool headLocked, IntPtr texture, IntPtr device, Posef pose, Vector3f scale);