Example #1
        public void Session_GetFovTextureSize()
        {
            IntPtr sessionPtr = CreateSession();

            Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

            FovPort fieldOfView = new FovPort();

            fieldOfView.DownTan  = (float)Math.Tan(0.523598776);                                 // 0.523598776 radians = 30 degrees.
            fieldOfView.UpTan    = (float)Math.Tan(0.523598776);                                 // 0.523598776 radians = 30 degrees.
            fieldOfView.LeftTan  = (float)Math.Tan(0.785398163);                                 // 0.785398163 radians = 45 degrees.
            fieldOfView.RightTan = (float)Math.Tan(0.785398163);                                 // 0.785398163 radians = 45 degrees.

            Sizei size = OVR.GetFovTextureSize(sessionPtr, EyeType.Left, fieldOfView, 1.0f);

            // Sizes reported by earlier SDK versions, kept for reference:
            //Assert.AreEqual(1099, size.Width);
            //Assert.AreEqual(635, size.Height);

            //Assert.AreEqual(1586, size.Width);
            //Assert.AreEqual(915, size.Height);

            // The following is reported on 1.17:
            Assert.AreEqual(1600, size.Width);
            Assert.AreEqual(928, size.Height);
        }
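The four tangent assignments above are repeated verbatim in most of the tests below. A small helper that builds a symmetric FovPort from half-angles in degrees keeps the conversion in one place; a minimal sketch (the BuildSymmetricFov name is hypothetical, FovPort is the wrapper struct used throughout these examples):

        // Hypothetical helper: builds a symmetric FovPort from half-angles given in degrees.
        private static FovPort BuildSymmetricFov(float verticalHalfAngleDegrees, float horizontalHalfAngleDegrees)
        {
            FovPort fov = new FovPort();

            fov.DownTan = fov.UpTan    = (float)Math.Tan(verticalHalfAngleDegrees   * Math.PI / 180.0);
            fov.LeftTan = fov.RightTan = (float)Math.Tan(horizontalHalfAngleDegrees * Math.PI / 180.0);

            return fov;
        }

        // Equivalent to the assignments above: 30 degrees up/down, 45 degrees left/right.
        FovPort fieldOfView = BuildSymmetricFov(30f, 45f);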
Example #2
    private void ConfigureEyeDesc(OVREye eye)
    {
#if !UNITY_ANDROID || UNITY_EDITOR
        HmdDesc desc = OVRManager.capiHmd.GetDesc();
        FovPort fov  = desc.DefaultEyeFov[(int)eye];
        fov.LeftTan = fov.RightTan = Mathf.Max(fov.LeftTan, fov.RightTan);
        fov.UpTan   = fov.DownTan = Mathf.Max(fov.UpTan, fov.DownTan);

        // Configure Stereo settings. Default pixel density is one texel per pixel.
        float desiredPixelDensity = 1f;
        Sizei texSize             = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fov, desiredPixelDensity);

        float fovH = 2f * Mathf.Rad2Deg * Mathf.Atan(fov.LeftTan);
        float fovV = 2f * Mathf.Rad2Deg * Mathf.Atan(fov.UpTan);

        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = texSize.ToVector2(),
            fov        = new Vector2(fovH, fovV)
        };
#else
        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = new Vector2(1024, 1024),
            fov        = new Vector2(90, 90)
        };
#endif
    }
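The Max calls above deliberately make the FOV symmetric: Unity cameras model a symmetric frustum, and only for a symmetric FovPort do the two Atan terms recover the full horizontal and vertical angles. A worked check with the tangent values used elsewhere in these examples:

        // With LeftTan = RightTan = tan(45 degrees), the recovered horizontal FOV is
        // 2 * atan(tan(45 deg)) = 90 degrees; 30-degree half-angles give 60 degrees vertically.
        float tan45 = Mathf.Tan(45f * Mathf.Deg2Rad);
        float fovH  = 2f * Mathf.Rad2Deg * Mathf.Atan(tan45);   // = 90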
Example #3
    private void ConfigureEyeDesc(OVREye eye)
    {
        Vector2 texSize = Vector2.zero;
        Vector2 fovSize = Vector2.zero;

#if !UNITY_ANDROID || UNITY_EDITOR
        if (!OVRManager.instance.isVRPresent)
        {
            return;
        }

        FovPort fovPort = OVRManager.capiHmd.GetDesc().DefaultEyeFov[(int)eye];
        fovPort.LeftTan = fovPort.RightTan = Mathf.Max(fovPort.LeftTan, fovPort.RightTan);
        fovPort.UpTan   = fovPort.DownTan = Mathf.Max(fovPort.UpTan, fovPort.DownTan);

        texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fovPort, OVRManager.instance.nativeTextureScale).ToVector2();
        fovSize = new Vector2(2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.LeftTan), 2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.UpTan));
#else
        texSize = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
        fovSize = new Vector2(90, 90);
#endif

        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = texSize,
            fov        = fovSize
        };
    }
Example #4
 /// <summary>
 /// Creates a new LayerEyeFov.
 /// </summary>
 public LayerEyeFov()
 {
     Header       = new LayerHeader();
     Header.Type  = LayerType.EyeFov;
     ColorTexture = new IntPtr[2];
     Viewport     = new Recti[2];
     Fov          = new FovPort[2];
     RenderPose   = new Posef[2];
 }
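The two-element arrays are indexed by eye, 0 for the left and 1 for the right, as the render loop in example #17 below shows. A minimal usage sketch (the swap-chain pointers and FovPort values are placeholders for whatever your session created):

     LayerEyeFov layer = new LayerEyeFov();

     layer.ColorTexture[0] = leftEyeSwapChain;   // Placeholder: IntPtr of the left-eye texture swap chain.
     layer.ColorTexture[1] = rightEyeSwapChain;  // Placeholder: IntPtr of the right-eye texture swap chain.
     layer.Fov[0]          = leftEyeFov;         // Placeholder: e.g. HmdDesc.DefaultEyeFov[0].
     layer.Fov[1]          = rightEyeFov;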
Example #5
        public static Matrix4 ovrMatrix4f_Projection(FovPort fov, float znear, float zfar, ProjectionModifier projectionModFlags)
        {
            Matrix4 ret = _ovrMatrix4f_Projection(fov, znear, zfar, projectionModFlags);

            // The OVR matrix layout is row-major, but OpenGL expects column-major.
            ret.Transpose();

            return ret;
        }
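Because the wrapper has already transposed into column-major order, the matrix can be handed to OpenGL without asking GL to transpose it again. A minimal sketch, assuming an OpenTK shader program with a "projection" uniform (shaderProgram and fov are placeholders):

            // The matrix is column-major after Transpose(), so pass transpose: false.
            int projectionLocation = GL.GetUniformLocation(shaderProgram, "projection");
            Matrix4 projection = ovrMatrix4f_Projection(fov, 0.1f, 1000.0f, ProjectionModifier.ClipRangeOpenGL);
            GL.UniformMatrix4(projectionLocation, false, ref projection);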
Example #6
    /// <summary>
    /// Gets the given eye's projection matrix.
    /// </summary>
    /// <param name="eyeId">Specifies the eye.</param>
    /// <param name="nearClip">The distance to the near clipping plane.</param>
    /// <param name="farClip">The distance to the far clipping plane.</param>
    public Matrix4x4 GetProjection(int eyeId, float nearClip, float farClip)
    {
#if !UNITY_ANDROID || UNITY_EDITOR
        FovPort fov = OVRManager.capiHmd.GetDesc().DefaultEyeFov[eyeId];

        return Hmd.GetProjection(fov, nearClip, farClip, true).ToMatrix4x4();
#else
        return new Matrix4x4();
#endif
    }
Example #7
    /// <summary>
    /// Gets the given eye's projection matrix.
    /// </summary>
    /// <param name="eyeId">Specifies the eye.</param>
    /// <param name="nearClip">The distance to the near clipping plane.</param>
    /// <param name="farClip">The distance to the far clipping plane.</param>
    public Matrix4x4 GetProjection(int eyeId, float nearClip, float farClip)
    {
#if !UNITY_ANDROID || UNITY_EDITOR
        FovPort fov = OVRManager.capiHmd.GetDesc().DefaultEyeFov[eyeId];

        uint projectionModFlags = (uint)Hmd.ProjectionModifier.RightHanded;

        return Hmd.GetProjection(fov, nearClip, farClip, projectionModFlags).ToMatrix4x4();
#else
        return new Matrix4x4();
#endif
    }
Example #8
        public void ovrMatrix4f_Projection()
        {
            // Define field of view (This is used for both left and right eye).
            FovPort fieldOfView = new FovPort();

            fieldOfView.DownTan  = (float)Math.Tan(0.523598776);                 // 0.523598776 radians = 30 degrees.
            fieldOfView.UpTan    = (float)Math.Tan(0.523598776);                 // 0.523598776 radians = 30 degrees.
            fieldOfView.LeftTan  = (float)Math.Tan(0.785398163);                 // 0.785398163 radians = 45 degrees.
            fieldOfView.RightTan = (float)Math.Tan(0.785398163);                 // 0.785398163 radians = 45 degrees.

            // Create a projection matrix using the default (right-handed) convention.
            Matrix4f matrix = OVR.Matrix4f_Projection(fieldOfView, 0.1f, 1.0f, ProjectionModifier.None);

            Assert.IsTrue(matrix.M11 != 0);
            Assert.IsTrue(matrix.M22 != 0);
            Assert.IsTrue(matrix.M33 != 0);
        }
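The non-zero asserts above are weak. For the standard OVR projection layout, the horizontal scale is 2 / (LeftTan + RightTan) and the vertical scale is 2 / (UpTan + DownTan), so a symmetric FOV allows a slightly stronger, hedged check (assumes M11 and M22 are those scale terms):

            // For LeftTan = RightTan = tan(45 deg) = 1, the horizontal scale is 2 / (1 + 1) = 1.
            float expectedM11 = 2f / (fieldOfView.LeftTan + fieldOfView.RightTan);
            float expectedM22 = 2f / (fieldOfView.UpTan + fieldOfView.DownTan);

            Assert.AreEqual(expectedM11, matrix.M11, 0.001f);
            Assert.AreEqual(expectedM22, matrix.M22, 0.001f);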
Example #9
        public void TimewarpProjectionDesc_FromProjection()
        {
            // Define field of view (This is used for both left and right eye).
            FovPort fieldOfView = new FovPort();

            fieldOfView.DownTan  = (float)Math.Tan(0.523598776);                 // 0.523598776 radians = 30 degrees.
            fieldOfView.UpTan    = (float)Math.Tan(0.523598776);                 // 0.523598776 radians = 30 degrees.
            fieldOfView.LeftTan  = (float)Math.Tan(0.785398163);                 // 0.785398163 radians = 45 degrees.
            fieldOfView.RightTan = (float)Math.Tan(0.785398163);                 // 0.785398163 radians = 45 degrees.

            // Create a projection matrix (default right-handed convention), used to create a timewarp projection description.
            Matrix4f matrix = OVR.Matrix4f_Projection(fieldOfView, 0.1f, 1.0f, ProjectionModifier.None);

            TimewarpProjectionDesc timewarpProjectionDesc = OVR.TimewarpProjectionDesc_FromProjection(matrix, ProjectionModifier.None);

            Assert.IsTrue(timewarpProjectionDesc.Projection22 != 0);
            Assert.IsTrue(timewarpProjectionDesc.Projection23 != 0);
            Assert.IsTrue(timewarpProjectionDesc.Projection32 != 0);
        }
Example #10
        public void Session_GetRenderDesc()
        {
            IntPtr sessionPtr = CreateSession();

            Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

            // Define field of view (This is used for both left and right eye).
            FovPort fieldOfView = new FovPort();

            fieldOfView.DownTan  = (float)Math.Tan(0.523598776);                                 // 0.523598776 radians = 30 degrees.
            fieldOfView.UpTan    = (float)Math.Tan(0.523598776);                                 // 0.523598776 radians = 30 degrees.
            fieldOfView.LeftTan  = (float)Math.Tan(0.785398163);                                 // 0.785398163 radians = 45 degrees.
            fieldOfView.RightTan = (float)Math.Tan(0.785398163);                                 // 0.785398163 radians = 45 degrees.

            EyeRenderDesc renderDesc = OVR.GetRenderDesc(sessionPtr, EyeType.Left, fieldOfView);

            // The returned render description should echo the requested field of view.
            Assert.AreEqual(fieldOfView, renderDesc.Fov);

            // Test that the GetRenderDesc is returning the correct eye position (or that the workaround is working)
            Assert.AreNotEqual(renderDesc.HmdToEyePose.Position.X, 0.0f);
        }
Example #11
        /// <summary>
        /// Creates a simple Direct3D graphics engine, used in unit tests.
        /// </summary>
        /// <param name="session">Existing session used to retrieve the size of the test engine.</param>
        /// <returns>Created test engine.</returns>
        /// <remarks>Remember to dispose the created test engine, after use.</remarks>
        private TestEngine CreateTestEngine(IntPtr session)
        {
            // Define field of view (This is used for both left and right eye).
            FovPort fieldOfView = new FovPort();

            fieldOfView.DownTan  = (float)Math.Tan(0.523598776);                 // 0.523598776 radians = 30 degrees.
            fieldOfView.UpTan    = (float)Math.Tan(0.523598776);                 // 0.523598776 radians = 30 degrees.
            fieldOfView.LeftTan  = (float)Math.Tan(0.785398163);                 // 0.785398163 radians = 45 degrees.
            fieldOfView.RightTan = (float)Math.Tan(0.785398163);                 // 0.785398163 radians = 45 degrees.

            EyeRenderDesc renderDescLeft  = OVR.GetRenderDesc(session, EyeType.Left, fieldOfView);
            EyeRenderDesc renderDescRight = OVR.GetRenderDesc(session, EyeType.Right, fieldOfView);

            // Determine texture size matching the field of view.
            Sizei sizeLeft  = OVR.GetFovTextureSize(session, EyeType.Left, fieldOfView, 1.0f);
            Sizei sizeRight = OVR.GetFovTextureSize(session, EyeType.Right, fieldOfView, 1.0f);

            TestEngine testEngine = new TestEngine(sizeLeft.Width + sizeRight.Width, sizeLeft.Height);

            return testEngine;
        }
Example #12
 // As published, the declaration lacks its DllImport attribute. The runtime name below is an
 // assumption (LibOVRRT64_1.dll is the 64-bit Oculus 1.x runtime); use the DLL your wrapper actually loads.
 [DllImport("LibOVRRT64_1.dll", CallingConvention = CallingConvention.Cdecl)]
 public static extern EyeRenderDesc ovr_GetRenderDesc(ovrSession session, EyeType eye, FovPort fov);
Example #13
 public static extern Sizei ovr_GetFovTextureSize(ovrSession session, EyeType eye, FovPort fov, float pixelsPerDisplayPixel);
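The pixelsPerDisplayPixel argument scales the recommended texture size: 1.0f requests one texel per display pixel (the density example #2 calls the default), and smaller values trade sharpness for fill rate. A usage sketch, with session and fieldOfView standing in for the values built in the tests above:

 // Half pixel density: the returned size is roughly half of the full-density (1.0f) result in each dimension.
 Sizei halfDensitySize = ovr_GetFovTextureSize(session, EyeType.Left, fieldOfView, 0.5f);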
Example #14
        /// <summary>
        /// UpdateRenderingContext is called from the BeginVirtualRealityRenderingStep and should update the properties in the RenderingContext according to the current eye.
        /// See remarks for more info about the usual tasks that are performed in this method.
        /// </summary>
        /// <remarks>
        /// <para>
        /// <b>UpdateRenderingContext</b> is called from the BeginVirtualRealityRenderingStep and should update the properties in the RenderingContext according to the current eye.
        /// </para>
        /// <para>
        /// This method is usually called from the <see cref="BeginVirtualRealityRenderingStep"/> (when the virtual reality provider is enabled).
        /// </para>
        /// <para>
        /// Usually this method does the following:<br/>
        /// - Creates the back buffers and views that are needed for rendering
        /// (the back buffers are also recreated if the size of <see cref="RenderingContext.CurrentBackBufferDescription"/> is different from the size of previously created back buffers).<br/>
        /// - Updates the <see cref="VirtualRealityContext.CurrentEye"/> property.<br/>
        /// - Sets the <see cref="RenderingContext.UsedCamera"/> property to a <see cref="StereoscopicCamera"/><br/>
        /// - Calls <see cref="RenderingContext.SetBackBuffer"/> method and sets the new back buffers.<br/>
        /// - Updates <see cref="ResolveMultisampledBackBufferRenderingStep.DestinationBuffer"/> on the <see cref="DXScene.DefaultResolveMultisampledBackBufferRenderingStep"/> and sets it to the eye texture.
        /// </para>
        /// </remarks>
        /// <param name="renderingContext">RenderingContext</param>
        /// <param name="isNewFrame">true if no eye was yet rendered for the current frame; false if the first eye was already rendered for the current frame and we need to render the second eye</param>
        public override void UpdateRenderingContext(RenderingContext renderingContext, bool isNewFrame)
        {
            // This code is executed inside BeginVirtualRealityRenderingStep before all the objects are rendered.

            // Base method does:
            // - sets the virtualRealityContext.CurrentEye based on the isNewFrame parameter: isNewFrame == true => LeftEye else RightEye
            // - ensures that stereoscopicCamera is created and sets its properties for the current eye and based on the current EyeSeparation, Parallax and InvertLeftRightView
            // - sets renderingContext.UsedCamera = stereoscopicCamera
            base.UpdateRenderingContext(renderingContext, isNewFrame);


            var virtualRealityContext = renderingContext.VirtualRealityContext;

            Eye     currentEye = virtualRealityContext.CurrentEye;
            EyeType ovrEye     = currentEye == Eye.Left ? EyeType.Left : EyeType.Right;

            int eyeIndex = currentEye == Eye.Left ? 0 : 1;


            FovPort defaultEyeFov = _hmdDesc.DefaultEyeFov[eyeIndex];
            var     idealSize     = _ovr.GetFovTextureSize(_sessionPtr, ovrEye, defaultEyeFov, _pixelsPerDisplayPixel);

            // When rendering this frame for the first time, make sure all required resources exist:
            // create or recreate the RenderTargetViews and DepthStencilViews if the swap chain
            // is missing or its size no longer matches the ideal eye texture size.
            if (isNewFrame &&
                (_eyeTextureSwapChains[eyeIndex] == null ||
                 _eyeTextureSwapChains[eyeIndex].Size.Width != idealSize.Width ||
                 _eyeTextureSwapChains[eyeIndex].Size.Height != idealSize.Height))
            {
                CreateResources(renderingContext.DXScene);
            }

            if (isNewFrame)
            {
                _ovr.GetEyePoses(_sessionPtr, 0L, true, _hmdToEyeOffset, ref _eyePoses, out _sensorSampleTime);
            }

            var camera = renderingContext.DXScene.Camera;


            // From OculusRoomTiny main.cpp #221

            //Get the pose information
            var eyeQuat = SharpDXHelpers.ToQuaternion(_eyePoses[eyeIndex].Orientation);
            var eyePos  = SharpDXHelpers.ToVector3(_eyePoses[eyeIndex].Position);

            // Get view and projection matrices for the Rift camera
            Vector3 cameraPosition       = camera.GetCameraPosition();
            Matrix  cameraRotationMatrix = camera.View;

            cameraRotationMatrix.M41 = 0; // Remove translation
            cameraRotationMatrix.M42 = 0;
            cameraRotationMatrix.M43 = 0;

            cameraRotationMatrix.Invert(); // Invert to get rotation matrix

            Vector4 rotatedEyePos4 = Vector3.Transform(eyePos, cameraRotationMatrix);
            var     rotatedEyePos  = new Vector3(rotatedEyePos4.X, rotatedEyePos4.Y, rotatedEyePos4.Z);

            var finalCameraPosition = cameraPosition + rotatedEyePos;

            var eyeQuaternionMatrix = Matrix.RotationQuaternion(eyeQuat);
            var finalRotationMatrix = eyeQuaternionMatrix * cameraRotationMatrix;

            Vector4 lookDirection4 = Vector3.Transform(new Vector3(0, 0, -1), finalRotationMatrix);
            var     lookDirection  = new Vector3(lookDirection4.X, lookDirection4.Y, lookDirection4.Z);

            Vector4 upDirection4 = Vector3.Transform(Vector3.UnitY, finalRotationMatrix);
            var     upDirection  = new Vector3(upDirection4.X, upDirection4.Y, upDirection4.Z);

            var viewMatrix = Matrix.LookAtRH(finalCameraPosition, finalCameraPosition + lookDirection, upDirection);



            float zNear = camera.NearPlaneDistance;
            float zFar  = camera.FarPlaneDistance;

            if (zNear < 0.05f)
            {
                zNear = 0.05f;
            }

            zFar *= 1.2f; // Increase zFar: FarPlaneDistance is not exact here, because the eye's Position can sit above the Camera's position and leave the camera slightly higher.

            //zNear = 0.1f;
            //zFar = 100;

            var eyeRenderDesc = _ovr.GetRenderDesc(_sessionPtr, ovrEye, _hmdDesc.DefaultEyeFov[eyeIndex]);

            var projectionMatrix = _ovr.Matrix4f_Projection(eyeRenderDesc.Fov, zNear, zFar, ProjectionModifier.None).ToMatrix();

            projectionMatrix.Transpose();

            _matrixCamera.Projection = projectionMatrix;
            _matrixCamera.View       = viewMatrix;
            _matrixCamera.SetCameraPosition(finalCameraPosition);

            renderingContext.UsedCamera = _matrixCamera;


            // Change the current viewport
            renderingContext.CurrentViewport = _eyeTextureSwapChains[eyeIndex].Viewport;
            renderingContext.DeviceContext.Rasterizer.SetViewport(renderingContext.CurrentViewport);

            if (_msaaBackBuffer == null)
            {
                renderingContext.SetBackBuffer(backBuffer: _eyeTextureSwapChains[eyeIndex].CurrentTexture,
                                               backBufferDescription: _eyeTextureSwapChains[eyeIndex].CurrentTextureDescription,
                                               renderTargetView: _eyeTextureSwapChains[eyeIndex].CurrentRTView,
                                               depthStencilView: _eyeTextureSwapChains[eyeIndex].CurrentDepthStencilView,
                                               bindNewRenderTargetsToDeviceContext: false); // Do not bind new buffers because this is done in the next rendering step - PrepareRenderTargetsRenderingStep
            }
            else
            {
                // MSAA
                renderingContext.SetBackBuffer(backBuffer: _msaaBackBuffer,
                                               backBufferDescription: _msaaBackBufferDescription,
                                               renderTargetView: _msaaBackBufferRenderTargetView,
                                               depthStencilView: _msaaDepthStencilView,
                                               bindNewRenderTargetsToDeviceContext: false); // Do not bind new buffers because this is done in the next rendering step - PrepareRenderTargetsRenderingStep

                renderingContext.DXScene.DefaultResolveMultisampledBackBufferRenderingStep.DestinationBuffer = _eyeTextureSwapChains[eyeIndex].CurrentTexture;
            }


            // When rendering this frame for the first time, set NewViewport on the ChangeBackBufferRenderingStep to reset the viewport from the split-screen viewport back to the final full-screen viewport.
            if (isNewFrame && _resetViewportRenderingStep != null)
            {
                int backBufferWidth  = renderingContext.FinalBackBufferDescription.Width;
                int backBufferHeight = renderingContext.FinalBackBufferDescription.Height;

                _resetViewportRenderingStep.NewViewport = new ViewportF(0, 0, backBufferWidth, backBufferHeight);
            }
        }
Example #15
        public void Session_SubmitFrame()
        {
            IntPtr sessionPtr = CreateSession();

            Assert.AreNotEqual(IntPtr.Zero, sessionPtr);

            // Define field of view (This is used for both left and right eye).
            FovPort fieldOfView = new FovPort();

            fieldOfView.DownTan  = (float)Math.Tan(0.523598776);              // 0.523598776 radians = 30 degrees.
            fieldOfView.UpTan    = (float)Math.Tan(0.523598776);              // 0.523598776 radians = 30 degrees.
            fieldOfView.LeftTan  = (float)Math.Tan(0.785398163);              // 0.785398163 radians = 45 degrees.
            fieldOfView.RightTan = (float)Math.Tan(0.785398163);              // 0.785398163 radians = 45 degrees.

            EyeRenderDesc renderDescLeft  = OVR.GetRenderDesc(sessionPtr, EyeType.Left, fieldOfView);
            EyeRenderDesc renderDescRight = OVR.GetRenderDesc(sessionPtr, EyeType.Right, fieldOfView);

            var viewScaleDesc = new ViewScaleDesc();

            viewScaleDesc.HmdToEyePose0 = renderDescLeft.HmdToEyePose;
            viewScaleDesc.HmdToEyePose1 = renderDescRight.HmdToEyePose;
            viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1;

            // Determine texture size matching the field of view.
            Sizei sizeLeft  = OVR.GetFovTextureSize(sessionPtr, EyeType.Left, fieldOfView, 1.0f);
            Sizei sizeRight = OVR.GetFovTextureSize(sessionPtr, EyeType.Right, fieldOfView, 1.0f);

            var    hmdToEyeViewOffset = new Vector3f[2];
            var    poses = new Posef[2];
            double sensorSampleTime;

            hmdToEyeViewOffset[0].X = -0.1f;
            hmdToEyeViewOffset[1].X = 0.1f;

            OVR.GetEyePoses(sessionPtr, 0, true, hmdToEyeViewOffset, ref poses, out sensorSampleTime);

            // Create a set of layers to submit.
            LayerEyeFov layer = new LayerEyeFov();

            layer.Header.Type = LayerType.EyeFov;

            Result result;

            using (TestEngine testEngine = CreateTestEngine(sessionPtr))
            {
                try
                {
                    // Create a texture for the left eye.
                    layer.ColorTextureLeft      = CreateTextureSwapChain(sessionPtr, testEngine);
                    layer.ViewportLeft.Position = new Vector2i(0, 0);
                    layer.ViewportLeft.Size     = sizeLeft;
                    layer.FovLeft        = fieldOfView;
                    layer.RenderPoseLeft = poses[0];

                    // Create a texture for the right eye.
                    layer.ColorTextureRight      = CreateTextureSwapChain(sessionPtr, testEngine);
                    layer.ViewportRight.Position = new Vector2i(0, 0);
                    layer.ViewportRight.Size     = sizeRight;
                    layer.FovRight        = fieldOfView;
                    layer.RenderPoseRight = poses[1];


                    // The created texture swap chain must be committed to the Oculus SDK, before using it in the
                    // call to ovr_SubmitFrame, otherwise ovr_SubmitFrame will fail.
                    result = OVR.CommitTextureSwapChain(sessionPtr, layer.ColorTextureLeft);
                    Assert.IsTrue(result >= Result.Success);

                    result = OVR.CommitTextureSwapChain(sessionPtr, layer.ColorTextureRight);
                    Assert.IsTrue(result >= Result.Success);


                    // SubmitFrame requires pointer to an array of pointers to Layer objects
                    var layerPointers = new IntPtr[1];

                    GCHandle layerHandle         = GCHandle.Alloc(layer, GCHandleType.Pinned);
                    GCHandle layerPointersHandle = GCHandle.Alloc(layerPointers, GCHandleType.Pinned);

                    layerPointers[0] = layerHandle.AddrOfPinnedObject();

                    result = OVR.SubmitFrame(sessionPtr, 0L, IntPtr.Zero, layerPointersHandle.AddrOfPinnedObject(), 1);
                    Assert.IsTrue(result >= Result.Success);

                    layerPointersHandle.Free();
                    layerHandle.Free();
                }
                finally
                {
                    if (layer.ColorTextureLeft != IntPtr.Zero)
                    {
                        OVR.DestroyTextureSwapChain(sessionPtr, layer.ColorTextureLeft);
                    }

                    if (layer.ColorTextureRight != IntPtr.Zero)
                    {
                        OVR.DestroyTextureSwapChain(sessionPtr, layer.ColorTextureRight);
                    }
                }
            }
        }
Example #16
 static extern Matrix4 _ovrMatrix4f_Projection(FovPort fov, float znear, float zfar, ProjectionModifier projectionModFlags);
Example #17
			protected override void OnRenderFrame(FrameEventArgs e)
			{
				base.OnRenderFrame(e);
				GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);

				Vector3 camPos = new Vector3(0, 2, 5);
				Quaternion camOri = Quaternion.Identity;


#if RENDER_OCULUS
				for (int i = 0; i < 2; i++)
				{
					//zoom field of view
					var fov = new FovPort
					{
						DownTan = eyes[i].desc.Fov.DownTan / zoomFactor,
						UpTan = eyes[i].desc.Fov.UpTan / zoomFactor,
						LeftTan = eyes[i].desc.Fov.LeftTan / zoomFactor,
						RightTan = eyes[i].desc.Fov.RightTan / zoomFactor
					};


					eyes[i].proj = OvrDLL.ovrMatrix4f_Projection(fov, 0.1f, 1000.0f, ProjectionModifier.ClipRangeOpenGL);

					//bind eye fbo
					bindFbo(eyes[i]);

					//combine the "camera" position/rotation with the position/rotation of the eye
					Vector3 finalPos = camPos + (camOri * eyes[i].pose.Position);
					Quaternion finalOri = camOri * eyes[i].pose.Orientation;

					//create the view matrix from look-at basis vectors (use the combined camera + eye orientation)
					Vector3 up = finalOri * Vector3.UnitY;
					Vector3 fwd = finalOri * -Vector3.UnitZ;
					Matrix4 view = Matrix4.LookAt(finalPos, finalPos + fwd, up);

					//draw the scene
					drawScene(view, eyes[i].proj);

					//commit the swapchain
					OvrDLL.ovr_CommitTextureSwapChain(session, eyes[i].swapChain);
				}

				//send to Oculus
				LayerEyeFov eyeFov = layers[0] as LayerEyeFov;
				eyeFov.Header.Flags = LayerFlags.TextureOriginAtBottomLeft;
				eyeFov.ColorTexture[0] = eyes[0].swapChain;
				eyeFov.ColorTexture[1] = eyes[1].swapChain;
				eyeFov.Fov[0] = eyes[0].desc.Fov;
				eyeFov.Fov[1] = eyes[1].desc.Fov;
				eyeFov.Viewport[0] = new Recti(new Vector2i(0,0), eyes[0].renderTargetSize);
				eyeFov.Viewport[1] = new Recti(new Vector2i(0, 0), eyes[1].renderTargetSize);
				eyeFov.RenderPose[0] = eyes[0].pose;
				eyeFov.RenderPose[1] = eyes[1].pose;

				ViewScaleDesc viewScale = new ViewScaleDesc();
				viewScale.HmdSpaceToWorldScaleInMeters = 1.0f; // Set as in example #15; leaving it at the default 0 is not meaningful.
				result = OvrDLL.ovr_SubmitFrame(session, 0, ref viewScale, layers.GetUnmanagedLayers(), 1);
				if (result < 0)
				{
					Console.WriteLine("Error submitting frame");
					OvrDLL.ovr_GetLastErrorInfo(ref error);
					Console.WriteLine("Last Error Info: {0}-{1}", error.result, error.ErrorString);
				}

				//blit mirror to fbo
				OvrDLL.ovr_GetMirrorTextureBufferGL(session, mirror, ref myMirrorTexture);
				blitMirror((int)myMirrorTexture);
#else
				Matrix4 view = Matrix4.CreateTranslation(-camPos);
				Matrix4 proj = Matrix4.CreatePerspectiveFieldOfView(MathHelper.DegreesToRadians(60.0f), 1.0f, 0.1f, 1000.0f);
				drawScene(view, proj);
#endif
				SwapBuffers();
			}