Example #1
        protected override void OnLoad(EventArgs e)
        {
            base.OnLoad(e);

            InitShader();
            InitBuffer();

            // Initialize the Oculus runtime.
            bool success = wrap.Initialize();
            if (!success)
            {
                MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
                Exit();
                return;
            }

            // Use the head mounted display.
            OVR.GraphicsLuid graphicsLuid;
            hmd = wrap.Hmd_Create(out graphicsLuid);
            if (hmd == null)
            {
                MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
                Exit();
                return;
            }

            if (hmd.ProductName == string.Empty)
            {
                MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
                Exit();
                return;
            }

            Console.WriteLine("SDK Version: " + wrap.GetVersionString());

            for (int i = 0; i < 2; i++)
            {
                OVR.Sizei idealTextureSize = hmd.GetFovTextureSize((OVR.EyeType)i, hmd.DefaultEyeFov[i], 1);
                eyeRenderTexture[i] = new OvrSharedRendertarget(idealTextureSize.Width, idealTextureSize.Height, hmd);
                eyeDepthBuffer[i] = new DepthBuffer(eyeRenderTexture[i].Width, eyeRenderTexture[i].Height);
            }

            // Mirror texture for the regular desktop monitor - a copy of what is rendered to the Rift.
            hmd.CreateMirrorTextureGL((uint)All.Srgb8Alpha8, this.Width, this.Height, out mirrorTex);

            layerFov = layers.AddLayerEyeFov();
            layerFov.Header.Flags = OVR.LayerFlags.TextureOriginAtBottomLeft; // OpenGL Texture coordinates start from bottom left
            layerFov.Header.Type = OVR.LayerType.EyeFov;

            // Render target for the mirror image shown in the desktop window
            GL.GenFramebuffers(1, out mirrorFbo);
            GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, mirrorFbo);
            GL.FramebufferTexture2D(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2D, mirrorTex.Texture.TexId, 0);
            GL.FramebufferRenderbuffer(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.DepthAttachment, RenderbufferTarget.Renderbuffer, 0);
            GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, 0);

            EyeRenderDesc[0] = hmd.GetRenderDesc(OVR.EyeType.Left, hmd.DefaultEyeFov[0]);
            EyeRenderDesc[1] = hmd.GetRenderDesc(OVR.EyeType.Right, hmd.DefaultEyeFov[1]);

            // Specify which HMD capability bits to enable.
            hmd.SetEnabledCaps(OVR.HmdCaps.DebugDevice);

            // Start the sensor
            //Update SDK 0.8: Usage of ovr_ConfigureTracking is no longer needed unless you want to disable tracking features. By default, ovr_Create enables the full tracking capabilities supported by any given device.
            //hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None);

            this.VSync = VSyncMode.Off;

            hmd.RecenterPose();

            // Init GL
            GL.Enable(EnableCap.DepthTest);
        }
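
The mirror framebuffer set up above only becomes visible once it is blitted to the window's default framebuffer every frame. The following is a minimal sketch of that step, assuming an OpenTK OnRenderFrame override and the mirrorFbo field from the example above; the actual eye rendering and layer submission are omitted.

        protected override void OnRenderFrame(FrameEventArgs e)
        {
            base.OnRenderFrame(e);

            // ... render both eyes into eyeRenderTexture[] and submit layerFov to the compositor ...

            // Blit the mirror texture (attached to mirrorFbo in OnLoad) onto the window's
            // default framebuffer, flipping vertically because the mirror texture's origin
            // is at the bottom left. The mirror texture was created at window size above.
            GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, mirrorFbo);
            GL.BindFramebuffer(FramebufferTarget.DrawFramebuffer, 0);
            GL.BlitFramebuffer(0, this.Height, this.Width, 0, 0, 0, this.Width, this.Height, ClearBufferMask.ColorBufferBit, BlitFramebufferFilter.Nearest);
            GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, 0);

            SwapBuffers();
        }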
Example #2
        protected override void OnLoad(EventArgs e)
        {
            base.OnLoad(e);

            InitShader();
            InitBuffer();

            // Define initialization parameters with debug flag.
            OVRTypes.InitParams initializationParameters = new OVRTypes.InitParams();
            initializationParameters.Flags = OVRTypes.InitFlags.Debug;

            // Initialize the Oculus runtime.
            bool success = wrap.Initialize(initializationParameters);
            if (!success)
            {
                MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
                Exit();
                return;
            }

            // Use the head mounted display.
            OVRTypes.GraphicsLuid graphicsLuid;
            hmd = wrap.Hmd_Create(out graphicsLuid);
            if (hmd == null)
            {
                MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
                Exit();
                return;
            }

            if (hmd.ProductName == string.Empty)
            {
                MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
                Exit();
                return;
            }

            Console.WriteLine("SDK Version: " + wrap.GetVersionString());

            try
            {
                for (int i = 0; i < 2; i++)
                {
                    OVRTypes.Sizei idealTextureSize = hmd.GetFovTextureSize((OVRTypes.EyeType)i, hmd.DefaultEyeFov[i], 1);
                    eyeRenderTexture[i] = new TextureBuffer(wrap, hmd, true, true, idealTextureSize, 1, IntPtr.Zero, 1);
                    eyeDepthBuffer[i] = new DepthBuffer(eyeRenderTexture[i].GetSize(), 0);
                }

                // Note: the mirror window can be any size; this sample uses half the HMD resolution.
                windowSize = new OVRTypes.Sizei(hmd.Resolution.Width / 2, hmd.Resolution.Height / 2);

                // Mirror texture for the regular desktop monitor - a copy of what is rendered to the Rift.
                OVRTypes.MirrorTextureDesc mirrorTextureDescription = new OVRTypes.MirrorTextureDesc();
                mirrorTextureDescription.Format = OVRTypes.TextureFormat.R8G8B8A8_UNORM_SRGB;
                mirrorTextureDescription.Width = windowSize.Width;
                mirrorTextureDescription.Height = windowSize.Height;
                mirrorTextureDescription.MiscFlags = OVRTypes.TextureMiscFlags.None;

                // Create the texture used to display the rendered result on the computer monitor.
                OVRTypes.Result result;
                result = hmd.CreateMirrorTextureGL(mirrorTextureDescription, out mirrorTexture);
                WriteErrorDetails(wrap, result, "Failed to create mirror texture.");

                layerFov = layers.AddLayerEyeFov();
                layerFov.Header.Flags = OVRTypes.LayerFlags.TextureOriginAtBottomLeft; // OpenGL Texture coordinates start from bottom left
                layerFov.Header.Type = OVRTypes.LayerType.EyeFov;

                // Configure the mirror read buffer
                uint texId;
                result = mirrorTexture.GetBufferGL(out texId);
                WriteErrorDetails(wrap, result, "Failed to retrieve the texture from the created mirror texture buffer.");

                // Render target for the mirror image shown in the desktop window
                GL.GenFramebuffers(1, out mirrorFbo);
                GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, mirrorFbo);
                GL.FramebufferTexture2D(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2D, texId, 0);
                GL.FramebufferRenderbuffer(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.DepthAttachment, RenderbufferTarget.Renderbuffer, 0);
                GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, 0);

                // Turn off vsync to let the compositor do its magic
                this.VSync = VSyncMode.Off; //wglSwapIntervalEXT(0);

                // FloorLevel will give tracking poses where the floor height is 0
                result = hmd.SetTrackingOriginType(OVRTypes.TrackingOrigin.FloorLevel);
                WriteErrorDetails(wrap, result, "Failed to set tracking origin type.");

                GL.Enable(EnableCap.DepthTest); //DO NOT DELETE IT IN FUTURE UPDATES!
            }
            catch
            {
                // Release all resources
                Dispose(layers);
                if (mirrorFbo != 0) GL.DeleteFramebuffers(1, ref mirrorFbo);
                Dispose(mirrorTexture);
                for (int eyeIndex = 0; eyeIndex < 2; ++eyeIndex)
                {
                    Dispose(eyeRenderTexture[eyeIndex]);
                    Dispose(eyeDepthBuffer[eyeIndex]);
                }

                // Disposing the device, before the hmd, will cause the hmd to fail when disposing.
                // Disposing the device, after the hmd, will cause the dispose of the device to fail.
                // It looks as if the hmd steals ownership of the device and destroys it, when it's shutting down.
                // device.Dispose();
                Dispose(hmd);
                Dispose(wrap);
            }
        }
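
The cleanup sequence in the catch block above is also the order an orderly shutdown needs. A minimal sketch, assuming OpenTK's OnUnload override and the same Dispose helper and fields used in the example:

        protected override void OnUnload(EventArgs e)
        {
            base.OnUnload(e);

            // Release resources in the same order as the error path in OnLoad.
            Dispose(layers);
            if (mirrorFbo != 0) GL.DeleteFramebuffers(1, ref mirrorFbo);
            Dispose(mirrorTexture);
            for (int eyeIndex = 0; eyeIndex < 2; ++eyeIndex)
            {
                Dispose(eyeRenderTexture[eyeIndex]);
                Dispose(eyeDepthBuffer[eyeIndex]);
            }

            // As noted above, dispose the HMD before the wrapper.
            Dispose(hmd);
            Dispose(wrap);
        }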
        public void SetAndClearRenderSurface(DepthBuffer dbuffer)
        {
            uint curTexId;
            OVRTypes.Result result;

            if (TextureChain != null)
            {
                int curIndex;
                result = TextureChain.GetCurrentIndex(out curIndex);
                WriteErrorDetails(wrap, result, "Failed to retrieve texture swap chain current index.");
                result = TextureChain.GetBufferGL(curIndex, out curTexId);
                WriteErrorDetails(wrap, result, "Failed to retrieve a texture from the created swap chain.");
            }
            else
            {
                curTexId = texId;
            }

            GL.BindFramebuffer(FramebufferTarget.Framebuffer, fboId);
            GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2D, curTexId, 0);
            GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.DepthAttachment, TextureTarget.Texture2D, dbuffer.TexId, 0);

            GL.Viewport(0, 0, texSize.Width, texSize.Height);
            GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);

            // Disable sRGB framebuffer conversion - needed for the correct color space.
            GL.Disable(EnableCap.FramebufferSrgb);
        }
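
After the scene has been drawn into the bound swap chain texture, the attachments are typically detached again and the chain committed before the frame is submitted. The sketch below shows that companion step under two assumptions: the method names (UnsetRenderSurface, Commit) are illustrative, and TextureChain is assumed to expose ovr_CommitTextureSwapChain as a Commit() call returning an OVRTypes.Result.

        public void UnsetRenderSurface()
        {
            // Detach the color and depth textures so the committed swap chain texture
            // is no longer bound as a render target when the compositor reads it.
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, fboId);
            GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2D, 0, 0);
            GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.DepthAttachment, TextureTarget.Texture2D, 0, 0);
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
        }

        public void Commit()
        {
            // Make the texture rendered above available to the compositor
            // (assumption: the wrapper exposes ovr_CommitTextureSwapChain as Commit()).
            if (TextureChain != null)
            {
                OVRTypes.Result result = TextureChain.Commit();
                WriteErrorDetails(wrap, result, "Failed to commit the swap chain texture.");
            }
        }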