Example #1
0
        /// <summary>
        /// InitializeRenderingSteps is called when the VirtualRealityProvider is initialized and should add custom rendering steps to the DXScene.RenderingSteps list.
        /// See remarks for more info.
        /// </summary>
        /// <remarks>
        /// <para>
        /// <b>InitializeRenderingSteps</b> is called when the VirtualRealityProvider is initialized and should add custom rendering steps to the DXScene.RenderingSteps list.
        /// </para>
        /// <para>
        /// Usually the virtual reality rendering provider adds 3 rendering steps to the existing rendering steps:<br/>
        /// 1) <see cref="BeginVirtualRealityRenderingStep"/> is added before DXScene.DefaultPrepareRenderTargetsRenderingStep (prepares the rendering context for the currently rendered eye)<br/>
        /// 2) <see cref="RenderingStepsLoop"/> is added after DXScene.DefaultResolveMultisampledBackBufferRenderingStep (this renders the scene again for the other eye - jumps to BeginVirtualRealityRenderingStep)<br/>
        /// 3) <see cref="SimpleResolveStereoscopicImagesRenderingStep"/> or similar step is added after RenderingStepsLoop (to render post-process effects after the VR resolve) or before DXScene.DefaultCompleteRenderingStep (to render post-process effects before the VR resolve).
        /// </para>
        /// <para>
        /// This method usually also creates the pixel shaders and constant buffers.
        /// Other resources (back buffers and views) are usually created in <see cref="VirtualRealityProviderBase.UpdateRenderingContext"/> where the size of the current back buffer is compared with the size of back buffers for virtual reality.
        /// </para>
        /// <para>
        /// It is recommended that the created rendering steps are protected or public with a private setter.
        /// This way a derived class can override the InitializeRenderingSteps method and add the created rendering steps in some other way to the DXScene.RenderingSteps.
        /// </para>
        /// </remarks>
        /// <param name="dxScene">parent DXScene</param>
        protected override void InitializeRenderingSteps(DXScene dxScene)
        {
            // Call base class to:
            // Create and add beginVirtualRealityRenderingStep
            // Create and add renderingStepsLoop
            base.InitializeRenderingSteps(dxScene);

            // If this method is called again (re-initialization), remove and dispose the previously created step
            if (_resetViewportRenderingStep != null)
            {
                dxScene.RenderingSteps.Remove(_resetViewportRenderingStep);
                _resetViewportRenderingStep.Dispose();
            }


            // After both eyes were rendered, we need to reset the Viewport back to full screen
            // This can be done with adding the ChangeBackBufferRenderingStep after the renderingStepsLoop (after both eyes are rendered)
            // ChangeBackBufferRenderingStep is usually used to change current back buffer and its views, but it can be also used to change only Viewport.
            // Here we only create an instance of ChangeBackBufferRenderingStep and add it to RenderingSteps.
            // In the UpdateRenderingContext (below) we will set the NewViewport property to the size of the FinalBackBuffer
            _resetViewportRenderingStep = new ChangeBackBufferRenderingStep("ResetViewportRenderingStep", "Resets the Viewport from split screen viewport to the final full screen viewport");

            dxScene.RenderingSteps.AddAfter(dxScene.DefaultResolveMultisampledBackBufferRenderingStep, _resetViewportRenderingStep);


            // Replace the loop step created by the base class with one whose predicate also submits the OVR frames
            if (renderingStepsLoop != null)
            {
                dxScene.RenderingSteps.Remove(renderingStepsLoop);
            }

            // We need to call _textureSwapChain.Commit() after image for each eye is rendered

            // We create a loop in rendering steps with adding a RenderingStepsLoop (this is the last step in the loop)
            // The loop begins with beginVirtualRealityRenderingStep (when the loop is repeated, the execution goes back to beginVirtualRealityRenderingStep step)
            // The RenderingStepsLoop also requires a loopPredicate that determines if the loop should repeat (returns true) or exit (returns false).
            renderingStepsLoop = new RenderingStepsLoop("RepeatVirtualRealityLoop",
                                                        beginLoopRenderingStep: beginVirtualRealityRenderingStep,
                                                        loopPredicate: (RenderingContext r) =>
            {
                // This predicate is executed with the RenderingStepsLoop execution.
                // It returns true in case the rendering loop should repeat itself, or false when it should exit.
                // As seen from the return statement below, we repeat the rendering loop when the stereoscopic rendering is enabled and when we have rendered the left eye
                var currentEye = r.VirtualRealityContext.CurrentEye;

                if (_eyeTextureSwapChains != null)
                {
                    // Update the _sessionStatus before rendering the frame
                    if (currentEye == Eye.Left)
                    {
                        UpdateSessionStatus();

                        if (_sessionStatus.ShouldRecenter)
                        {
                            _ovr.RecenterTrackingOrigin(_sessionPtr);
                        }
                    }

                    if (_sessionStatus.IsVisible)     // We should submit OVR frames only when VR has focus
                    {
                        int eyeIndex = currentEye == Eye.Left ? 0 : 1;

                        _eyeTextureSwapChains[eyeIndex].Commit();

                        // After the right eye is committed, both eye textures are ready - submit the frame to the OVR compositor
                        if (currentEye == Eye.Right)
                        {
                            _layerShared.Header.Type  = LayerType.EyeFov;
                            _layerShared.Header.Flags = LayerFlags.None;

                            _layerShared.ColorTextureLeft = _eyeTextureSwapChains[0].TextureSwapChainPtr;
                            _layerShared.ViewportLeft     = new Recti(new Vector2i(0, 0), new Sizei(_eyeTextureSwapChains[0].ViewportSize.Width, _eyeTextureSwapChains[0].ViewportSize.Height));
                            _layerShared.FovLeft          = _hmdDesc.DefaultEyeFov[0];
                            _layerShared.RenderPoseLeft   = _eyePoses[0];

                            _layerShared.ColorTextureRight = _eyeTextureSwapChains[1].TextureSwapChainPtr;
                            _layerShared.ViewportRight     = new Recti(new Vector2i(0, 0), new Sizei(_eyeTextureSwapChains[1].ViewportSize.Width, _eyeTextureSwapChains[1].ViewportSize.Height));
                            _layerShared.FovRight          = _hmdDesc.DefaultEyeFov[1];
                            _layerShared.RenderPoseRight   = _eyePoses[1];

                            _layerShared.SensorSampleTime = _sensorSampleTime;

                            var result = _ovr.SubmitFrame(_sessionPtr, _frameIndex, IntPtr.Zero, ref _layerShared);

                            if (result < Ab3d.OculusWrap.Result.Success)
                            {
                                // Include the detailed OVR error info in the exception (it was previously fetched but never used)
                                var lastError = _ovr.GetLastErrorInfo();
                                throw new OvrException("Failed to submit frame: " + result + "; " + lastError);
                            }

                            _frameIndex++;
                        }
                    }

                    // Copy the OVR mirror texture to the final back buffer so the rendered image is also shown in the window
                    // (only when the sizes match; otherwise UpdateRenderingContext will recreate the mirror texture)
                    if (_mirrorTextureDesc.Width == r.FinalBackBufferDescription.Width && _mirrorTextureDesc.Height == r.FinalBackBufferDescription.Height)
                    {
                        r.DeviceContext.CopyResource(_mirrorTextureDX, r.FinalBackBuffer);
                    }
                }

                // Repeat the rendering loop when the stereoscopic rendering is enabled and when we have rendered the left eye
                return(this.IsEnabled &&
                       r.VirtualRealityContext != null &&
                       currentEye == Eye.Left);
            });

            dxScene.RenderingSteps.AddAfter(dxScene.DefaultResolveMultisampledBackBufferRenderingStep, renderingStepsLoop);
        }
Example #2
0
        /// <summary>
        /// Initializes the Oculus OVR runtime (or a plain DXDevice when UseOculusRift is false),
        /// creates the DXViewportView with Viewport3D, the camera, XBOX controller, sample 3D scene and lights,
        /// and starts rendering (90 FPS background loop or the standard CompositionTarget.Rendering event).
        /// </summary>
        private void InitializeOvrAndDirectX()
        {
            if (UseOculusRift)
            {
                // Initializing Oculus VR is very simple when using OculusWrapVirtualRealityProvider
                // First we create an instance of OculusWrapVirtualRealityProvider
                _oculusRiftVirtualRealityProvider = new OculusWrapVirtualRealityProvider(_ovr, multisamplingCount: 4);

                try
                {
                    // Then we initialize Oculus OVR and create a new DXDevice that uses the same adapter (graphic card) as Oculus Rift
                    _dxDevice = _oculusRiftVirtualRealityProvider.InitializeOvrAndDXDevice(requestedOculusSdkMinorVersion: 17);
                }
                catch (Exception ex)
                {
                    // Without a working OVR runtime there is nothing to render - show the error and abort initialization
                    MessageBox.Show("Failed to initialize the Oculus runtime library.\r\nError: " + ex.Message, "Oculus error", MessageBoxButton.OK, MessageBoxImage.Error);
                    return;
                }

                string ovrVersionString = _ovr.GetVersionString();
                _originalWindowTitle = string.Format("DXEngine OculusWrap Sample (OVR v{0})", ovrVersionString);
                this.Title           = _originalWindowTitle;


                // Reset tracking origin at startup
                _ovr.RecenterTrackingOrigin(_oculusRiftVirtualRealityProvider.SessionPtr);
            }
            else
            {
                // Create DXDevice that will be used to create DXViewportView
                var dxDeviceConfiguration = new DXDeviceConfiguration();
                dxDeviceConfiguration.DriverType             = DriverType.Hardware;
                dxDeviceConfiguration.SupportedFeatureLevels = new FeatureLevel[] { FeatureLevel.Level_11_0 }; // Oculus requires at least feature level 11.0

                _dxDevice = new DXDevice(dxDeviceConfiguration);
                _dxDevice.InitializeDevice();

                _originalWindowTitle = this.Title;
            }



            // Create WPF's Viewport3D
            _viewport3D = new Viewport3D();

            // Create DXViewportView - a control that will enable DirectX 11 rendering of standard WPF 3D content defined in Viewport3D.
            // We use a specified DXDevice that was created by the _oculusRiftVirtualRealityProvider.InitializeOvrAndDXDevice (this way the same adapter is used by Oculus and DXEngine).
            _dxViewportView = new DXViewportView(_dxDevice, _viewport3D);

            _dxViewportView.BackgroundColor = Colors.Aqua;

            // Currently DXEngine supports showing Oculus mirror texture only with DirectXOverlay presentation type (not with DirectXImage)
            _dxViewportView.PresentationType = DXView.PresentationTypes.DirectXOverlay;

            if (UseOculusRift)
            {
                // The _dxViewportView will show Oculus mirror window.
                // The mirror window can be any size, for this sample we use 1/2 the HMD resolution.
                _dxViewportView.Width  = _oculusRiftVirtualRealityProvider.HmdDescription.Resolution.Width / 2.0;
                _dxViewportView.Height = _oculusRiftVirtualRealityProvider.HmdDescription.Resolution.Height / 2.0;
            }


            // When the DXViewportView is initialized, we set the _oculusRiftVirtualRealityProvider to the DXScene object
            _dxViewportView.DXSceneInitialized += delegate(object sender, EventArgs args)
            {
                if (_dxViewportView.UsedGraphicsProfile.DriverType != GraphicsProfile.DriverTypes.Wpf3D &&
                    _dxViewportView.DXScene != null &&
                    _oculusRiftVirtualRealityProvider != null)
                {
                    // Initialize Virtual reality rendering
                    _dxViewportView.DXScene.InitializeVirtualRealityRendering(_oculusRiftVirtualRealityProvider);


                    // Initialize shadow rendering (see Ab3d.DXEngine.Wpf.Samples project - DXEngine/ShadowRenderingSample for more info)
                    _varianceShadowRenderingProvider = new VarianceShadowRenderingProvider()
                    {
                        ShadowMapSize          = 1024,
                        ShadowDepthBluringSize = 2,
                        ShadowTreshold         = 0.2f
                    };

                    _dxViewportView.DXScene.InitializeShadowRendering(_varianceShadowRenderingProvider);
                }
            };


            // Enable collecting rendering statistics (see _dxViewportView.DXScene.Statistics class)
            DXDiagnostics.IsCollectingStatistics = true;

            // Subscribe to SceneRendered to collect FPS statistics
            _dxViewportView.SceneRendered += DXViewportViewOnSceneRendered;

            // Add _dxViewportView to the RootGrid
            // Before that we resize the window to be big enough to show the mirrored texture
            // NOTE(review): when UseOculusRift is false, _dxViewportView.Width/Height were never set (NaN),
            // so NaN + 30 stays NaN and the window keeps its auto size - confirm this is intended
            this.Width  = _dxViewportView.Width + 30;
            this.Height = _dxViewportView.Height + 50;

            RootGrid.Children.Add(_dxViewportView);


            // Create FirstPersonCamera
            _camera = new FirstPersonCamera()
            {
                TargetViewport3D = _viewport3D,
                Position         = new Point3D(0, 1, 4),
                Heading          = 0,
                Attitude         = 0,
                ShowCameraLight  = ShowCameraLightType.Never
            };

            RootGrid.Children.Add(_camera);


            // Initialize XBOX controller that will control the FirstPersonCamera
            _xInputCameraController = new XInputCameraController();
            _xInputCameraController.TargetCamera  = _camera;
            _xInputCameraController.MovementSpeed = 0.02;
            _xInputCameraController.MoveVerticallyWithDPadButtons = true;

            // We handle the rotation ourselves to prevent rotating the camera up and down - this is done only by HMD
            _xInputCameraController.RightThumbChanged += delegate(object sender, XInputControllerThumbChangedEventArgs e)
            {
                // Apply only horizontal rotation
                _camera.Heading += e.NormalizedX * _xInputCameraController.RotationSpeed;

                // Mark the event as handled
                e.IsHandled = true;
            };

            _xInputCameraController.StartCheckingController();


            // Now we can create our sample 3D scene
            CreateSceneObjects();

            // Add lights
            var lightsVisual3D = new ModelVisual3D();
            var lightsGroup    = new Model3DGroup();

            var directionalLight = new DirectionalLight(Colors.White, new Vector3D(0.5, -0.3, -0.3));

            directionalLight.SetDXAttribute(DXAttributeType.IsCastingShadow, true); // Set this light to cast shadow
            lightsGroup.Children.Add(directionalLight);

            var ambientLight = new AmbientLight(System.Windows.Media.Color.FromRgb(30, 30, 30));

            lightsGroup.Children.Add(ambientLight);

            lightsVisual3D.Content = lightsGroup;
            _viewport3D.Children.Add(lightsVisual3D);


            // Start rendering
            if (RenderAt90Fps)
            {
                // WPF does not support rendering at more than 60 FPS.
                // But with a trick where a rendering loop is created in a background thread, it is possible to achieve more than 60 FPS.
                // In case of submitting frames to Oculus Rift, the ovr.SubmitFrame method will limit rendering to 90 FPS.
                //
                // NOTE:
                // When using DXEngine, it is also possible to render the scene in a background thread.
                // This requires that the 3D scene is also created in the background thread and that the events and other messages are
                // passed between UI and background thread in a thread safe way. This is too complicated for this simple sample project.
                // To see one possible implementation of background rendering, see the BackgroundRenderingSample in the Ab3d.DXEngine.Wpf.Samples project.
                var backgroundWorker = new BackgroundWorker();
                backgroundWorker.DoWork += (object sender, DoWorkEventArgs args) =>
                {
                    // Create an action that will be called by Dispatcher
                    var refreshDXEngineAction = new Action(() =>
                    {
                        UpdateScene();

                        // Render DXEngine's 3D scene again
                        if (_dxViewportView != null)
                        {
                            _dxViewportView.Refresh();
                        }
                    });

                    while (_dxViewportView != null && !_dxViewportView.IsDisposed)                                                       // Render until window is closed
                    {
                        if (_oculusRiftVirtualRealityProvider != null && _oculusRiftVirtualRealityProvider.LastSessionStatus.ShouldQuit) // Stop rendering - this will call RunWorkerCompleted where we can quit the application
                        {
                            break;
                        }

                        // Sleep for 1 ms to allow WPF tasks to complete (for example handling XBOX controller events)
                        System.Threading.Thread.Sleep(1);

                        // Call Refresh to render the DXEngine's scene
                        // This is a synchronous call and will wait until the scene is rendered.
                        // Because Oculus is limited to 90 fps, the call to ovr.SubmitFrame will limit rendering to 90 FPS.
                        Dispatcher.Invoke(refreshDXEngineAction);
                    }
                };

                backgroundWorker.RunWorkerCompleted += delegate(object sender, RunWorkerCompletedEventArgs args)
                {
                    // The background loop exits when the OVR session asks to quit - close the window (and the application)
                    if (_oculusRiftVirtualRealityProvider != null && _oculusRiftVirtualRealityProvider.LastSessionStatus.ShouldQuit)
                    {
                        this.Close(); // Exit the application
                    }
                };

                backgroundWorker.RunWorkerAsync();
            }
            else
            {
                // Subscribe to WPF rendering event (called approximately 60 times per second)
                CompositionTarget.Rendering += CompositionTargetOnRendering;
            }
        }