Example #1
        private void InitializeSharpDXRendering(DXScene dxScene)
        {
            var device = dxScene.Device;

            // DirectX device, back buffer, render targets and other resources are initialized by DXEngine
            // So here we only initialize things that are added by this sample

            // Code from SharpDX MiniCube sample:
            //// Compile Vertex and Pixel shaders
            //var vertexShaderByteCode = ShaderBytecode.CompileFromFile("MiniCube.hlsl", "VS", "vs_4_0");
            //var vertexShader = new VertexShader(device, vertexShaderByteCode);

            //var pixelShaderByteCode = ShaderBytecode.CompileFromFile("MiniCube.hlsl", "PS", "ps_4_0");
            //var pixelShader = new PixelShader(device, pixelShaderByteCode);


            var vertexShaderByteCode = System.IO.File.ReadAllBytes(System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources\Shaders\MiniCube.vs"));

            _vertexShader = new VertexShader(device, vertexShaderByteCode);

            var pixelShaderByteCode = System.IO.File.ReadAllBytes(System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources\Shaders\MiniCube.ps"));

            _pixelShader = new PixelShader(device, pixelShaderByteCode);

            // Code from SharpDX MiniCube sample:
            //var signature = ShaderSignature.GetInputSignature(vertexShaderByteCode);

            //// Layout from VertexShader input signature
            //var layout = new InputLayout(device, signature, new[]
            //        {
            //            new InputElement("POSITION", 0, Format.R32G32B32A32_Float, 0, 0),
            //            new InputElement("COLOR", 0, Format.R32G32B32A32_Float, 16, 0)
            //        });
            _layout = new InputLayout(device, vertexShaderByteCode, new[]
            {
                new InputElement("POSITION", 0, Format.R32G32B32A32_Float, 0, 0),
                new InputElement("COLOR", 0, Format.R32G32B32A32_Float, 16, 0)
            });


            // Instantiate Vertex buffer from vertex data
            var vertexBuffer = CustomRenderingStep1.GetSharpDXBoxVertexBuffer();

            _vertices = Buffer.Create(device, BindFlags.VertexBuffer, vertexBuffer);


            // Create Constant Buffer
            _constantBuffer = new Buffer(device, SharpDX.Utilities.SizeOf<Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);


            // We need to disable calculating the near and far plane distances based on the objects in the 3D scene.
            // Without this, the objects rendered with SharpDX could be clipped because their size is not taken into account in the near and far plane calculations.
            // By default OptimizeNearAndFarCameraPlanes is set to true because this greatly improves the resolution of the depth buffer and
            // therefore reduces the possibility of Z-fighting artifacts.
            dxScene.OptimizeNearAndFarCameraPlanes = false;
        }
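
The resources created above are used when DXEngine executes a custom rendering step. The following is a minimal sketch (assuming Ab3d.DXEngine's CustomActionRenderingStep and a DefaultRenderObjectsRenderingStep property on DXScene; the world-view-projection matrix is only a placeholder) of how the draw call could be issued:

        private void AddCustomRenderingStep(DXScene dxScene)
        {
            var customRenderingStep = new CustomActionRenderingStep("Custom SharpDX rendering step")
            {
                CustomAction = renderingContext =>
                {
                    var context = renderingContext.DeviceContext;

                    // Bind the pipeline state that was created in InitializeSharpDXRendering
                    context.InputAssembler.InputLayout = _layout;
                    context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
                    context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(_vertices, SharpDX.Utilities.SizeOf<Vector4>() * 2, 0));
                    context.VertexShader.SetConstantBuffer(0, _constantBuffer);
                    context.VertexShader.Set(_vertexShader);
                    context.PixelShader.Set(_pixelShader);

                    // In a real sample the world-view-projection matrix would be calculated
                    // from the current camera; Matrix.Identity is only a placeholder here.
                    var worldViewProjection = Matrix.Identity;
                    context.UpdateSubresource(ref worldViewProjection, _constantBuffer);

                    // The box vertex buffer contains 36 vertices (12 triangles)
                    context.Draw(36, 0);
                }
            };

            // Render the custom geometry after the standard 3D objects
            // (assumed: DXScene exposes a DefaultRenderObjectsRenderingStep)
            dxScene.RenderingSteps.AddAfter(dxScene.DefaultRenderObjectsRenderingStep, customRenderingStep);
        }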
Example #2
        private void InitializeSharpDXRendering(DXScene dxScene)
        {
            var device = dxScene.Device;

            // DirectX device, back buffer, render targets and other resources are initialized by DXEngine
            // So here we only initialize things that are added by this sample

            // Code from SharpDX MiniCube sample:
            //// Compile Vertex and Pixel shaders
            //var vertexShaderByteCode = ShaderBytecode.CompileFromFile("MiniCube.hlsl", "VS", "vs_4_0");
            //var vertexShader = new VertexShader(device, vertexShaderByteCode);

            //var pixelShaderByteCode = ShaderBytecode.CompileFromFile("MiniCube.hlsl", "PS", "ps_4_0");
            //var pixelShader = new PixelShader(device, pixelShaderByteCode);


            var vertexShaderByteCode = System.IO.File.ReadAllBytes(System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources\Shaders\MiniCube.vs"));

            _vertexShader = new VertexShader(device, vertexShaderByteCode);

            var pixelShaderByteCode = System.IO.File.ReadAllBytes(System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"Resources\Shaders\MiniCube.ps"));

            _pixelShader = new PixelShader(device, pixelShaderByteCode);

            // Code from SharpDX MiniCube sample:
            //var signature = ShaderSignature.GetInputSignature(vertexShaderByteCode);

            //// Layout from VertexShader input signature
            //var layout = new InputLayout(device, signature, new[]
            //        {
            //            new InputElement("POSITION", 0, Format.R32G32B32A32_Float, 0, 0),
            //            new InputElement("COLOR", 0, Format.R32G32B32A32_Float, 16, 0)
            //        });
            _layout = new InputLayout(device, vertexShaderByteCode, new[]
            {
                new InputElement("POSITION", 0, Format.R32G32B32A32_Float, 0, 0),
                new InputElement("COLOR", 0, Format.R32G32B32A32_Float, 16, 0)
            });


            // Instantiate Vertex buffer from vertex data
            var vertexBuffer = GetSharpDXBoxVertexBuffer();

            _vertices = Buffer.Create(device, BindFlags.VertexBuffer, vertexBuffer);

            // Create Constant Buffer
            _constantBuffer = new Buffer(device, SharpDX.Utilities.SizeOf<Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);


            // Create and start a Stopwatch that can be used for time-based animation
            _clock = new Stopwatch();
            _clock.Start();
        }
Example #3
        public SettingsEditorWindow(DXScene dxScene)
        {
            if (dxScene == null)
            {
                return;
            }

            CurrentDXScene = dxScene;

            InitializeComponent();

            CreateDXSceneSettings();
        }
Example #4
        public PerformanceAnalyzer(DXView dxView, string name = null, int initialCapacity = 10000)
        {
            if (dxView == null)
            {
                throw new ArgumentNullException("dxView");
            }

            _dxView          = dxView;
            _name            = name;
            _initialCapacity = initialCapacity;

            _dxScene = _dxView.DXScene;
        }
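
A typical usage of this class is sketched below; StopCollectingStatistics and GetResultsText are assumed counterparts to the StartCollectingStatistics method shown in Example #6:

        // A minimal usage sketch (RunRenderingTest is a hypothetical method that
        // renders the frames we want to measure):
        var performanceAnalyzer = new PerformanceAnalyzer(dxViewportView, "Rendering test");

        performanceAnalyzer.StartCollectingStatistics();

        RunRenderingTest();

        performanceAnalyzer.StopCollectingStatistics();

        string resultsText = performanceAnalyzer.GetResultsText(); // assumed reporting method
        System.Diagnostics.Debug.WriteLine(resultsText);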
Example #5
        public RenderingFilterWindow(DXScene dxScene)
        {
            if (dxScene == null)
            {
                return;
            }

            CurrentDXScene = dxScene;

            InitializeComponent();

            CreateRenderingQueuesEditor();
            CreateRenderingStepsEditor();
        }
Example #6
        public void StartCollectingStatistics()
        {
            if (_isCollectingStatistics) // Already collecting
            {
                return;
            }

            if (_dxScene == null)
            {
                _dxScene = _dxView.DXScene;
                if (_dxScene == null)
                {
                    throw new Exception("Cannot start collecting statistics because DXScene does not exist (probably was not yet initialized)");
                }
            }

            _allCollectedRenderingStatistics = new List<List<RenderingStatistics>>();

            _currentRenderingStatisticsList = CreatePreallocatedList(_initialCapacity);
            _allCollectedRenderingStatistics.Add(_currentRenderingStatisticsList);

            _currentIndex      = 0;
            _totalSamplesCount = 0;
            _timeToFirstFrame  = -1;


            // Store the number of times garbage collection has occurred for each generation
            _garbageCollectionsCount = new int[GC.MaxGeneration + 1];
            for (int i = 0; i <= GC.MaxGeneration; i++)
            {
                _garbageCollectionsCount[i] = GC.CollectionCount(i);
            }


            if (!DXDiagnostics.IsCollectingStatistics)
            {
                DXDiagnostics.IsCollectingStatistics = true;
                _isEnabledCollectingStatistics       = true; // we will disable collecting statistics when we are finished
            }

            _stopwatch = new Stopwatch();
            _stopwatch.Start();

            _dxScene.AfterFrameRendered += DXSceneOnAfterFrameRendered;

            _isCollectingStatistics = true;
        }
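
A matching StopCollectingStatistics method would undo what StartCollectingStatistics has set up: detach the AfterFrameRendered handler, stop the Stopwatch, turn the stored garbage-collection counts into deltas and restore the DXDiagnostics.IsCollectingStatistics flag. A sketch, based only on the fields used above:

        public void StopCollectingStatistics()
        {
            if (!_isCollectingStatistics) // Not collecting
            {
                return;
            }

            _dxScene.AfterFrameRendered -= DXSceneOnAfterFrameRendered;

            _stopwatch.Stop();

            // Convert the stored collection counts into the number of collections that happened while collecting
            for (int i = 0; i <= GC.MaxGeneration; i++)
            {
                _garbageCollectionsCount[i] = GC.CollectionCount(i) - _garbageCollectionsCount[i];
            }

            // Disable global statistics collection only if we were the ones who enabled it
            if (_isEnabledCollectingStatistics)
            {
                DXDiagnostics.IsCollectingStatistics = false;
                _isEnabledCollectingStatistics       = false;
            }

            _isCollectingStatistics = false;
        }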
Example #7
        protected override Point3D? GetRotationCenterPositionFromMousePosition(Point mousePosition, bool calculatePositionWhenNoObjectIsHit)
        {
            if (OptimizedPointMesh == null || DXScene == null)
            {
                return(base.GetRotationCenterPositionFromMousePosition(mousePosition, calculatePositionWhenNoObjectIsHit));
            }

            var mouseRay = DXScene.GetRayFromCamera((int)mousePosition.X, (int)mousePosition.Y);

            float distance;
            var   closestPositionIndex = OptimizedPointMesh.GetClosestPositionIndex(mouseRay, out distance);

            if (closestPositionIndex != -1 && MaxDistanceToAnyPosition > 0 && distance < MaxDistanceToAnyPosition)
            {
                return(OptimizedPointMesh.PositionsArray[closestPositionIndex].ToWpfPoint3D());
            }

            return(null);
        }
Example #8
        private RenderObjectsRenderingStep EnsureRenderObjectsRenderingStep(DXScene dxScene)
        {
            if (_renderObjectOutlinesRenderingStep == null)
            {
                _renderObjectOutlinesRenderingStep = new RenderObjectsRenderingStep("RenderObjectOutlinesRenderingStep")
                {
                    OverrideEffect = _blackOutlineEffect,
                    FilterRenderingQueuesFunction = delegate(RenderingQueue queue)
                    {
                        // For this test we will not render 3D lines (only all other objects)
                        return(queue != dxScene.LineGeometryRenderingQueue);
                    }
                };

                _disposables.Add(_renderObjectOutlinesRenderingStep);
            }

            return(_renderObjectOutlinesRenderingStep);
        }
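
A created rendering step only takes effect after it is inserted into the DXScene's RenderingSteps list, for example (AddBefore and DefaultRenderObjectsRenderingStep are assumed here, based on the AddAfter calls and Default* steps used in the other examples):

            // Render the black outlines before the normally rendered objects
            var outlineRenderingStep = EnsureRenderObjectsRenderingStep(dxScene);
            dxScene.RenderingSteps.AddBefore(dxScene.DefaultRenderObjectsRenderingStep, outlineRenderingStep);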
Example #9
        // This must be called on the same thread as the objects were created on
        protected virtual void Dispose(bool disposing)
        {
            if (dxViewportView != null)
            {
                dxViewportView.Dispose();
                dxViewportView = null;
            }

            if (dxScene != null)
            {
                dxScene.Dispose();
                dxScene = null;
            }

            if (_dxDevice != null)
            {
                _dxDevice.Dispose();
                _dxDevice = null;
            }

            wpfViewport3D = null;
        }
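
The Dispose(bool) method above follows the standard IDisposable pattern, so the public entry point would look like this sketch (assuming the containing class implements IDisposable):

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }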
Example #10
        /// <summary>
        /// InitializeRenderingSteps is called when the VirtualRealityProvider is initialized and should add custom rendering steps to the DXScene.RenderingSteps list.
        /// See remarks for more info.
        /// </summary>
        /// <remarks>
        /// <para>
        /// <b>InitializeRenderingSteps</b> is called when the VirtualRealityProvider is initialized and should add custom rendering steps to the DXScene.RenderingSteps list.
        /// </para>
        /// <para>
        /// Usually the virtual reality rendering provider adds 3 rendering steps to the existing rendering steps:<br/>
        /// 1) <see cref="BeginVirtualRealityRenderingStep"/> is added before DXScene.DefaultPrepareRenderTargetsRenderingStep (prepares the rendering context for the currently rendered eye)<br/>
        /// 2) <see cref="RenderingStepsLoop"/> is added after DXScene.DefaultResolveMultisampledBackBufferRenderingStep (this renders the scene again for the other eye - jumps back to BeginVirtualRealityRenderingStep)<br/>
        /// 3) <see cref="SimpleResolveStereoscopicImagesRenderingStep"/> or a similar step is added after RenderingStepsLoop (to render post-process effects after the VR resolve) or before DXScene.DefaultCompleteRenderingStep (to render post-process effects before the VR resolve).
        /// </para>
        /// <para>
        /// This method usually also creates the pixel shaders and constant buffers.
        /// Other resources (back buffers and views) are usually created in <see cref="VirtualRealityProviderBase.UpdateRenderingContext"/> where the size of the current back buffer is compared with the size of back buffers for virtual reality.
        /// </para>
        /// <para>
        /// It is recommended that the created rendering steps are protected or public with a private setter.
        /// This way a derived class can override the InitializeRenderingSteps method and add the created rendering steps in some other way to the DXScene.RenderingSteps.
        /// </para>
        /// </remarks>
        /// <param name="dxScene">parent DXScene</param>
        protected override void InitializeRenderingSteps(DXScene dxScene)
        {
            // Call base class to:
            // Create and add beginVirtualRealityRenderingStep
            // Create and add renderingStepsLoop
            base.InitializeRenderingSteps(dxScene);

            if (_resetViewportRenderingStep != null)
            {
                dxScene.RenderingSteps.Remove(_resetViewportRenderingStep);
                _resetViewportRenderingStep.Dispose();
            }


            // After both eyes were rendered, we need to reset the Viewport back to full screen
            // This can be done with adding the ChangeBackBufferRenderingStep after the renderingStepsLoop (after both eyes are rendered)
            // ChangeBackBufferRenderingStep is usually used to change current back buffer and its views, but it can be also used to change only Viewport.
            // Here we only create an instance of ChangeBackBufferRenderingStep and add it to RenderingSteps.
            // In the UpdateRenderingContext (below) we will set the NewViewport property to the size of the FinalBackBuffer
            _resetViewportRenderingStep = new ChangeBackBufferRenderingStep("ResetViewportRenderingStep", "Resets the Viewport from split screen viewport to the final full screen viewport");

            dxScene.RenderingSteps.AddAfter(dxScene.DefaultResolveMultisampledBackBufferRenderingStep, _resetViewportRenderingStep);


            if (renderingStepsLoop != null)
            {
                dxScene.RenderingSteps.Remove(renderingStepsLoop);
            }

            // We need to call _textureSwapChain.Commit() after image for each eye is rendered

            // We create a loop in rendering steps with adding a RenderingStepsLoop (this is the last step in the loop)
            // The loop begins with beginVirtualRealityRenderingStep (when the loop is repeated, the execution goes back to beginVirtualRealityRenderingStep step)
            // The RenderingStepsLoop also requires a loopPredicate that determines if the loop should repeat (returns true) or exit (returns false).
            renderingStepsLoop = new RenderingStepsLoop("RepeatVirtualRealityLoop",
                                                        beginLoopRenderingStep: beginVirtualRealityRenderingStep,
                                                        loopPredicate: (RenderingContext r) =>
            {
                // This predicate is executed when the RenderingStepsLoop step is executed.
                // It returns true in case the rendering loop should repeat itself, or false when it should exit.
                // As seen from the return statement below, we repeat the rendering loop when the stereoscopic rendering is enabled and when we have rendered the left eye
                var currentEye = r.VirtualRealityContext.CurrentEye;

                if (_eyeTextureSwapChains != null)
                {
                    // Update the _sessionStatus before rendering the frame
                    if (currentEye == Eye.Left)
                    {
                        UpdateSessionStatus();

                        if (_sessionStatus.ShouldRecenter)
                        {
                            _ovr.RecenterTrackingOrigin(_sessionPtr);
                        }
                    }

                    if (_sessionStatus.IsVisible)     // We should submit OVR frames only when VR has focus
                    {
                        int eyeIndex = currentEye == Eye.Left ? 0 : 1;

                        _eyeTextureSwapChains[eyeIndex].Commit();

                        if (currentEye == Eye.Right)
                        {
                            _layerShared.Header.Type  = LayerType.EyeFov;
                            _layerShared.Header.Flags = LayerFlags.None;

                            _layerShared.ColorTextureLeft = _eyeTextureSwapChains[0].TextureSwapChainPtr;
                            _layerShared.ViewportLeft     = new Recti(new Vector2i(0, 0), new Sizei(_eyeTextureSwapChains[0].ViewportSize.Width, _eyeTextureSwapChains[0].ViewportSize.Height));
                            _layerShared.FovLeft          = _hmdDesc.DefaultEyeFov[0];
                            _layerShared.RenderPoseLeft   = _eyePoses[0];

                            _layerShared.ColorTextureRight = _eyeTextureSwapChains[1].TextureSwapChainPtr;
                            _layerShared.ViewportRight     = new Recti(new Vector2i(0, 0), new Sizei(_eyeTextureSwapChains[1].ViewportSize.Width, _eyeTextureSwapChains[1].ViewportSize.Height));
                            _layerShared.FovRight          = _hmdDesc.DefaultEyeFov[1];
                            _layerShared.RenderPoseRight   = _eyePoses[1];

                            _layerShared.SensorSampleTime = _sensorSampleTime;

                            var result = _ovr.SubmitFrame(_sessionPtr, _frameIndex, IntPtr.Zero, ref _layerShared);

                            if (result < Ab3d.OculusWrap.Result.Success)
                            {
                                var lastError = _ovr.GetLastErrorInfo();
                                throw new OvrException("Failed to sumbit frame: " + result);
                            }

                            _frameIndex++;
                        }
                    }

                    if (_mirrorTextureDesc.Width == r.FinalBackBufferDescription.Width && _mirrorTextureDesc.Height == r.FinalBackBufferDescription.Height)
                    {
                        r.DeviceContext.CopyResource(_mirrorTextureDX, r.FinalBackBuffer);
                    }
                }

                // Repeat the rendering loop when the stereoscopic rendering is enabled and when we have rendered the left eye
                return(this.IsEnabled &&
                       r.VirtualRealityContext != null &&
                       currentEye == Eye.Left);
            });

            dxScene.RenderingSteps.AddAfter(dxScene.DefaultResolveMultisampledBackBufferRenderingStep, renderingStepsLoop);
        }
Example #11
        /// <summary>
        /// CreateResources is called when the VirtualRealityProvider is initialized and should create the DirectX resources.
        /// </summary>
        /// <remarks>
        /// <para>
        /// <b>CreateResources</b> is called when the VirtualRealityProvider is initialized and should create the DirectX resources.
        /// </para>
        /// <para>
        /// This method is called after this virtual reality provider is registered by calling the <see cref="DXScene.InitializeVirtualRealityRendering"/> method.
        /// That method then calls <see cref="DXSceneResource.InitializeResources"/> and <see cref="VirtualRealityProviderBase.OnInitializeResources"/>.
        /// OnInitializeResources calls this CreateResources method and then the <see cref="VirtualRealityProviderBase.InitializeRenderingSteps"/> method.
        /// </para>
        /// <para>
        /// This method usually creates pixel shaders and constant buffers.
        /// Other resources (back buffers and views) are usually created in <see cref="VirtualRealityProviderBase.UpdateRenderingContext"/> where the size of the current back buffer is compared with the size of back buffers for virtual reality.
        /// </para>
        /// </remarks>
        /// <param name="dxScene">parent DXScene</param>
        protected override void CreateResources(DXScene dxScene)
        {
            if (_eyeTextureSwapChains != null)
            {
                if (_eyeTextureSwapChains[0] != null)
                {
                    _eyeTextureSwapChains[0].Dispose();
                }

                if (_eyeTextureSwapChains[1] != null)
                {
                    _eyeTextureSwapChains[1].Dispose();
                }
            }
            else
            {
                _eyeTextureSwapChains = new OculusTextureSwapChain[2];
            }

            _eyeTextureSwapChains[0] = new OculusTextureSwapChain(_ovr,
                                                                  _sessionPtr,
                                                                  dxScene.Device,
                                                                  EyeType.Left,
                                                                  Format.B8G8R8A8_UNorm_SRgb,
                                                                  _ovr.GetFovTextureSize(_sessionPtr, EyeType.Left, _hmdDesc.DefaultEyeFov[0], 1.0f),
                                                                  createDepthStencilView: true,
                                                                  isDebugDevice: dxScene.DXDevice.IsDebugDevice);

            _eyeTextureSwapChains[1] = new OculusTextureSwapChain(_ovr,
                                                                  _sessionPtr,
                                                                  dxScene.Device,
                                                                  EyeType.Right,
                                                                  Format.B8G8R8A8_UNorm_SRgb,
                                                                  _ovr.GetFovTextureSize(_sessionPtr, EyeType.Right, _hmdDesc.DefaultEyeFov[1], 1.0f),
                                                                  createDepthStencilView: true,
                                                                  isDebugDevice: dxScene.DXDevice.IsDebugDevice);


            _layerShared        = new LayerEyeFov();
            _layerShared.Header = new LayerHeader()
            {
                Type  = LayerType.EyeFov,
                Flags = LayerFlags.HighQuality
            };

            // Specify the texture to show on the HMD.
            _layerShared.ColorTextureLeft  = _eyeTextureSwapChains[0].TextureSwapChainPtr;
            _layerShared.ColorTextureRight = _eyeTextureSwapChains[1].TextureSwapChainPtr;

            _layerShared.ViewportLeft.Position = new Vector2i(0, 0);
            _layerShared.ViewportLeft.Size     = _eyeTextureSwapChains[0].Size;

            _layerShared.ViewportRight.Position = new Vector2i(0, 0);
            _layerShared.ViewportRight.Size     = _eyeTextureSwapChains[1].Size;

            _layerShared.FovLeft  = _hmdDesc.DefaultEyeFov[0];
            _layerShared.FovRight = _hmdDesc.DefaultEyeFov[1];


            _eyeRenderDesc[0]  = _ovr.GetRenderDesc(_sessionPtr, EyeType.Left, _hmdDesc.DefaultEyeFov[0]);
            _hmdToEyeOffset[0] = _eyeRenderDesc[0].HmdToEyePose.Position;

            _eyeRenderDesc[1]  = _ovr.GetRenderDesc(_sessionPtr, EyeType.Right, _hmdDesc.DefaultEyeFov[1]);
            _hmdToEyeOffset[1] = _eyeRenderDesc[1].HmdToEyePose.Position;


            // Create MSAA back buffer if needed
            UpdateMsaaBackBuffer(_eyeTextureSwapChains[0].Size.Width, _eyeTextureSwapChains[0].Size.Height, _multisamplingCount);


            _mirrorTextureDesc = new MirrorTextureDesc()
            {
                Format    = SharpDXHelpers.GetTextureFormat(dxScene.BackBufferDescription.Format),
                Height    = dxScene.BackBufferDescription.Height,
                MiscFlags = dxScene.BackBufferDescription.MipLevels != 1 ? TextureMiscFlags.AllowGenerateMips : TextureMiscFlags.None,
                Width     = dxScene.BackBufferDescription.Width
            };

            // EyeLevel gives tracking poses relative to the initial eye position (FloorLevel would give tracking poses where the floor height is 0)
            _ovr.SetTrackingOriginType(_sessionPtr, TrackingOrigin.EyeLevel);

            IntPtr mirrorTexturePtr;
            var    result = _ovr.CreateMirrorTextureDX(_sessionPtr, dxScene.Device.NativePointer, ref _mirrorTextureDesc, out mirrorTexturePtr);

            if (result < Ab3d.OculusWrap.Result.Success)
            {
                var lastError = _ovr.GetLastErrorInfo();
                throw new OvrException("Failed to create Oculus mirror texture: " + lastError.ErrorString, lastError.Result);
            }

            _mirrorTexture = new OculusMirrorTexture(_ovr, _sessionPtr, mirrorTexturePtr);

            // Retrieve the Direct3D texture contained in the Oculus MirrorTexture.
            IntPtr mirrorTextureComPtr;

            result = _mirrorTexture.GetBufferDX(typeof(Texture2D).GUID, out mirrorTextureComPtr);

            if (result < Ab3d.OculusWrap.Result.Success)
            {
                var lastError = _ovr.GetLastErrorInfo();
                throw new OvrException("Failed to retrieve the texture from the created mirror texture buffer: " + lastError.ErrorString, lastError.Result);
            }

            // Create a managed Texture2D, based on the unmanaged texture pointer.
            _mirrorTextureDX = new Texture2D(mirrorTextureComPtr);

            if (dxScene.DXDevice.IsDebugDevice)
            {
                _mirrorTextureDX.DebugName = "OculusMirrorTexture";
            }


            // To prevent DirectX from rendering more than one frame in the background,
            // we need to set the MaximumFrameLatency to 1.
            // This prevents occasional dropped frames in Oculus Rift.
            var dxgiDevice = dxScene.Device.QueryInterface<SharpDX.DXGI.Device1>();

            if (dxgiDevice != null)
            {
                dxgiDevice.MaximumFrameLatency = 1;
                dxgiDevice.Dispose();
            }

            _frameIndex = 0;

            _matrixCamera = new MatrixCamera();
        }
Example #12
        public PointCloudMouseCameraController(DXScene dxScene, OptimizedPointMesh<Vector3> optimizedPointMesh)
        {
            OptimizedPointMesh = optimizedPointMesh;
            DXScene            = dxScene;
        }
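
A usage sketch; optimizedPointMesh is assumed to be created elsewhere in the sample, and MaxDistanceToAnyPosition is the property read by the GetRotationCenterPositionFromMousePosition override in Example #7:

        var mouseCameraController = new PointCloudMouseCameraController(dxScene, optimizedPointMesh)
        {
            MaxDistanceToAnyPosition = 0.5f // ignore positions that are farther from the mouse ray (value is illustrative)
        };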
Example #13
        /// <summary>
        /// InitializeDXViewportView
        /// </summary>
        /// <param name="clientWindowWidth">clientWindowWidth</param>
        /// <param name="clientWindowHeight">clientWindowHeight</param>
        /// <param name="dpiScaleX">DPI scale: 1 means no scale (96 DPI)</param>
        /// <param name="dpiScaleY">DPI scale: 1 means no scale (96 DPI)</param>
        /// <param name="preferedMultisamplingCount">preferedMultisamplingCount</param>
        public void InitializeDXViewportView(int clientWindowWidth,
                                             int clientWindowHeight,
                                             double dpiScaleX,
                                             double dpiScaleY,
                                             int preferedMultisamplingCount)
        {
            // To render the 3D scene to the custom hWnd, we need to create the DXViewportView with a custom DXScene
            // that was initialized by calling the InitializeSwapChain method (with the passed hWnd).

            // To create a custom DXScene we first need to create a DXDevice object (a wrapper around the DirectX Device object)
            var dxDeviceConfiguration = new DXDeviceConfiguration();

            dxDeviceConfiguration.DriverType = DriverType.Hardware; // We could also specify Software rendering here

            try
            {
                _dxDevice = new DXDevice(dxDeviceConfiguration);
                _dxDevice.InitializeDevice();
            }
            catch (Exception ex)
            {
                MessageBox.Show("Cannot create required DirectX device.\r\n" + ex.Message);
                return;
            }

            if (_dxDevice.Device == null)
            {
                MessageBox.Show("Cannot create required DirectX device.");
                return;
            }


            // Now we can create the DXScene
            dxScene = new Ab3d.DirectX.DXScene(_dxDevice);

            // ensure we have a valid size; we will resize later to the correct size
            if (clientWindowWidth <= 0)
            {
                clientWindowWidth = 1;
            }
            if (clientWindowHeight <= 0)
            {
                clientWindowHeight = 1;
            }

            dxScene.InitializeSwapChain(_hWnd,
                                        (int)(clientWindowWidth * dpiScaleX),
                                        (int)(clientWindowHeight * dpiScaleY),
                                        preferedMultisamplingCount,
                                        (float)dpiScaleX,
                                        (float)dpiScaleY);


            wpfViewport3D = new Viewport3D();

            dxViewportView = new DXViewportView(dxScene, wpfViewport3D);


            // Because dxViewportView is not shown in the UI, DXEngineSnoop (DXEngine's diagnostics tool) cannot find it.
            // To enable using DXEngineSnoop in such cases, we can set the Application's Property:
            Application.Current.Properties["DXView"] = new WeakReference(dxViewportView);

            OnDXViewportViewInitialized();
        }
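
After the initialization, the owner of this class drives the rendering and resizing. A minimal sketch (Render and Resize are hypothetical entry points; it is assumed that DXScene provides a Resize method that recreates the swap chain's back buffers):

        public void Render()
        {
            if (dxViewportView != null)
                dxViewportView.Refresh(); // re-renders the 3D scene
        }

        public void Resize(int newClientWindowWidth, int newClientWindowHeight)
        {
            // As in InitializeDXViewportView, the client size would need to be multiplied by the DPI scale
            if (dxScene != null)
                dxScene.Resize(newClientWindowWidth, newClientWindowHeight);
        }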