Example #1
        /// <summary>
        /// Gets the view-projection constant buffer for the display, and attaches it
        /// to the shader pipeline.
        /// </summary>
        public bool AttachViewProjectionBuffer(DeviceResources deviceResources)
        {
            // This method uses Direct3D device-based resources.
            var context = deviceResources.D3DDeviceContext;

            // Loading is asynchronous. Resources must be created before they can be updated.
            // Cameras can also be added asynchronously, in which case they must be initialized
            // before they can be used.
            if (context == null || viewProjectionConstantBuffer == null || !framePending)
            {
                return(false);
            }

            // Set the viewport for this camera.
            context.Rasterizer.SetViewport(Viewport);

            // Send the constant buffer to the vertex shader.
            context.VertexShader.SetConstantBuffers(1, viewProjectionConstantBuffer);

            // The template includes a pass-through geometry shader that is used by
            // default on systems that don't support the D3D11_FEATURE_D3D11_OPTIONS3::
            // VPAndRTArrayIndexFromAnyShaderFeedingRasterizer extension. The shader
            // will be enabled at run-time on systems that require it.
            // If your app will also use the geometry shader for other tasks and those
            // tasks require the view/projection matrix, uncomment the following line
            // of code to send the constant buffer to the geometry shader as well.
            //context.GeometryShader.SetConstantBuffers(1, viewProjectionConstantBuffer);

            framePending = false;

            return(true);
        }
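A minimal, hedged usage sketch: bind the camera's viewport and constant buffer, and only draw if the bind succeeded. The names cameraResources and meshRenderer are assumptions for illustration, not part of the sample.

        // Hedged sketch: draw only when the view-projection buffer was attached.
        if (cameraResources.AttachViewProjectionBuffer(deviceResources))
        {
            meshRenderer.Render(); // hypothetical scene renderer
        }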
Example #2
        /// <summary>
        /// Stores the creation parameters and allocates the per-mip render target and
        /// shader resource view arrays for a cubemap rendered at the given position.
        /// </summary>
        public RenderableCubemap(DeviceResources resources, Vector3 position, int resolution = 1024, int mipCount = 1)
        {
            Resources              = resources;
            Position               = position;
            Resolution             = resolution;
            MipCount               = mipCount;
            MipRenderTargetViews   = new RenderTargetView[MipCount];
            MipShaderResourceViews = new ShaderResourceView[MipCount];
            RenderTargetViews      = new ReadOnlyCollection <RenderTargetView>(MipRenderTargetViews);
            ShaderResourceViews    = new ReadOnlyCollection <ShaderResourceView>(MipShaderResourceViews);
        }
Example #3
        /// <summary>
        /// Copies a texture to a CPU-readable staging texture and saves its contents
        /// to the given file as a PNG image.
        /// </summary>
        public static void ExportTexture(DeviceResources resources, StorageFile textureFile, Texture2D texture)
        {
            var device  = resources.D3DDevice;
            var context = resources.D3DDeviceContext;

            var textureToSave = texture;
            var outputTexture = new Texture2D(device, new Texture2DDescription
            {
                Width             = textureToSave.Description.Width,
                Height            = textureToSave.Description.Height,
                MipLevels         = 1,
                ArraySize         = 1,
                Format            = textureToSave.Description.Format,
                Usage             = ResourceUsage.Staging,
                SampleDescription = new SampleDescription(1, 0),
                BindFlags         = BindFlags.None,
                CpuAccessFlags    = CpuAccessFlags.Read,
                OptionFlags       = ResourceOptionFlags.None
            });

            context.CopyResource(textureToSave, outputTexture);
            var mappedResource = context.MapSubresource(outputTexture, 0, 0, MapMode.Read, SharpDX.Direct3D11.MapFlags.None, out var dataStream);
            var dataRectangle  = new DataRectangle
            {
                DataPointer = dataStream.DataPointer,
                Pitch       = mappedResource.RowPitch
            };
            var imagingFactory = new ImagingFactory();
            var bitmap         = new Bitmap(imagingFactory, outputTexture.Description.Width, outputTexture.Description.Height, PixelFormat.Format32bppRGBA, dataRectangle);

            using (var stream = new MemoryStream())
                using (var bitmapEncoder = new PngBitmapEncoder(imagingFactory, stream))
                    using (var bitmapFrame = new BitmapFrameEncode(bitmapEncoder))
                    {
                        bitmapFrame.Initialize();
                        bitmapFrame.SetSize(bitmap.Size.Width, bitmap.Size.Height);
                        var pixelFormat = PixelFormat.FormatDontCare;
                        bitmapFrame.SetPixelFormat(ref pixelFormat);
                        bitmapFrame.WriteSource(bitmap);
                        bitmapFrame.Commit();
                        bitmapEncoder.Commit();
                        FileIO.WriteBytesAsync(textureFile, stream.ToArray()).AsTask().Wait(-1);
                    }
            context.UnmapSubresource(outputTexture, 0);
            dataStream.Dispose();
            outputTexture.Dispose();
            bitmap.Dispose();
            imagingFactory.Dispose();
        }
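A hedged usage sketch for ExportTexture, written for an async method and assuming the app declares the Pictures Library capability; the file and texture names are illustrative only.

        var picturesFolder = Windows.Storage.KnownFolders.PicturesLibrary;
        var captureFile    = await picturesFolder.CreateFileAsync("capture.png", Windows.Storage.CreationCollisionOption.ReplaceExisting);
        ExportTexture(deviceResources, captureFile, renderTargetTexture); // renderTargetTexture is an assumed Texture2D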
Example #4
        /// <summary>
        /// Releases resources associated with a holographic display back buffer.
        /// </summary>
        public void ReleaseResourcesForBackBuffer(DeviceResources deviceResources)
        {
            var context = deviceResources.D3DDeviceContext;

            RemoveAndDispose(ref d3dBackBuffer);
            RemoveAndDispose(ref d3dRenderTargetView);
            RemoveAndDispose(ref d3dDepthStencilView);

            const int D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT = 8;

            RenderTargetView[] nullViews = new RenderTargetView[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT];

            // Ensure system references to the back buffer are released by clearing the render
            // target from the graphics pipeline state, and then flushing the Direct3D context.
            context.OutputMerger.SetRenderTargets(null, nullViews);
            context.Flush();
        }
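A hedged sketch of where this release is typically triggered, assuming the app keeps a per-camera resource map (cameraResourcesMap is a hypothetical dictionary keyed by the camera's Id, and the containing class name is assumed):

        holographicSpace.CameraRemoved += (sender, args) =>
        {
            if (cameraResourcesMap.TryGetValue(args.Camera.Id, out var cameraResources))
            {
                cameraResources.ReleaseResourcesForBackBuffer(deviceResources);
                cameraResourcesMap.Remove(args.Camera.Id);
            }
        };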
Example #5
        /// <summary>
        /// Loads a Wavefront OBJ file from the app package and builds a model from its
        /// position, texture coordinate, and normal data.
        /// </summary>
        public static async Task <Model> LoadObj(DeviceResources deviceResources, string path)
        {
            var folder      = Windows.ApplicationModel.Package.Current.InstalledLocation;
            var fileContent = await FileIO.ReadLinesAsync(await folder.GetFileAsync(path));

            var positions = new List <Vector3>();
            var uvs       = new List <Vector2>();
            var normals   = new List <Vector3>();
            var output    = new List <VertexPositionNormalUV>();

            // OBJ files store numbers with '.' as the decimal separator, so parse with
            // the invariant culture regardless of the system locale.
            var invariant = System.Globalization.CultureInfo.InvariantCulture;

            foreach (var line in fileContent)
            {
                var parts = line.Split(' ');
                if (parts[0] == "v")
                {
                    positions.Add(new Vector3(float.Parse(parts[1], invariant), float.Parse(parts[2], invariant), float.Parse(parts[3], invariant)));
                }
                else if (parts[0] == "vt")
                {
                    uvs.Add(new Vector2(float.Parse(parts[1], invariant), float.Parse(parts[2], invariant)));
                }
                else if (parts[0] == "vn")
                {
                    normals.Add(new Vector3(float.Parse(parts[1], invariant), float.Parse(parts[2], invariant), float.Parse(parts[3], invariant)));
                }
                else if (parts[0] == "f")
                {
                    for (int i = 1; i <= 3; i++)
                    {
                        var indices = parts[i].Split('/').Select(index => int.Parse(index) - 1).ToArray();
                        output.Add(new VertexPositionNormalUV
                        {
                            Position = positions[indices[0]],
                            UV       = uvs[indices[1]],
                            Normal   = normals[indices[2]]
                        });
                    }
                }
            }
            return(new Model(deviceResources, output.ToArray()));
        }
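A hedged usage sketch, assuming the method lives on the Model class and that the OBJ file is packaged with the app; the asset path is illustrative only.

        Model cube = await Model.LoadObj(deviceResources, "Assets\\cube.obj"); // hypothetical asset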
Example #6
        /// <summary>
        /// Updates resources associated with a holographic camera's swap chain.
        /// The app does not access the swap chain directly, but it does create
        /// resource views for the back buffer.
        /// </summary>
        public void CreateResourcesForBackBuffer(
            DeviceResources deviceResources,
            HolographicCameraRenderingParameters cameraParameters
            )
        {
            var device = deviceResources.D3DDevice;

            // Get the WinRT object representing the holographic camera's back buffer.
            IDirect3DSurface surface = cameraParameters.Direct3D11BackBuffer;

            // Get a DXGI interface for the holographic camera's back buffer.
            // Holographic cameras do not provide the DXGI swap chain, which is owned
            // by the system. The Direct3D back buffer resource is provided using WinRT
            // interop APIs.
            InteropStatics.IDirect3DDxgiInterfaceAccess surfaceDxgiInterfaceAccess = surface as InteropStatics.IDirect3DDxgiInterfaceAccess;
            IntPtr pResource = surfaceDxgiInterfaceAccess.GetInterface(InteropStatics.ID3D11Resource);

            SharpDX.Direct3D11.Resource resource = SharpDX.Direct3D11.Resource.FromPointer <SharpDX.Direct3D11.Resource>(pResource);
            Marshal.Release(pResource);

            // Get a Direct3D interface for the holographic camera's back buffer.
            Texture2D cameraBackBuffer = resource.QueryInterface <Texture2D>();

            // Determine if the back buffer has changed. If so, ensure that the render target view
            // is for the current back buffer.
            if ((null == d3dBackBuffer) || (d3dBackBuffer.NativePointer != cameraBackBuffer.NativePointer))
            {
                // This can change every frame as the system moves to the next buffer in the
                // swap chain. This mode of operation will occur when certain rendering modes
                // are activated.
                d3dBackBuffer = cameraBackBuffer;

                // Create a render target view of the back buffer.
                // Creating this resource is inexpensive, and is better than keeping track of
                // the back buffers in order to pre-allocate render target views for each one.
                d3dRenderTargetView = ToDispose(new RenderTargetView(device, BackBufferTexture2D));

                // Get the DXGI format for the back buffer.
                // This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat().
                Texture2DDescription backBufferDesc = BackBufferTexture2D.Description;
                dxgiFormat = backBufferDesc.Format;

                // Check for render target size changes.
                Size currentSize = holographicCamera.RenderTargetSize;
                if (d3dRenderTargetSize != currentSize)
                {
                    // Set render target size.
                    d3dRenderTargetSize = HolographicCamera.RenderTargetSize;

                    // A new depth stencil view is also needed.
                    RemoveAndDispose(ref d3dDepthStencilView);
                }
            }

            // Refresh depth stencil resources, if needed.
            if (null == DepthStencilView)
            {
                // Create a depth stencil view for use with 3D rendering if needed.
                var depthStencilDesc = new Texture2DDescription
                {
                    Format            = SharpDX.DXGI.Format.D16_UNorm,
                    Width             = (int)RenderTargetSize.Width,
                    Height            = (int)RenderTargetSize.Height,
                    ArraySize         = IsRenderingStereoscopic ? 2 : 1, // Create two textures when rendering in stereo.
                    MipLevels         = 1,                               // Use a single mipmap level.
                    BindFlags         = BindFlags.DepthStencil,
                    SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0)
                };

                using (var depthStencil = new Texture2D(device, depthStencilDesc))
                {
                    var depthStencilViewDesc = new DepthStencilViewDescription();
                    depthStencilViewDesc.Dimension = IsRenderingStereoscopic ? DepthStencilViewDimension.Texture2DArray : DepthStencilViewDimension.Texture2D;
                    depthStencilViewDesc.Texture2DArray.ArraySize = IsRenderingStereoscopic ? 2 : 0;
                    d3dDepthStencilView = ToDispose(new DepthStencilView(device, depthStencil, depthStencilViewDesc));
                }
            }

            // Create the constant buffer, if needed.
            if (null == viewProjectionConstantBuffer)
            {
                // Create a constant buffer to store view and projection matrices for the camera.
                ViewProjectionConstantBuffer viewProjectionConstantBufferData = new ViewProjectionConstantBuffer();
                viewProjectionConstantBuffer = ToDispose(SharpDX.Direct3D11.Buffer.Create(
                                                             device,
                                                             BindFlags.ConstantBuffer,
                                                             ref viewProjectionConstantBufferData));
            }
        }
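A hedged sketch of how these per-camera resources are typically refreshed each frame, assuming holographicFrame is the current HolographicFrame and cameraResources is the per-camera instance of this class (with one camera, for simplicity):

        foreach (var cameraPose in holographicFrame.CurrentPrediction.CameraPoses)
        {
            var renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
            cameraResources.CreateResourcesForBackBuffer(deviceResources, renderingParameters);
        }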
Example #7
        /// <summary>
        /// Updates the constant buffer for the display with view and projection
        /// matrices for the current frame.
        /// </summary>
        public void UpdateViewProjectionBuffer(
            DeviceResources deviceResources,
            HolographicCameraPose cameraPose,
            SpatialCoordinateSystem coordinateSystem
            )
        {
            // The system changes the viewport on a per-frame basis for system optimizations.
            d3dViewport.X        = (float)cameraPose.Viewport.Left;
            d3dViewport.Y        = (float)cameraPose.Viewport.Top;
            d3dViewport.Width    = (float)cameraPose.Viewport.Width;
            d3dViewport.Height   = (float)cameraPose.Viewport.Height;
            d3dViewport.MinDepth = 0;
            d3dViewport.MaxDepth = 1;

            // The projection transform for each frame is provided by the HolographicCameraPose.
            HolographicStereoTransform cameraProjectionTransform = cameraPose.ProjectionTransform;

            // Get a container object with the view and projection matrices for the given
            // pose in the given coordinate system.
            HolographicStereoTransform? viewTransformContainer = cameraPose.TryGetViewTransform(coordinateSystem);

            // If TryGetViewTransform returns null, that means the pose and coordinate system
            // cannot be understood relative to one another; content cannot be rendered in this
            // coordinate system for the duration of the current frame.
            // This usually means that positional tracking is not active for the current frame, in
            // which case it is possible to use a SpatialLocatorAttachedFrameOfReference to render
            // content that is not world-locked instead.
            ViewProjectionConstantBuffer viewProjectionConstantBufferData = new ViewProjectionConstantBuffer();
            bool viewTransformAcquired = viewTransformContainer.HasValue;

            if (viewTransformAcquired)
            {
                // Otherwise, the set of view transforms can be retrieved.
                HolographicStereoTransform viewCoordinateSystemTransform = viewTransformContainer.Value;

                // Update the view matrices. Holographic cameras (such as Microsoft HoloLens) are
                // constantly moving relative to the world. The view matrices need to be updated
                // every frame.
                viewProjectionConstantBufferData.viewProjectionLeft = Matrix4x4.Transpose(
                    viewCoordinateSystemTransform.Left * cameraProjectionTransform.Left
                    );
                viewProjectionConstantBufferData.viewProjectionRight = Matrix4x4.Transpose(
                    viewCoordinateSystemTransform.Right * cameraProjectionTransform.Right
                    );
            }

            // Use the D3D device context to update Direct3D device-based resources.
            var context = deviceResources.D3DDeviceContext;

            // Loading is asynchronous. Resources must be created before they can be updated.
            if (context == null || viewProjectionConstantBuffer == null || !viewTransformAcquired)
            {
                framePending = false;
            }
            else
            {
                // Update the view and projection matrices.
                context.UpdateSubresource(ref viewProjectionConstantBufferData, viewProjectionConstantBuffer);

                framePending = true;
            }
        }
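A hedged per-frame sketch, assuming stationaryReferenceFrame is a SpatialStationaryFrameOfReference created at startup and cameraPose comes from the current frame's prediction:

        cameraResources.UpdateViewProjectionBuffer(
            deviceResources, cameraPose, stationaryReferenceFrame.CoordinateSystem);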
Example #8
        /// <summary>
        /// Releases all device-based resources held for the camera, including the back
        /// buffer resources and the view-projection constant buffer.
        /// </summary>
        public void ReleaseAllDeviceResources(DeviceResources deviceResources)
        {
            ReleaseResourcesForBackBuffer(deviceResources);
            RemoveAndDispose(ref viewProjectionConstantBuffer);
        }
Example #9
        /// <summary>
        /// Stores the device resources and texture resolution used for a mesh's texture set.
        /// </summary>
        public MeshTextureSet(DeviceResources resources, int resolution)
        {
            Resources  = resources;
            Resolution = resolution;
        }