/// <summary>
/// Ensures the render target is large enough for the given game geometry,
/// recreating it as a square power-of-two surface when it is too small.
/// </summary>
/// <param name="device">Win2D device used to allocate the surface and bitmap.</param>
/// <param name="geometry">Geometry describing aspect ratio and maximum dimensions.</param>
public void UpdateRenderTargetSize(CanvasDevice device, GameGeometry geometry)
{
    RenderTargetAspectRatio = geometry.AspectRatio;
    if (RenderTargetAspectRatio < 0.1f)
    {
        // Fall back to the base-resolution ratio when the reported ratio is degenerate.
        RenderTargetAspectRatio = (float)(geometry.BaseWidth) / geometry.BaseHeight;
    }

    lock (RenderTargetLock)
    {
        // Size check is performed under the lock: the original checked before
        // acquiring it, so another thread could swap or dispose the target
        // between the check and the recreation below.
        if (RenderTarget != null)
        {
            var currentSize = RenderTarget.Size;
            if (currentSize.Width >= geometry.MaxWidth && currentSize.Height >= geometry.MaxHeight)
            {
                return;
            }
        }

        // Square power-of-two target big enough for the game's maximum dimensions.
        var size = Math.Max(Math.Max(geometry.MaxWidth, geometry.MaxHeight), RenderTargetMinSize);
        size = ClosestGreaterPowerTwo(size);

        RenderTarget?.Dispose();
        RenderTargetSurface?.Dispose();
        RenderTargetSurface = D3DSurfaceManager.CreateWriteableD3DSurface(device, size, size);
        RenderTarget = CanvasBitmap.CreateFromDirect3D11Surface(device, RenderTargetSurface);
    }
}
/// <summary>
/// Copies the incoming frame into the swap chain back buffer and presents it.
/// </summary>
/// <param name="surface">The captured frame to display.</param>
public void PresentSurface(IDirect3DSurface surface)
{
    using (var frameTexture = Direct3D11Helpers.CreateSharpDXTexture2D(surface))
    {
        // Size the swap chain from the first frame we see; subsequent frames reuse it.
        if (!_isSwapChainSized)
        {
            var desc = frameTexture.Description;
            _swapChain.ResizeBuffers(
                2,
                desc.Width,
                desc.Height,
                SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                SharpDX.DXGI.SwapChainFlags.None);
            _isSwapChainSized = true;
        }

        using (var backBuffer = _swapChain.GetBackBuffer<SharpDX.Direct3D11.Texture2D>(0))
        using (var rtv = new SharpDX.Direct3D11.RenderTargetView(_d3dDevice, backBuffer))
        {
            var context = _d3dDevice.ImmediateContext;
            // Clear to opaque black, then blit the frame into the back buffer.
            context.ClearRenderTargetView(rtv, new SharpDX.Mathematics.Interop.RawColor4(0, 0, 0, 1));
            context.CopyResource(frameTexture, backBuffer);
        }
    }

    // Present with a sync interval of 1 (vsync).
    _swapChain.Present(1, SharpDX.DXGI.PresentFlags.None);
}
/// <summary>
/// Draws each overlay surface as an elliptical "bubble" filled with its video,
/// then stamps the caption text onto the output frame.
/// </summary>
/// <param name="context">Composition context supplying output frame and overlays.</param>
public void CompositeFrame(CompositeVideoFrameContext context)
{
    IDirect3DSurface outputSurface = context.OutputFrame.Direct3DSurface;
    using (CanvasRenderTarget target = CanvasRenderTarget.CreateFromDirect3D11Surface(_canvasDevice, outputSurface))
    using (CanvasDrawingSession session = target.CreateDrawingSession())
    {
        foreach (var overlaySurface in context.SurfacesToOverlay)
        {
            var overlay = context.GetOverlayForSurface(overlaySurface);
            float overlayWidth = (float)overlay.Position.Width;
            float overlayHeight = (float)overlay.Position.Height;
            float left = (float)overlay.Position.X;
            float top = (float)overlay.Position.Y;

            using (var overlayBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, overlaySurface))
            using (var videoBrush = new CanvasImageBrush(_canvasDevice, overlayBitmap))
            {
                // Scale the source clip so it fills the overlay rectangle, then
                // position the brush at the overlay's top-left corner.
                float scale = overlayWidth / overlay.Clip.GetVideoEncodingProperties().Width;
                videoBrush.Transform =
                    Matrix3x2.CreateScale(scale) *
                    Matrix3x2.CreateTranslation(left, top);

                // Clip the brush to an ellipse centered in the overlay rectangle.
                session.FillEllipse(
                    new Vector2(left + overlayWidth / 2, top + overlayHeight / 2),
                    overlayWidth / 2,
                    overlayHeight / 2,
                    videoBrush);
            }
        }

        session.DrawText(
            "Party\nTime!",
            new Vector2(_backgroundProperties.Width / 1.5f, 100),
            Windows.UI.Colors.CornflowerBlue,
            new CanvasTextFormat()
            {
                FontSize = (float)_backgroundProperties.Width / 13,
                FontWeight = new FontWeight() { Weight = 999 },
                HorizontalAlignment = CanvasHorizontalAlignment.Center,
                VerticalAlignment = CanvasVerticalAlignment.Center
            });
    }
}
// Unwraps the native DXGI surface behind a WinRT IDirect3DSurface.
// NOTE(review): GetInterface is passed the ID3D11Resource constant, yet the
// resulting pointer is wrapped as an IDXGISurface — confirm that constant
// actually holds the IDXGISurface IID (the name may simply be misleading).
internal static IDXGISurface CreateDXGISurface(IDirect3DSurface direct3DSurface)
{
    IDirect3DDxgiInterfaceAccess dxgiSurfaceInterfaceAccess = (IDirect3DDxgiInterfaceAccess)direct3DSurface;
    // GetInterface hands out an AddRef'd pointer; ownership of that reference
    // passes to the wrapper constructed below.
    IntPtr surface = dxgiSurfaceInterfaceAccess.GetInterface(ID3D11Resource);
    return (new IDXGISurface(surface));
}
public void GraphicsInteropFromManaged()
{
    // Exercises interop between this C# component and the C++/CX
    // GraphicsDeviceComponent: Direct3DDevice / Direct3DSurface are used to
    // pass an IDXGIDevice and IDXGISurface across the language boundary.

    // Create a device; Trim() should be callable without anything bad happening.
    IDirect3DDevice device = NativeComponent.DeviceCreator.CreateDevice();
    device.Trim();

    // Create a surface on that device with known dimensions and format.
    const int width = 128;
    const int height = 256;
    var format = DirectXPixelFormat.R32Float;
    IDirect3DSurface surface = NativeComponent.SurfaceCreator.CreateSurface(device, width, height, format);

    // The surface description must round-trip the requested parameters,
    // with no multisampling (count 1, quality 0).
    var desc = surface.Description;
    Assert.AreEqual(width, desc.Width);
    Assert.AreEqual(height, desc.Height);
    Assert.AreEqual(format, desc.Format);
    Assert.AreEqual(1, desc.MultisampleDescription.Count);
    Assert.AreEqual(0, desc.MultisampleDescription.Quality);
}
/// <summary>
/// For every playing video component, exposes its D3D12 render target to the
/// MediaPlayer (a D3D11 consumer) via the 11-on-12 interop layer and copies the
/// current video frame into it.
/// </summary>
/// <param name="gameTime">Unused; required by the base Draw signature.</param>
public override void Draw(GameTime gameTime)
{
    foreach (VideoComponent videoComponent in Components)
    {
        MediaPlayer mediaPlayer = videoComponent.MediaPlayer;
        if (mediaPlayer.PlaybackSession.PlaybackState != MediaPlaybackState.Playing)
        {
            continue;
        }
        if (videoComponent.Target == null)
        {
            continue;
        }

        using Vortice.Direct3D11.ID3D11On12Device device11On12 =
            ((Vortice.Direct3D11.ID3D11Device)GraphicsDevice.Direct3D11Device)
                .QueryInterface<Vortice.Direct3D11.ID3D11On12Device>();

        // Wrap the D3D12 resource as a D3D11 resource in the copy-destination state.
        var d3D11RenderTarget = device11On12.CreateWrappedResource(
            videoComponent.Target.NativeResource,
            new Vortice.Direct3D11.ResourceFlags { BindFlags = (int)Direct3DBindings.ShaderResource },
            (int)Vortice.Direct3D12.ResourceStates.CopyDestination,
            (int)Vortice.Direct3D12.ResourceStates.CopyDestination);
        try
        {
            using (Vortice.DXGI.IDXGISurface dxgiSurface = d3D11RenderTarget.QueryInterface<Vortice.DXGI.IDXGISurface>())
            {
                IDirect3DSurface surface = Direct3DInterop.CreateDirect3DSurface(dxgiSurface);
                mediaPlayer.CopyFrameToVideoSurface(surface);
            }
        }
        finally
        {
            // Hand the resource back to D3D12, then release the wrapper itself:
            // the original never disposed the wrapped-resource COM object, and
            // skipped ReleaseWrappedResources entirely on exception.
            device11On12.ReleaseWrappedResources(d3D11RenderTarget);
            d3D11RenderTarget.Dispose();
        }
    }
}
/// <summary>
/// Refreshes the back buffer (and per-eye views) from the holographic frame's
/// rendering parameters, recreating wrappers only when the native resource changed.
/// </summary>
private void UpdateBackBuffer()
{
    // System-owned back buffer for the first predicted camera pose.
    IDirect3DSurface surface = HolographicFrame.GetRenderingParameters(HolographicFrame.CurrentPrediction.CameraPoses[0]).Direct3D11BackBuffer;
    IDirect3DDxgiInterfaceAccess surfaceDxgiInterfaceAccess = surface as IDirect3DDxgiInterfaceAccess;
    IntPtr resource = surfaceDxgiInterfaceAccess.GetInterface(ID3D11Resource);
    if (backBuffer == null || backBuffer.NativeResource.NativePointer != resource)
    {
        // Clean up references to previous resources.
        backBuffer?.Dispose();
        LeftEyeBuffer?.Dispose();
        RightEyeBuffer?.Dispose();

        // This can change every frame as the system moves to the next buffer in the
        // swap chain. This mode of operation will occur when certain rendering modes
        // are activated. The Texture2D wrapper takes ownership of the reference
        // returned by GetInterface.
        Texture2D d3DBackBuffer = new Texture2D(resource);
        backBuffer = new Texture(GraphicsDevice).InitializeFromImpl(d3DBackBuffer, false);
        // One single-slice view per eye of the (array) back buffer.
        LeftEyeBuffer = backBuffer.ToTextureView(new TextureViewDescription() { ArraySlice = 0, Type = ViewType.Single });
        RightEyeBuffer = backBuffer.ToTextureView(new TextureViewDescription() { ArraySlice = 1, Type = ViewType.Single });
    }
    else
    {
        // Back buffer unchanged: release the reference GetInterface added,
        // otherwise it leaks once per call. (The original skipped this.)
        System.Runtime.InteropServices.Marshal.Release(resource);
    }
    Description.BackBufferFormat = backBuffer.Format;
    Description.BackBufferWidth = backBuffer.Width;
    Description.BackBufferHeight = backBuffer.Height;
}
/// <summary>
/// Resolves the raw ID3D11Texture2D pointer behind a WinRT surface wrapper.
/// </summary>
/// <param name="surface">The surface whose native texture pointer is wanted.</param>
/// <returns>The AddRef'd native interface pointer; caller owns the reference.</returns>
public static IntPtr GetPtr(IDirect3DSurface surface)
{
    var interopAccess = (IDirect3DDxgiInterfaceAccess)surface;
    return interopAccess.GetInterface(ID3D11Texture2D);
}
/// <summary>
/// Renders the input frame as a blurred, darkened, desaturated background with a
/// drop-shadowed caption whose glyphs are filled with the original video.
/// </summary>
/// <param name="context">Supplies the input and output frame surfaces.</param>
public void ProcessFrame(ProcessVideoFrameContext context)
{
    IDirect3DSurface input = context.InputFrame.Direct3DSurface;
    IDirect3DSurface output = context.OutputFrame.Direct3DSurface;

    using (CanvasBitmap frame = CanvasBitmap.CreateFromDirect3D11Surface(canvasDevice, input))
    using (CanvasRenderTarget target = CanvasRenderTarget.CreateFromDirect3D11Surface(canvasDevice, output))
    using (CanvasDrawingSession session = target.CreateDrawingSession())
    using (CanvasImageBrush frameBrush = new CanvasImageBrush(canvasDevice, frame))
    using (CanvasCommandList caption = new CanvasCommandList(canvasDevice))
    {
        // Record the caption into a command list so it can feed both the
        // shadow effect and the final composite.
        using (var captionSession = caption.CreateDrawingSession())
        {
            captionSession.DrawText(
                "Win2D\nMediaClip",
                (float)frame.Size.Width / 2,
                (float)frame.Size.Height / 2,
                frameBrush,
                new CanvasTextFormat()
                {
                    FontSize = (float)frame.Size.Width / 5,
                    FontWeight = new FontWeight() { Weight = 999 },
                    HorizontalAlignment = CanvasHorizontalAlignment.Center,
                    VerticalAlignment = CanvasVerticalAlignment.Center
                });
        }

        // Background: desaturate, darken, then blur the original frame.
        var background = new GaussianBlurEffect()
        {
            BlurAmount = 10,
            BorderMode = EffectBorderMode.Hard,
            Source = new BrightnessEffect()
            {
                BlackPoint = new Vector2(0.5f, 0.7f),
                Source = new SaturationEffect() { Saturation = 0, Source = frame }
            }
        };
        var shadow = new ShadowEffect() { Source = caption, BlurAmount = 10 };
        var composite = new CompositeEffect() { Sources = { background, shadow, caption } };

        session.DrawImage(composite);
    }
}
/// <summary>
/// Pulls the native ID3D11Texture2D out of a WinRT surface and hands ownership
/// of that reference to a SharpDX Texture2D wrapper.
/// </summary>
/// <param name="surface">The WinRT surface to unwrap.</param>
/// <returns>A SharpDX texture wrapping the surface's native texture.</returns>
public static Texture2D CreateSharpDXTexture2D(IDirect3DSurface surface)
{
    var interopAccess = (IDirect3DDxgiInterfaceAccess)surface;
    return new Texture2D(interopAccess.GetInterface(_id3D11Texture2D));
}
/// <summary>
/// Releases the render target and its backing surface.
/// </summary>
public void Dispose()
{
    // Serialize with UpdateRenderTargetSize, which swaps these same fields
    // under RenderTargetLock; the original disposed them without the lock,
    // racing a concurrent resize.
    lock (RenderTargetLock)
    {
        RenderTarget?.Dispose();
        RenderTarget = null;
        RenderTargetSurface?.Dispose();
        RenderTargetSurface = null;
    }
}
/// <summary>
/// Queries the WinRT interop interface, resolves the native texture pointer,
/// and wraps it in a SharpDX texture (which assumes ownership of the reference).
/// </summary>
/// <param name="surface">The WinRT surface to unwrap.</param>
/// <returns>A SharpDX texture for the surface's native ID3D11Texture2D.</returns>
public static SharpDX.Direct3D11.Texture2D CreateSharpDXTexture2D(IDirect3DSurface surface)
{
    var interop = surface.As<IDirect3DDxgiInterfaceAccess>();
    IntPtr texturePtr = interop.GetInterface(ID3D11Texture2D);
    return new SharpDX.Direct3D11.Texture2D(texturePtr);
}
/// <summary>
/// Updates resources associated with a holographic camera's swap chain.
/// The app does not access the swap chain directly, but it does create
/// resource views for the back buffer.
/// </summary>
public void CreateResourcesForBackBuffer(
    DeviceResources deviceResources,
    HolographicCameraRenderingParameters cameraParameters)
{
    var device = deviceResources.D3DDevice;

    // Get the WinRT object representing the holographic camera's back buffer.
    IDirect3DSurface surface = cameraParameters.Direct3D11BackBuffer;

    // Get a DXGI interface for the holographic camera's back buffer.
    // Holographic cameras do not provide the DXGI swap chain, which is owned
    // by the system. The Direct3D back buffer resource is provided using WinRT
    // interop APIs.
    InteropStatics.IDirect3DDxgiInterfaceAccess surfaceDxgiInterfaceAccess = surface as InteropStatics.IDirect3DDxgiInterfaceAccess;
    IntPtr pResource = surfaceDxgiInterfaceAccess.GetInterface(InteropStatics.ID3D11Resource);
    // Wrap the pointer, then release the reference handed out by GetInterface.
    Resource resource = SharpDX.CppObject.FromPointer<Resource>(pResource);
    Marshal.Release(pResource);

    // Get a Direct3D interface for the holographic camera's back buffer.
    Texture2D cameraBackBuffer = resource.QueryInterface<Texture2D>();

    // Determine if the back buffer has changed. If so, ensure that the render target view
    // is for the current back buffer.
    if ((this.d3dBackBuffer == null) || (this.d3dBackBuffer.NativePointer != cameraBackBuffer.NativePointer))
    {
        // This can change every frame as the system moves to the next buffer in the
        // swap chain. This mode of operation will occur when certain rendering modes
        // are activated.
        this.d3dBackBuffer = cameraBackBuffer;

        // Get the DXGI format for the back buffer.
        // This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat().
        Texture2DDescription backBufferDesc = this.BackBufferTexture2D.Description;
        this.dxgiFormat = backBufferDesc.Format;

        // Check for render target size changes.
        Size currentSize = this.holographicCamera.RenderTargetSize;
        if (this.d3dRenderTargetSize != currentSize)
        {
            // Set render target size.
            this.d3dRenderTargetSize = this.HolographicCamera.RenderTargetSize;
        }
    }

    // Create the constant buffer, if needed.
    if (this.viewProjectionConstantBuffer == null)
    {
        // Create a constant buffer to store view and projection matrices for the camera.
        ViewProjectionConstantBuffer viewProjectionConstantBufferData = new ViewProjectionConstantBuffer();
        this.viewProjectionConstantBuffer = this.ToDispose(SharpDX.Direct3D11.Buffer.Create(
            device,
            BindFlags.ConstantBuffer,
            ref viewProjectionConstantBufferData));
    }
}
/// <summary>
/// Copies a Direct3D surface into a CPU-side bitmap and converts it to an SKImage.
/// </summary>
/// <param name="surface">The surface to copy.</param>
/// <returns>The converted SkiaSharp image.</returns>
public static SKImage Direct3dToSKImage(IDirect3DSurface surface)
{
    // NOTE(review): this busy-waits on the WinRT async operation with
    // Thread.Sleep; an awaited AsTask() path would avoid blocking — confirm
    // whether callers require a synchronous API before changing it.
    var copyOperation = SoftwareBitmap.CreateCopyFromSurfaceAsync(surface);
    while (copyOperation.Status == AsyncStatus.Started)
    {
        Thread.Sleep(50);
    }
    using SoftwareBitmap softwareBitmap = copyOperation.GetResults();
    return SoftwareBitmapToSKImage(softwareBitmap);
}
/// <summary>
/// Wraps a Direct3D surface's native texture in a SharpDX <see cref="Texture2D"/>.
/// </summary>
/// <param name="surface">The surface to create the SharpDX texture from.</param>
/// <returns>The wrapped texture, or <c>null</c> when the surface does not expose the interop interface.</returns>
internal static Texture2D? CreateSharpDXTexture2D(IDirect3DSurface surface)
{
    if (surface is not IDirect3DDxgiInterfaceAccess interopAccess)
    {
        return null;
    }
    return new Texture2D(interopAccess.GetInterface(ID3D11Texture2D));
}
// Fetches the system-owned back buffer for the first predicted camera pose and
// mirrors its format/size into the presentation parameters.
private ID3D11Texture2D GetHolographicBackBuffer()
{
    HolographicSurface = HolographicFrame.GetRenderingParameters(HolographicFrame.CurrentPrediction.CameraPoses[0]).Direct3D11BackBuffer;
    using IDXGISurface surface = Direct3DInterop.CreateDXGISurface(HolographicSurface);
    // NOTE(review): the DXGI surface's raw pointer is wrapped directly as an
    // ID3D11Texture2D, and the `using` disposes the DXGI wrapper while the
    // returned texture keeps that same pointer — confirm the interop GUIDs and
    // reference ownership are intentional here.
    ID3D11Texture2D d3DBackBuffer = new ID3D11Texture2D(surface.NativePointer);
    PresentationParameters.BackBufferFormat = (PixelFormat)d3DBackBuffer.Description.Format;
    PresentationParameters.BackBufferWidth = d3DBackBuffer.Description.Width;
    PresentationParameters.BackBufferHeight = d3DBackBuffer.Description.Height;
    return (d3DBackBuffer);
}
/// <summary>
/// Wraps a raw DXGI surface pointer in a WinRT IDirect3DSurface.
/// </summary>
/// <param name="dxgiSurface">Native IDXGISurface pointer.</param>
/// <returns>The WinRT surface, or <c>null</c> when the interop call fails.</returns>
internal static IDirect3DSurface CreateDirect3DSurface(IntPtr dxgiSurface)
{
    if (CreateDirect3D11SurfaceFromDXGISurface(dxgiSurface, out IntPtr inspectableSurface) != 0)
    {
        return null;
    }
    // GetObjectForIUnknown takes its own reference; drop the one from the interop call.
    var wrapped = Marshal.GetObjectForIUnknown(inspectableSurface) as IDirect3DSurface;
    Marshal.Release(inspectableSurface);
    return wrapped;
}
/// <summary>
/// Creates the WinRT interop surface for the video texture and starts playback.
/// </summary>
private void CreateD3D11Surface()
{
    // QueryInterface AddRefs, so the DXGI surface wrapper must be disposed;
    // the original leaked it.
    using (SharpDX.DXGI.Surface2 surface = m_texture.QueryInterface<SharpDX.DXGI.Surface2>())
    {
        UInt32 hr = InteropStatics.CreateDirect3D11SurfaceFromDXGISurface(surface.NativePointer, out IntPtr pUnknown);
        if (hr == 0)
        {
            // Wrap the inspectable pointer, then drop the interop call's reference.
            d3dInteropSurface = (IDirect3DSurface)Marshal.GetObjectForIUnknown(pUnknown);
            Marshal.Release(pUnknown);
        }
    }
    m_mediaPlayer.VideoFrameAvailable += M_mediaPlayer_VideoFrameAvailable;
    m_mediaPlayer.Play();
}
/// <summary>
/// Bridges a DXGI surface into a WinRT IDirect3DSurface.
/// </summary>
/// <param name="dxgiSurface">The DXGI surface to wrap.</param>
/// <returns>The WinRT surface.</returns>
/// <exception cref="InvalidOperationException">Thrown when the interop call fails.</exception>
internal static IDirect3DSurface CreateDirect3DSurface(IDXGISurface dxgiSurface)
{
    Result hr = CreateDirect3D11SurfaceFromDXGISurface(dxgiSurface.NativePointer, out IntPtr inspectable);
    if (hr.Failure)
    {
        throw new InvalidOperationException(hr.Code.ToString());
    }
    // GetObjectForIUnknown takes its own reference; drop the one from the interop call.
    var winRtSurface = (IDirect3DSurface)Marshal.GetObjectForIUnknown(inspectable);
    Marshal.Release(inspectable);
    return winRtSurface;
}
/// <summary>
/// Bridges a DXGI surface into a WinRT IDirect3DSurface.
/// </summary>
/// <param name="dxgiSurface">The DXGI surface to wrap.</param>
/// <returns>The WinRT surface.</returns>
/// <exception cref="COMException">Thrown when the interop call fails.</exception>
internal static IDirect3DSurface CreateDirect3DSurface(IDXGISurface dxgiSurface)
{
    Result hr = CreateDirect3D11SurfaceFromDXGISurface(dxgiSurface.NativePointer, out IntPtr inspectable);
    if (hr.Failure)
    {
        throw new COMException("Surface creation failed.", hr.Code);
    }
    // GetObjectForIUnknown takes its own reference; drop the one from the interop call.
    var winRtSurface = (IDirect3DSurface)Marshal.GetObjectForIUnknown(inspectable);
    Marshal.Release(inspectable);
    return winRtSurface;
}
/// <summary>
/// Wraps a raw DXGI surface pointer in a WinRT IDirect3DSurface.
/// </summary>
/// <param name="dxgiSurface">Native IDXGISurface pointer.</param>
/// <returns>The WinRT surface, or <c>null</c> when the interop call fails.</returns>
public static IDirect3DSurface CreateDirect3DSurface(IntPtr dxgiSurface)
{
    IDirect3DSurface wrapped = null;
    if (CreateDirect3D11SurfaceFromDXGISurface(dxgiSurface, out IntPtr inspectable) == 0)
    {
        // GetObjectForIUnknown takes its own reference; drop the interop call's.
        wrapped = (IDirect3DSurface)Marshal.GetObjectForIUnknown(inspectable);
        Marshal.Release(inspectable);
    }
    return wrapped;
}
/// <summary>
/// Exposes a SharpDX texture as a WinRT IDirect3DSurface.
/// </summary>
/// <param name="texture">The texture to expose.</param>
/// <returns>The WinRT surface, or <c>null</c> when the interop call fails.</returns>
public static IDirect3DSurface CreateDirect3DSurfaceFromSharpDXTexture(SharpDX.Direct3D11.Texture2D texture)
{
    // Acquire the DXGI interface for the texture, then wrap it via WinRT interop.
    using (var dxgiSurface = texture.QueryInterface<SharpDX.DXGI.Surface>())
    {
        if (CreateDirect3D11SurfaceFromDXGISurface(dxgiSurface.NativePointer, out IntPtr pUnknown) != 0)
        {
            return null;
        }
        // GetObjectForIUnknown takes its own reference; drop the interop call's.
        var winRtSurface = Marshal.GetObjectForIUnknown(pUnknown) as IDirect3DSurface;
        Marshal.Release(pUnknown);
        return winRtSurface;
    }
}
/// <summary>
/// Copies the current media frame to the destination surface, silently skipping
/// the frame when the GPU device instance has been suspended.
/// </summary>
/// <param name="destination">Target surface; ignored when null.</param>
public void CopyFrameToVideoSurface(IDirect3DSurface destination)
{
    if (destination == null)
    {
        return;
    }
    try
    {
        MediaPlayer.CopyFrameToVideoSurface(destination);
    }
    // -2005270523 == 0x887A0005 == DXGI_ERROR_DEVICE_REMOVED:
    // the GPU device instance has been suspended — drop the frame and let the
    // caller recover the device. All other exceptions propagate unchanged;
    // the original rethrew `new Exception(message)`, destroying the exception
    // type and stack trace.
    catch (Exception exception) when (exception.HResult == unchecked((int)0x887A0005))
    {
    }
}
/// <summary>
/// Copies the Direct3D surface into a CPU-side <see cref="SoftwareBitmap"/>.
/// </summary>
/// <param name="surface">The surface to copy.</param>
/// <returns>The copied bitmap.</returns>
private async Task<SoftwareBitmap> CreateSoftwareBitmapFromSurface(IDirect3DSurface surface) =>
    await SoftwareBitmap.CreateCopyFromSurfaceAsync(surface);
/// <summary>
/// Renders the current frame to each holographic display, according to the
/// current application and spatial positioning state. Returns true if the
/// frame was rendered to at least one display.
/// </summary>
public bool Render(HolographicFrame holographicFrame)
{
    // Don't try to render anything before the first Update.
    if (timer.FrameCount == 0)
    {
        return (false);
    }

    //
    // TODO: Add code for pre-pass rendering here.
    //
    // Take care of any tasks that are not specific to an individual holographic
    // camera. This includes anything that doesn't need the final view or projection
    // matrix, such as lighting maps.
    //

    // Up-to-date frame predictions enhance the effectiveness of image stablization and
    // allow more accurate positioning of holograms.
    holographicFrame.UpdateCurrentPrediction();
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Lock the set of holographic camera resources, then draw to each camera
    // in this frame.
    return (deviceResources.UseHolographicCameraResources(
        (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
    {
        bool atLeastOneCameraRendered = false;
        foreach (var cameraPose in prediction.CameraPoses)
        {
            // This represents the device-based resources for a HolographicCamera.
            CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

            // Get the device context.
            var context = deviceResources.D3DDeviceContext;
            var renderTargetView = cameraResources.BackBufferRenderTargetView;
            var depthStencilView = cameraResources.DepthStencilView;

            // Set render targets to the current holographic camera.
            context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

            // Clear the back buffer and depth stencil view.
            // Opaque displays (e.g. immersive headsets) are cleared to a solid color;
            // transparent (see-through) displays are cleared to transparent black so
            // unlit pixels pass through.
            if (canGetHolographicDisplayForCamera && cameraPose.HolographicCamera.Display.IsOpaque)
            {
                SharpDX.Mathematics.Interop.RawColor4 cornflowerBlue = new SharpDX.Mathematics.Interop.RawColor4(0.392156899f, 0.58431375f, 0.929411829f, 1.0f);
                context.ClearRenderTargetView(renderTargetView, cornflowerBlue);
            }
            else
            {
                SharpDX.Mathematics.Interop.RawColor4 transparent = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                context.ClearRenderTargetView(renderTargetView, transparent);
            }
            context.ClearDepthStencilView(
                depthStencilView,
                SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                1.0f,
                0);

            //
            // TODO: Replace the sample content with your own content.
            //
            // Notes regarding holographic content:
            //    * For drawing, remember that you have the potential to fill twice as many pixels
            //      in a stereoscopic render target as compared to a non-stereoscopic render target
            //      of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
            //      and only draw holograms that the user can see.
            //    * To help occlude hologram geometry, you can create a depth map using geometry
            //      data obtained via the surface mapping APIs. You can use this depth map to avoid
            //      rendering holograms that are intended to be hidden behind tables, walls,
            //      monitors, and so on.
            //    * On HolographicDisplays that are transparent, black pixels will appear transparent
            //      to the user. On such devices, you should clear the screen to Transparent as shown
            //      above. You should still use alpha blending to draw semitransparent holograms.
            //

            // The view and projection matrices for each holographic camera will change
            // every frame. This function refreshes the data in the constant buffer for
            // the holographic camera indicated by cameraPose.
            if (stationaryReferenceFrame != null)
            {
                cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, stationaryReferenceFrame.CoordinateSystem);
            }

            // Attach the view/projection constant buffer for this camera to the graphics pipeline.
            bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

#if DRAW_SAMPLE_CONTENT
            // Only render world-locked content when positional tracking is active.
            if (cameraActive)
            {
                // Draw the sample hologram.
                spinningCubeRenderer.Render();
                if (canCommitDirect3D11DepthBuffer)
                {
                    // On versions of the platform that support the CommitDirect3D11DepthBuffer API, we can
                    // provide the depth buffer to the system, and it will use depth information to stabilize
                    // the image at a per-pixel level.
                    HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
                    SharpDX.Direct3D11.Texture2D depthBuffer = cameraResources.DepthBufferTexture2D;

                    // Direct3D interop APIs are used to provide the buffer to the WinRT API.
                    SharpDX.DXGI.Resource1 depthStencilResource = depthBuffer.QueryInterface<SharpDX.DXGI.Resource1>();
                    SharpDX.DXGI.Surface2 depthDxgiSurface = new SharpDX.DXGI.Surface2(depthStencilResource, 0);
                    IDirect3DSurface depthD3DSurface = InteropStatics.CreateDirect3DSurface(depthDxgiSurface.NativePointer);
                    if (depthD3DSurface != null)
                    {
                        // Calling CommitDirect3D11DepthBuffer causes the system to queue Direct3D commands to
                        // read the depth buffer. It will then use that information to stabilize the image as
                        // the HolographicFrame is presented.
                        renderingParameters.CommitDirect3D11DepthBuffer(depthD3DSurface);
                    }
                }
            }
#endif
            atLeastOneCameraRendered = true;
        }

        return atLeastOneCameraRendered;
    }));
}
/// <summary>
/// Copies a GPU frame into a CPU-readable staging texture and extracts its pixels
/// into the shared byte array, along with the source window's current position.
/// </summary>
/// <param name="surface">Unused; kept for signature compatibility with callers.</param>
/// <param name="screenTexture2D">GPU texture holding the captured screen frame.</param>
/// <param name="Width">Frame width in pixels.</param>
/// <param name="Height">Frame height in pixels.</param>
private void CopyBitmap(IDirect3DSurface surface, SharpDX.Direct3D11.Texture2D screenTexture2D, int Width, int Height)
{
    try
    {
        // Staging texture with CPU read access so the GPU frame can be mapped.
        var textureDesc = new Texture2DDescription
        {
            CpuAccessFlags = CpuAccessFlags.Read,
            BindFlags = BindFlags.None,
            Format = Format.B8G8R8A8_UNorm,
            Width = Width,
            Height = Height,
            OptionFlags = ResourceOptionFlags.None,
            MipLevels = 1,
            ArraySize = 1,
            SampleDescription = { Count = 1, Quality = 0 },
            Usage = ResourceUsage.Staging
        };
        using (var screenTexture = new Texture2D(d3dDevice, textureDesc))
        {
            d3dDevice.ImmediateContext.CopyResource(screenTexture2D, screenTexture);
            var mapSource = d3dDevice.ImmediateContext.MapSubresource(screenTexture, 0, MapMode.Read, MapFlags.None);

            // Copy row by row: the mapped RowPitch may be wider than Width * 4 bytes.
            var sourcePtr = mapSource.DataPointer;
            byte[] managedArray = new byte[Width * Height * 4];
            for (int y = 0; y < Height; y++)
            {
                System.Runtime.InteropServices.Marshal.Copy(sourcePtr, managedArray, y * Width * 4, Width * 4);
                sourcePtr = IntPtr.Add(sourcePtr, mapSource.RowPitch);
            }
            d3dDevice.ImmediateContext.UnmapSubresource(screenTexture, 0);

            array = managedArray;
            lastX = Width;
            lastY = Height;
            if (hWnd != IntPtr.Zero)
            {
                Rect rect = new Rect();
                GetWindowRect(hWnd, ref rect);
                lastPositionX = rect.Left;
                lastPositionY = rect.Top;
                Console.WriteLine(rect.Left + " / " + rect.Right + " / " + rect.Top + " / " + rect.Bottom);
            }
            // The using block disposes screenTexture; the original's extra explicit
            // Dispose() inside the block was redundant, and the dead commented-out
            // System.Drawing bitmap path has been removed.
            isDataSuccess = true;
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("Error - " + e.ToString());
    }
    isWait = false;
}
/// <summary>
/// Updates resources associated with a holographic camera's swap chain.
/// The app does not access the swap chain directly, but it does create
/// resource views for the back buffer.
/// </summary>
public void CreateResourcesForBackBuffer(
    DeviceResources deviceResources,
    HolographicCameraRenderingParameters cameraParameters
    )
{
    var device = deviceResources.D3DDevice;

    // Get the WinRT object representing the holographic camera's back buffer.
    IDirect3DSurface surface = cameraParameters.Direct3D11BackBuffer;

    // Get a DXGI interface for the holographic camera's back buffer.
    // Holographic cameras do not provide the DXGI swap chain, which is owned
    // by the system. The Direct3D back buffer resource is provided using WinRT
    // interop APIs.
    InteropStatics.IDirect3DDxgiInterfaceAccess surfaceDxgiInterfaceAccess = surface as InteropStatics.IDirect3DDxgiInterfaceAccess;
    IntPtr pResource = surfaceDxgiInterfaceAccess.GetInterface(InteropStatics.ID3D11Resource);
    // Wrap the pointer, then release the reference handed out by GetInterface.
    SharpDX.Direct3D11.Resource resource = SharpDX.Direct3D11.Resource.FromPointer<SharpDX.Direct3D11.Resource>(pResource);
    Marshal.Release(pResource);

    // Get a Direct3D interface for the holographic camera's back buffer.
    Texture2D cameraBackBuffer = resource.QueryInterface<Texture2D>();

    // Determine if the back buffer has changed. If so, ensure that the render target view
    // is for the current back buffer.
    if ((null == d3dBackBuffer) || (d3dBackBuffer.NativePointer != cameraBackBuffer.NativePointer))
    {
        // This can change every frame as the system moves to the next buffer in the
        // swap chain. This mode of operation will occur when certain rendering modes
        // are activated.
        d3dBackBuffer = cameraBackBuffer;

        // Create a render target view of the back buffer.
        // Creating this resource is inexpensive, and is better than keeping track of
        // the back buffers in order to pre-allocate render target views for each one.
        d3dRenderTargetView = this.ToDispose(new RenderTargetView(device, BackBufferTexture2D));

        // Get the DXGI format for the back buffer.
        // This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat().
        Texture2DDescription backBufferDesc = BackBufferTexture2D.Description;
        dxgiFormat = backBufferDesc.Format;

        // Check for render target size changes.
        Size currentSize = holographicCamera.RenderTargetSize;
        if (d3dRenderTargetSize != currentSize)
        {
            // Set render target size.
            d3dRenderTargetSize = HolographicCamera.RenderTargetSize;

            // A new depth stencil view is also needed.
            this.RemoveAndDispose(ref d3dDepthStencilView);
        }
    }

    // Refresh depth stencil resources, if needed.
    if (null == DepthStencilView)
    {
        // Create a depth stencil view for use with 3D rendering if needed.
        var depthStencilDesc = new Texture2DDescription
        {
            Format = SharpDX.DXGI.Format.D16_UNorm,
            Width = (int)RenderTargetSize.Width,
            Height = (int)RenderTargetSize.Height,
            ArraySize = IsRenderingStereoscopic ? 2 : 1, // Create two textures when rendering in stereo.
            MipLevels = 1, // Use a single mipmap level.
            BindFlags = BindFlags.DepthStencil,
            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0)
        };

        using (var depthStencil = new Texture2D(device, depthStencilDesc))
        {
            var depthStencilViewDesc = new DepthStencilViewDescription();
            depthStencilViewDesc.Dimension = IsRenderingStereoscopic ? DepthStencilViewDimension.Texture2DArray : DepthStencilViewDimension.Texture2D;
            depthStencilViewDesc.Texture2DArray.ArraySize = IsRenderingStereoscopic ? 2 : 0;
            d3dDepthStencilView = this.ToDispose(new DepthStencilView(device, depthStencil, depthStencilViewDesc));
        }
    }

    // Create the constant buffer, if needed.
    if (null == viewProjectionConstantBuffer)
    {
        // Create a constant buffer to store view and projection matrices for the camera.
        ViewProjectionConstantBuffer viewProjectionConstantBufferData = new ViewProjectionConstantBuffer();
        viewProjectionConstantBuffer = this.ToDispose(SharpDX.Direct3D11.Buffer.Create(
            device,
            BindFlags.ConstantBuffer,
            ref viewProjectionConstantBufferData));
    }
}
// <SnippetCreateSoftwareBitmapFromSurface>
// Copies the given Direct3D surface into the softwareBitmap field.
// NOTE(review): async void — callers cannot await completion or observe
// exceptions; consider returning Task unless the docs-snippet shape is required.
private async void CreateSoftwareBitmapFromSurface(IDirect3DSurface surface)
{
    softwareBitmap = await SoftwareBitmap.CreateCopyFromSurfaceAsync(surface);
}