/// <summary>
/// Converts an NV12/P010 hardware frame to RGB through the D3D11 video processor
/// and binds the resulting SRV + pixel shader for presentation.
/// Best-effort: a failure drops this frame but never throws to the caller.
/// </summary>
/// <param name="frame">Decoded frame whose <c>textureHW</c> is the GPU input surface.</param>
/// <param name="dispose">When true (default), releases <c>frame.textureHW</c> before returning.</param>
// TODO: Possibly process it directly after decoding (back to FFmpeg) to avoid Flush?
private void PresentNV12P010(MediaFrame frame, bool dispose = true)
{
    try
    {
        // Release the input view left over from the previous frame before creating a new one
        // (this method intentionally keeps vpiv alive across the frame, unlike PresentFrame).
        Utilities.Dispose(ref vpiv);

        videoDevice1.CreateVideoProcessorInputView(frame.textureHW, vpe, vpivd, out vpiv);
        vpsa[0] = new VideoProcessorStream { PInputSurface = vpiv, Enable = new RawBool(true) };
        videoContext1.VideoProcessorBlt(videoProcessor, vpov, 0, 1, vpsa);

        context.PixelShader.SetShaderResource(0, srvRGB);
        context.PixelShader.Set(pixelShader);
    }
    catch (Exception e)
    {
        // Best-effort by design (a failed blt just drops the frame), but don't
        // swallow the error silently — leave a trace for diagnostics.
        System.Diagnostics.Debug.WriteLine($"PresentNV12P010 failed: {e}");
    }
    finally
    {
        if (dispose)
        {
            Utilities.Dispose(ref frame.textureHW);
        }
    }
}
/* ID3D11VideoContext::VideoProcessorBlt | https://docs.microsoft.com/en-us/windows/win32/api/d3d11/nf-d3d11-id3d11videocontext-videoprocessorblt
 *
 * HRESULT VideoProcessorBlt (
 *   ID3D11VideoProcessor            *pVideoProcessor,
 *   ID3D11VideoProcessorOutputView  *pView,
 *   UINT                             OutputFrame,
 *   UINT                             StreamCount,
 *   const D3D11_VIDEO_PROCESSOR_STREAM *pStreams );
 *
 * 1. Creates a new Video Processor Input View over the given hardware texture,
 *    passed in via the Video Processor Streams array
 * 2. Calls VideoProcessorBlt to convert (on the GPU) the NV12 texture to our
 *    BackBuffer RGBA/BGRA texture
 * 3. Finally Presents the frame to the outputHandle (SampleUI Form)
 */
public void PresentFrame(Texture2D textureHW)
{
    try
    {
        videoDevice1.CreateVideoProcessorInputView(textureHW, vpe, vpivd, out vpiv);
        vpsa[0] = new VideoProcessorStream { PInputSurface = vpiv, Enable = new RawBool(true) };
        videoContext1.VideoProcessorBlt(videoProcessor, vpov, 0, 1, vpsa);
        _swapChain.Present(0, PresentFlags.None);
    }
    finally
    {
        // Release the input view and the caller's texture even when the blt or
        // Present throws — otherwise both GPU resources leak on the failure path.
        Utilities.Dispose(ref vpiv);
        Utilities.Dispose(ref textureHW);
    }
}
/* ID3D11VideoContext::VideoProcessorBlt | https://docs.microsoft.com/en-us/windows/win32/api/d3d11/nf-d3d11-id3d11videocontext-videoprocessorblt
 *
 * HRESULT VideoProcessorBlt (
 *   ID3D11VideoProcessor            *pVideoProcessor,
 *   ID3D11VideoProcessorOutputView  *pView,
 *   UINT                             OutputFrame,
 *   UINT                             StreamCount,
 *   const D3D11_VIDEO_PROCESSOR_STREAM *pStreams );
 *
 * 1. Opens the shared NV12 texture (nv12SharedResource) on our SharpDX device
 *    from FFmpeg's device
 * 2. Creates a new Video Processor Input View that we pass in the Video
 *    Processor Streams array
 * 3. Calls VideoProcessorBlt to convert (on the GPU) the shared NV12 texture
 *    to our BackBuffer RGBA/BGRA texture
 * 4. Finally Presents the frame to the outputHandle (SampleUI Form)
 */
public void PresentFrame(IntPtr nv12SharedResource)
{
    Texture2D nv12SharedTexture = null;
    try
    {
        nv12SharedTexture = _device.OpenSharedResource<Texture2D>(nv12SharedResource);
        videoDevice1.CreateVideoProcessorInputView(nv12SharedTexture, vpe, vpivd, out vpiv);
        vpsa[0] = new VideoProcessorStream { PInputSurface = vpiv, Enable = new RawBool(true) };
        videoContext1.VideoProcessorBlt(videoProcessor, vpov, 0, 1, vpsa);
        _swapChain.Present(0, PresentFlags.None);
    }
    finally
    {
        // Release the input view and the opened shared texture even when the blt
        // or Present throws — otherwise both GPU resources leak on the failure path.
        // Utilities.Dispose is null-safe, so this is fine if OpenSharedResource threw.
        Utilities.Dispose(ref vpiv);
        Utilities.Dispose(ref nv12SharedTexture);
    }
}