Example #1
 public iImmediateDrawContext begin(ref Matrix3x2 rootTransform, SwapChainFormats swapFormat, ITextureView rgbTarget, bool opaqueColor)
 {
     // Cache the render target and the root transform for the draw calls that follow
     this.rgbTarget     = rgbTarget;
     this.rootTransform = rootTransform;
     // Reset the transform stack and remember whether the target was cleared with an opaque color
     transform.clear();
     cleared = opaqueColor;
     return this;
 }
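A caller would typically use the returned drawing context for the 2D draw calls of the current frame. The sketch below is hypothetical: the drawDevice and swapChainRgbView names are made up for illustration, and only the begin() signature above is taken from the example.

 // Hypothetical caller sketch; assumes iImmediateDrawContext is disposable and that disposing it ends the frame
 using (var ctx = drawDevice.begin(ref rootTransform, swapFormats, swapChainRgbView, opaqueColor: true))
 {
     // ...record 2D drawing commands against ctx here
 }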
Example #2
        public RenderBase(IRenderDevice device, CSize renderTargetSize, SwapChainFormats formats, Vector4 borderColor, sDecodedVideoSize videoSize)
        {
            this.videoSize = videoSize;
            // Create vertex buffer
            vertexBuffer = createVideoVertexBuffer(device, renderTargetSize, ref videoSize);

            // Create pipeline state
            var pso = new PipelineStateDesc(false);

            pso.GraphicsPipeline.DepthStencilDesc.DepthEnable = false;
            pso.GraphicsPipeline.PrimitiveTopology            = PrimitiveTopology.TriangleList;
            pso.GraphicsPipeline.NumRenderTargets             = 1;
            pso.GraphicsPipeline.setRTVFormat(0, formats.color);
            pso.GraphicsPipeline.DSVFormat         = formats.depth;
            pso.ResourceLayout.DefaultVariableType = ShaderResourceVariableType.Static;

            // Shader compiler, and the folder with the HLSL files embedded into this assembly
            var            compiler = device.GetShaderFactory();
            iStorageFolder assets   = StorageFolder.embeddedResources(System.Reflection.Assembly.GetExecutingAssembly(), resourceFolder);

            using (var psf = device.CreatePipelineStateFactory())
            {
                psf.setName("Video PSO");
                setupVideoInputLayout(psf);

                using (var vs = compiler.compileHlslFile(assets, "VideoVS.hlsl", ShaderType.Vertex))
                    psf.graphicsVertexShader(vs);

                // The cropped UV rectangle and the border color are passed to the pixel shader compilation as strings
                (string uvMin, string uvMax) = videoUvCroppedRect(ref videoSize);
                string colorString = Utils.printFloat4(borderColor);
                using (var ps = compilePixelShader(compiler, assets, uvMin, uvMax, colorString))
                    psf.graphicsPixelShader(ps);

                psf.layoutVariable(ShaderType.Pixel, ShaderResourceVariableType.Dynamic, varTexture);

                var sampler = new SamplerDesc(false)
                {
                    MipFilter = FilterType.Point,
                };
                psf.layoutStaticSampler(ShaderType.Pixel, ref sampler, varTexture);

                psf.apply(ref pso);
                pipelineState = device.CreatePipelineState(ref pso);
            }

            // Create the resource binding and cache the texture variable; both are needed on every rendered frame
            binding         = pipelineState.CreateShaderResourceBinding(true);
            textureVariable = binding.GetVariableByName(ShaderType.Pixel, varTexture);
        }
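The binding and the cached textureVariable from the constructor above are reused on every rendered frame. The per-frame sketch below is an assumption rather than the project's actual render loop: the renderFrame name and the frameView parameter are made up, and the context methods follow Diligent Engine naming, which the Vrmac bindings may expose differently.

        // Assumed per-frame usage; method names follow Diligent Engine conventions and may differ in Vrmac
        void renderFrame(IDeviceContext context, ITextureView frameView)
        {
            // Point the dynamic pixel shader variable at this frame's video texture
            textureVariable.Set(frameView);

            // Bind the cached pipeline state and resources, then draw the video quad
            context.SetPipelineState(pipelineState);
            context.CommitShaderResources(binding);
            // ...issue the draw call for the vertex buffer created in the constructor
        }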
Example #3
 public LinuxRender(IRenderDevice device, CSize renderTargetSize, SwapChainFormats formats, Vector4 borderColor, iVideoTextureSource source) :
     base(device, renderTargetSize, formats, borderColor, source.videoSize)
 {
     this.source = source;
 }
Example #4
File: Engine.cs Project: zeta1999/Vrmac
 public override iVideoRenderState createRenderer(IRenderDevice device, CSize renderTargetSize, SwapChainFormats formats, Vector4 borderColor)
 {
     setNv12PresentMode();
     return new Render.LinuxRender(device, renderTargetSize, formats, borderColor, this);
 }
Example #5
 public WindowsRender(IRenderDevice device, CSize renderTargetSize, SwapChainFormats formats, Vector4 borderColor, iMediaEngine mediaEngine) :
     base(device, renderTargetSize, formats, borderColor, new sDecodedVideoSize(mediaEngine.nativeVideoSize))
 {
     this.mediaEngine = mediaEngine;
     // The texture that receives decoded video frames, created in the swap chain's color format
     videoTexture     = mediaEngine.createFrameTexture(device, formats.color);
 }
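Every example above consumes a SwapChainFormats value; Examples #2 and #5 read its color and depth members. The struct sketched below is inferred from those usages only, it is not the project's actual definition, and the TextureFormat member type is an assumption; the real struct may carry additional fields.

 // Inferred sketch of SwapChainFormats, based solely on the member accesses in the examples above
 public struct SwapChainFormats
 {
     // Render target (color buffer) format; assumed to be a TextureFormat enum value
     public TextureFormat color;
     // Depth-stencil buffer format; assumed to be a TextureFormat enum value
     public TextureFormat depth;
 }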