/// <summary>
/// Applies the layer to an aperture texture. The output
/// is to be understood as the transmittance through any
/// pixel, and will be blended multiplicatively together
/// with other layers (as such, order does not matter).
/// </summary>
/// <param name="context">The device context.</param>
/// <param name="output">The output aperture.</param>
/// <param name="profile">An optical profile.</param>
/// <param name="pass">A SurfacePass instance.</param>
/// <param name="time">The elapsed time.</param>
/// <param name="dt">The time since last call.</param>
public abstract void ApplyLayer(DeviceContext context, GraphicsResource output, OpticalProfile profile, SurfacePass pass, double time, double dt);
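// A minimal sketch of a concrete layer, assuming a hypothetical
// ConstantTransmittanceLayer that dims the whole aperture uniformly;
// the class and its shader are illustrative only, not part of the
// actual layer set. Because layers are blended multiplicatively, the
// layer just renders its own transmittance and lets the blend state
// combine it with whatever the other layers produced.
class ConstantTransmittanceLayer : ApertureLayer
{
    public override void ApplyLayer(DeviceContext context, GraphicsResource output, OpticalProfile profile, SurfacePass pass, double time, double dt)
    {
        // Every pixel transmits 75% of incident light; ordering relative
        // to other layers is irrelevant since multiplication commutes.
        pass.Pass(context, @"
        struct PS_IN
        {
            float4 pos : SV_POSITION;
            float2 tex : TEXCOORD;
        };

        float3 main(PS_IN input) : SV_Target
        {
            return float3(0.75, 0.75, 0.75);
        }
        ", output.Dimensions, output.RTV, null, null);
    }
}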
public void Render(RenderTargetView renderTargetView, DeviceContext context, SurfacePass pass)
{
    // TODO: generate sky envmap here, with custom params

    pass.Pass(context, @"
    struct PS_IN
    {
        float4 pos : SV_POSITION;
        float2 tex : TEXCOORD;
    };

    float3 main(PS_IN input) : SV_Target
    {
        float phi = 2 * 3.14159265 * input.tex.x;
        float theta = 3.14159265 * input.tex.y;

        float3 p = float3(sin(theta) * cos(phi), cos(theta), sin(theta) * sin(phi));

        float brightness = 500;

        if (p.y < 0) return lerp(float3(1, 1, 1), float3(0, 0, 0), -p.y) * brightness; /* TEMPORARY */

        float sunBrightness = (dot(p, normalize(float3(-0.5f, 0.8f, 0.9f))) > 0.9995f) ? 1 : 0;

        return lerp(float3(1, 1, 1), float3(0.7f, 0.7f, 1), p.y) * brightness + sunBrightness * 50000;
    }
    ", skyEnvMap.Dimensions, skyEnvMap.RTV, null, null);

    // Unbind the sky envmap render target before using it as a shader resource.
    context.OutputMerger.SetRenderTargets((RenderTargetView)null);

    context.ClearDepthStencilView(depthStencilView, DepthStencilClearFlags.Depth, 1.0f, 0);
    context.ClearRenderTargetView(renderTargetView, new Color4(0.5f, 0, 1, 1));

    // Set up pipeline state for the main geometry pass.
    context.Rasterizer.State = rasterizerState;
    context.OutputMerger.DepthStencilState = depthStencilState;
    context.OutputMerger.SetTargets(depthStencilView, renderTargetView);
    context.Rasterizer.SetViewports(new[] { new ViewportF(0, 0, RenderDimensions.Width, RenderDimensions.Height) });
    context.Rasterizer.SetScissorRectangles(new[] { new SharpDX.Rectangle(0, 0, RenderDimensions.Width, RenderDimensions.Height) });

    context.VertexShader.Set(vertexShader);
    context.InputAssembler.InputLayout = inputLayout;
    context.PixelShader.SetShaderResource(0, skyEnvMap.SRV);
    context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;

    // Upload the current camera state into the camera constant buffer.
    {
        DataStream cameraStream;
        context.MapSubresource(cameraBuffer, MapMode.WriteDiscard, MapFlags.None, out cameraStream);
        camera.WriteTo(cameraStream);
        context.UnmapSubresource(cameraBuffer, 0);
        cameraStream.Dispose();
    }

    context.VertexShader.SetConstantBuffer(0, cameraBuffer);
    context.PixelShader.SetConstantBuffer(0, cameraBuffer);

    foreach (Model model in models.Values)
        model.Render(context, camera, materials, proxy);
}
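// Note on the sky shader above: input.tex is interpreted as equirectangular
// (latitude-longitude) coordinates, with phi sweeping the full horizontal
// circle and theta running pole to pole, so the render target holds a
// complete spherical environment map. The lower hemisphere is faded to
// black as a temporary stand-in for ground, and a small disc around the
// hardcoded sun direction is boosted far above the sky's base brightness.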
/// <summary>
/// Generates the diffraction spectrum of a texture. The source texture
/// must be the exact resolution specified in the constructor; the output,
/// however, will be resized to the destination texture as needed.
/// </summary>
/// <param name="device">The graphics device to use.</param>
/// <param name="context">The device context to use.</param>
/// <param name="pass">A SurfacePass instance.</param>
/// <param name="renderSize">The dimensions of the render target.</param>
/// <param name="destination">The destination render target view.</param>
/// <param name="source">The source texture; can be the same resource as the render target.</param>
/// <param name="fNumber">The f-number at which to evaluate the diffraction spectrum.</param>
public void Diffract(Device device, DeviceContext context, SurfacePass pass, Size renderSize, RenderTargetView destination, ShaderResourceView source, double fNumber)
{
    if (source.Description.Dimension != ShaderResourceViewDimension.Texture2D)
        throw new ArgumentException("Source SRV must point to a Texture2D resource of suitable dimensions.");

    //if (new Size(source.ResourceAs<Texture2D>().Description.Width, source.ResourceAs<Texture2D>().Description.Height) != resolution)
    //    throw new ArgumentException("Source texture must be the same dimensions as diffraction resolution.");

    // Copy the source texture into a raw buffer suitable for the FFT.
    pass.Pass(context, Encoding.ASCII.GetString(Resources.DiffractionTexToBuf), new ViewportF(0, 0, resolution.Width, resolution.Height), null, new[] { source }, new[] { buffer.view }, null);

    DataStream cbuffer = new DataStream(8, true, true);
    cbuffer.Write<uint>((uint)resolution.Width);
    cbuffer.Write<uint>((uint)resolution.Height);
    cbuffer.Position = 0;

    // Forward-transform the aperture, then write the transform back into a texture.
    UnorderedAccessView fftView = fft.ForwardTransform(buffer.view);
    pass.Pass(context, Encoding.ASCII.GetString(Resources.DiffractionBufToTex), new ViewportF(0, 0, transform.Dimensions.Width, transform.Dimensions.Height), transform.RTV, null, new[] { fftView }, cbuffer);
    fftView.Dispose();
    cbuffer.Dispose();

    cbuffer = new DataStream(4, true, true);
    cbuffer.Write<float>((float)fNumber);
    cbuffer.Position = 0;

    // Evaluate the diffraction spectrum at the requested f-number.
    pass.Pass(context, Encoding.ASCII.GetString(Resources.DiffractionSpectrum), spectrum.Dimensions, spectrum.RTV, new[] { transform.SRV }, cbuffer);
    context.GenerateMips(spectrum.SRV);
    cbuffer.Dispose();

    cbuffer = new DataStream(4, true, true);
    cbuffer.Write<float>((float)fNumber);
    cbuffer.Position = 0;

    // Normalize and resize the spectrum into the destination render target.
    pass.Pass(context, Encoding.ASCII.GetString(Resources.DiffractionNormalize), renderSize, destination, new[] { spectrum.SRV }, cbuffer);
    cbuffer.Dispose();
}
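// A minimal usage sketch, assuming a "diffraction" instance of this class
// constructed with a matching resolution, plus an "aperture" texture
// exposing an SRV and a "target" RTV; all three names are hypothetical.
diffraction.Diffract(device, context, pass,
                     new Size(1024, 1024), // render target dimensions
                     target,               // destination RTV
                     aperture.SRV,         // source, must match constructor resolution
                     8.0);                 // f-number to evaluate at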
/// <summary>
/// Tone-maps a texture into another. The target dimensions,
/// source dimensions, and ToneMapper dimensions <b>must</b>
/// be exactly the same for correct operation.
/// </summary>
/// <param name="context">The device context.</param>
/// <param name="pass">A SurfacePass instance.</param>
/// <param name="target">The render target.</param>
/// <param name="source">The source texture.</param>
public void ToneMap(DeviceContext context, SurfacePass pass, RenderTargetView target, ShaderResourceView source)
{
    // Reduce the source into the temporary texture and generate its mip
    // chain, whose lowest level yields the average used by the operator.
    pass.Pass(context, averageShader, temporary.Dimensions, temporary.RTV, new[] { source }, null);
    context.GenerateMips(temporary.SRV);

    DataStream cbuffer = new DataStream(8, true, true);
    cbuffer.Write<float>((float)(1.0 / Gamma));
    cbuffer.Write<float>((float)Exposure);
    cbuffer.Position = 0;

    pass.Pass(context, operateShader, temporary.Dimensions, target, new[] { temporary.SRV }, cbuffer);
    cbuffer.Dispose();
}
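// A short usage sketch, assuming a "toneMapper" instance whose dimensions
// match both textures, an HDR "frame" texture, and a back-buffer RTV; the
// instance and texture names are hypothetical. The constant buffer above
// packs (1 / Gamma, Exposure), so the operate shader is presumably computing
// something like pow(color * Exposure, 1 / Gamma).
toneMapper.Gamma = 2.2;      // standard display gamma
toneMapper.Exposure = 0.18;  // middle-grey key value
toneMapper.ToneMap(context, pass, backBufferRTV, frame.SRV);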
// Copies one channel of the source into the FFT buffer, padding the
// remainder with zeroes so that the frequency-domain product below yields
// a linear convolution rather than a circular (wrapping) one.
private void ZeroPad(Device device, DeviceContext context, SurfacePass pass, ShaderResourceView source, UnorderedAccessView target, String channel)
{
    ViewportF viewport = new ViewportF(0, 0, resolution.Width, resolution.Height);

    DataStream cbuffer = new DataStream(8, true, true);
    cbuffer.Write<uint>((uint)resolution.Width);
    cbuffer.Write<uint>((uint)resolution.Height);
    cbuffer.Position = 0;

    pass.Pass(context, "#define CHANNEL " + channel + "\n" + Encoding.ASCII.GetString(Resources.ConvolutionZeroPad), viewport, null, new[] { source }, new[] { target }, cbuffer);

    cbuffer.Dispose();
}
private void ConvolveChannel(Device device, DeviceContext context, SurfacePass pass, ShaderResourceView sourceA, ShaderResourceView sourceB, GraphicsResource target, String channel)
{
    if ((channel != "x") && (channel != "y") && (channel != "z"))
        throw new ArgumentException("Invalid RGB channel specified.");

    ViewportF viewport = new ViewportF(0, 0, resolution.Width, resolution.Height);

    // Zero-pad both operands, then take their forward transforms:
    // FFT(A) ends up in tBuf, FFT(B) in lBuf.
    ZeroPad(device, context, pass, sourceA, lBuf.view, channel);
    ZeroPad(device, context, pass, sourceB, rBuf.view, channel);

    fft.ForwardTransform(lBuf.view, tBuf.view);
    fft.ForwardTransform(rBuf.view, lBuf.view);

    DataStream cbuffer = new DataStream(8, true, true);
    cbuffer.Write<uint>((uint)resolution.Width);
    cbuffer.Write<uint>((uint)resolution.Height);
    cbuffer.Position = 0;

    // Pointwise-multiply the two spectra (result lands in tBuf).
    pass.Pass(context, Encoding.ASCII.GetString(Resources.ConvolutionMultiply), viewport, null, null, new[] { tBuf.view, lBuf.view }, cbuffer);
    cbuffer.Dispose();

    cbuffer = new DataStream(8, true, true);
    cbuffer.Write<uint>((uint)resolution.Width);
    cbuffer.Write<uint>((uint)resolution.Height);
    cbuffer.Position = 0;

    // Inverse-transform the product and write the convolved channel out.
    UnorderedAccessView fftView = fft.InverseTransform(tBuf.view);
    pass.Pass(context, Encoding.ASCII.GetString(Resources.ConvolutionOutput), target.Dimensions, target.RTV, null, new[] { fftView }, cbuffer);
    fftView.Dispose();
    cbuffer.Dispose();
}
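// Aside: ConvolveChannel is a direct application of the convolution
// theorem, conv(A, B) = IFFT(FFT(A) * FFT(B)), where * is a pointwise
// complex multiply. Per channel this costs two forward FFTs, one multiply
// pass, and one inverse FFT, which is far cheaper than an O(N^2) spatial
// filter for large kernels. The ZeroPad step is what makes the pointwise
// product correspond to a linear convolution instead of the circular
// convolution the DFT natively computes.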
public void Convolve(Device device, DeviceContext context, SurfacePass pass, Size renderSize, RenderTargetView destination, ShaderResourceView a, ShaderResourceView b, bool scaleCorrect)
{
    // Rescale the second operand to the convolution resolution.
    pass.Pass(context, Encoding.ASCII.GetString(Resources.ConvolutionRescale), staging.Dimensions, staging.RTV, new[] { b }, null);

    // Convolve each color channel separately.
    ConvolveChannel(device, context, pass, a, staging.SRV, rConvolved, "x");
    ConvolveChannel(device, context, pass, a, staging.SRV, gConvolved, "y");
    ConvolveChannel(device, context, pass, a, staging.SRV, bConvolved, "z");

    if (scaleCorrect)
        context.OutputMerger.SetBlendState(blendState);

    // Recombine the three convolved channels into the destination.
    pass.Pass(context, Encoding.ASCII.GetString(Resources.ConvolutionCompose), renderSize, destination, new[] { rConvolved.SRV, gConvolved.SRV, bConvolved.SRV }, null); // TODO: better resizing later

    context.OutputMerger.SetBlendState(null);
}
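// A minimal usage sketch, assuming a "convolver" instance of this class
// plus "frame" (the image) and "flare" (the filter kernel, e.g. a point
// spread function) textures; all three names are hypothetical.
convolver.Convolve(device, context, pass,
                   new Size(1280, 720), // output dimensions
                   backBufferRTV,       // where the composed result lands
                   frame.SRV,           // first operand (image)
                   flare.SRV,           // second operand, rescaled internally
                   scaleCorrect: true); // apply the scale-correcting blend state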
/// <summary>
/// Creates an EyeDiffraction instance with custom settings. The graphics
/// device will be reused, but will not be disposed of at instance destruction.
/// </summary>
/// <param name="device">The graphics device to use.</param>
/// <param name="context">The device context to use.</param>
/// <param name="quality">The required render quality.</param>
/// <param name="profile">The desired optical profile.</param>
/// <param name="options">The desired diffraction options.</param>
public EyeDiffraction(Device device, DeviceContext context, RenderQuality quality, OpticalProfile profile, DiffractionOptions options)
{
    Pass = new SurfacePass(device);
    composer = new ApertureComposer(device);

    Device = device;   /* Store the device.  */
    Context = context; /* Save the context.  */
    Quality = quality; /* Save the quality.  */
    Profile = profile; /* Save the profile.  */
    Options = options; /* Save the options.  */
}
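// A brief construction sketch, assuming OpticalProfile and DiffractionOptions
// expose parameterless constructors and that RenderQuality offers a "Medium"
// preset; all three of those assumptions are hypothetical.
var diffraction = new EyeDiffraction(device, context,
                                     RenderQuality.Medium,
                                     new OpticalProfile(),
                                     new DiffractionOptions());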
/// <summary>
/// Composes an aperture.
/// </summary>
/// <param name="context">The device context.</param>
/// <param name="output">The output render target.</param>
/// <param name="profile">The optical profile to use.</param>
/// <param name="pass">A SurfacePass instance to use.</param>
/// <param name="time">The elapsed time.</param>
/// <param name="dt">The time since last call.</param>
public void Compose(DeviceContext context, GraphicsResource output, OpticalProfile profile, SurfacePass pass, double time, double dt)
{
    // Start from full transmittance (white) and let each layer multiply its
    // own transmittance in; the blend state makes layer ordering irrelevant.
    context.ClearRenderTargetView(output.RTV, Color4.White);
    context.OutputMerger.SetBlendState(blendState);

    foreach (ApertureLayer layer in layers)
        layer.ApplyLayer(context, output, profile, pass, time, dt);

    context.OutputMerger.SetBlendState(null);
}
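// A short usage sketch, assuming an "aperture" GraphicsResource with an RTV
// to receive the composed result, and "totalTime"/"frameTime" timing values
// maintained by the caller; those three names are hypothetical. Profile and
// Pass are the properties stored by the EyeDiffraction constructor above.
composer.Compose(context, aperture, Profile, Pass, totalTime, frameTime);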